[automerger skipped] Merge "C2 VTS: Disable multi-frame tests in audio decoder" into android13-tests-dev am: 0ca052b7ef -s ours

am skip reason: Merged-In I8084b48b4a181e87bd151bc2870eddad36ae3a0b with SHA-1 04c36855dd is already in history

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2753242

Change-Id: I64dc95434133e726baeb825ae9d5bf062a275e38
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/Android.bp b/Android.bp
index ee609e1..37f6457 100644
--- a/Android.bp
+++ b/Android.bp
@@ -44,7 +44,7 @@
     srcs: [
         "aidl/android/media/InterpolatorConfig.aidl",
         "aidl/android/media/InterpolatorType.aidl",
-        "aidl/android/media/MicrophoneInfoData.aidl",
+        "aidl/android/media/MicrophoneInfoFw.aidl",
         "aidl/android/media/VolumeShaperConfiguration.aidl",
         "aidl/android/media/VolumeShaperConfigurationOptionFlag.aidl",
         "aidl/android/media/VolumeShaperConfigurationType.aidl",
@@ -52,16 +52,22 @@
         "aidl/android/media/VolumeShaperOperationFlag.aidl",
         "aidl/android/media/VolumeShaperState.aidl",
     ],
+    imports: [
+        "android.media.audio.common.types-V2",
+    ],
     backend: {
         cpp: {
             min_sdk_version: "29",
             apex_available: [
                 "//apex_available:platform",
-                "com.android.bluetooth",
+                "com.android.btservices",
                 "com.android.media",
                 "com.android.media.swcodec",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
 
@@ -75,10 +81,10 @@
         "av-types-aidl-cpp",
     ],
     header_libs: [
-        "libaudioclient_aidl_conversion_util",
+        "libaudio_aidl_conversion_common_util_cpp",
     ],
     export_header_lib_headers: [
-        "libaudioclient_aidl_conversion_util",
+        "libaudio_aidl_conversion_common_util_cpp",
     ],
     host_supported: true,
     vendor_available: true,
@@ -86,7 +92,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth",
+        "com.android.btservices",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
index 490bbbf..bf39c1a 100644
--- a/MainlineFiles.cfg
+++ b/MainlineFiles.cfg
@@ -21,11 +21,20 @@
 #
 # matching is purely prefix
 # so 'foo' will match 'foo', 'foo.c', 'foo/bar/baz'
-# if you want to exclude a directory, best to use a pattern like "foo/"
+# if you want to specify a directory, best to use a pattern like "foo/"
 #
 
+apex/
 media/codec2/components/
-media/codecs/
-media/extractors/
-media/libstagefright/mpeg2ts/
-media/libstagefright/flac/
+media/codec2/core/
+media/codec2/hidl/
+media/codec2/sfplugin/utils/
+media/codec2/vndk/
+media/libstagefright/data/media_codecs_sw.xml
+media/module/
+services/mediacodec/
+
+# source code used in both framework and mainline libraries
+media/libstagefright/HevcUtils.cpp
+media/libstagefright/MediaSource.cpp
+media/libstagefright/Utils.cpp
diff --git a/OWNERS b/OWNERS
index 40c65e7..3c7a3ab 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,7 +1,16 @@
-# Bug component: 1344
+## Media team top-level OWNERS, bug component: 1344
+## Only contact for camera changes as a fallback
 elaurent@google.com
-etalvala@google.com
 lajos@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
+
+## Camera team top-level OWNERS, bug component: 41727
+## Only contact for media changes as a fallback
+etalvala@google.com
+shuzhenwang@google.com
+
+# mainline related
+per-file MainlineFiles.cfg=essick@google.com
+per-file MainlineFiles.cfg=file:/media/janitors/reliability_mainline_OWNERS
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 716b550..1f7083b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -11,3 +11,4 @@
 clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
                media/libmediatranscoding/
                services/mediatranscoding/
+               media/libaudioclient/tests/
diff --git a/aidl/android/media/MicrophoneInfoData.aidl b/aidl/android/media/MicrophoneInfoData.aidl
deleted file mode 100644
index 747bfa5..0000000
--- a/aidl/android/media/MicrophoneInfoData.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable MicrophoneInfoData {
-    @utf8InCpp String deviceId;
-    int portId;
-    int type;
-    @utf8InCpp String address;
-    int deviceLocation;
-    int deviceGroup;
-    int indexInTheGroup;
-    float[] geometricLocation;
-    float[] orientation;
-    float[] frequencies;
-    float[] frequencyResponses;
-    int[] channelMapping;
-    float sensitivity;
-    float maxSpl;
-    float minSpl;
-    int directionality;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/aidl/android/media/MicrophoneInfoFw.aidl
similarity index 69%
copy from media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
copy to aidl/android/media/MicrophoneInfoFw.aidl
index 3a4ca31..bad0e0a 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
+++ b/aidl/android/media/MicrophoneInfoFw.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright 2020 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,13 +16,14 @@
 
 package android.media;
 
-import android.media.AudioPortConfigSys;
-import android.media.audio.common.AudioPortConfig;
+import android.media.audio.common.MicrophoneDynamicInfo;
+import android.media.audio.common.MicrophoneInfo;
 
 /**
  * {@hide}
  */
-parcelable AudioPortConfig {
-    AudioPortConfig hal;
-    AudioPortConfigSys sys;
+parcelable MicrophoneInfoFw {
+    MicrophoneInfo info;
+    MicrophoneDynamicInfo dynamic;
+    int portId;
 }
diff --git a/apex/Android.bp b/apex/Android.bp
index 570ca01..b0d7c02 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -67,13 +67,15 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media-androidManifest",
 
-    // IMPORTANT: q-launched-apex-module enables the build system to make
-    // sure the package compatible to Android 10 in two ways:
+    // IMPORTANT: q-launched-dcla-enabled-apex-module enables the build system to make
+    // sure the package is compatible with Android 10 in two ways (if the flag
+    // APEX_BUILD_FOR_PRE_S_DEVICES=1 is set):
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    defaults: ["q-launched-apex-module"],
+    // If the flag is not set, the package is built to be compatible with Android 12.
+    defaults: ["q-launched-dcla-enabled-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
@@ -191,13 +193,15 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media.swcodec-androidManifest",
 
-    // IMPORTANT: q-launched-apex-module enables the build system to make
-    // sure the package compatible to Android 10 in two ways:
+    // IMPORTANT: q-launched-dcla-enabled-apex-module enables the build system to make
+    // sure the package is compatible with Android 10 in two ways (if the flag
+    // APEX_BUILD_FOR_PRE_S_DEVICES=1 is set):
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    defaults: ["q-launched-apex-module"],
+    // If the flag is not set, the package is built to be compatible with Android 12.
+    defaults: ["q-launched-dcla-enabled-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index 4b7c019..bb4f089 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -7,16 +7,16 @@
   "presubmit": [
     // The following tests validate codec and drm path.
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 713f0b7..4dc5fb1 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -33,7 +33,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.default.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.default.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libRS.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
+namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
 
 ###############################################################################
 # "platform" namespace
@@ -138,7 +138,7 @@
 # TODO: replace the following when apex has a way to auto-generate this list
 # namespace.sphal.link.platform.shared_libs  = %LLNDK_LIBRARIES%
 # namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libRS.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
+namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
 
 # Add a link for libz.so which is llndk on devices where VNDK is not enforced.
 namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/apex/manifest.json b/apex/manifest.json
index 4b75b04..5b235cd 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,10 @@
 {
   "name": "com.android.media",
-  "version": 339990000,
+
+  // Placeholder module version to be replaced during build.
+  // Do not change!
+  "version": 0,
+
   "requireNativeLibs": [
     "libandroid.so",
     "libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index fbcbb69..f2b8b36 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,10 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 339990000,
+
+  // Placeholder module version to be replaced during build.
+  // Do not change!
+  "version": 0,
+
   "requireNativeLibs": [
     ":sphal"
   ]
diff --git a/camera/Android.bp b/camera/Android.bp
index e44202b..b3f70f4 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -47,7 +47,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
-cc_library_shared {
+cc_library {
     name: "libcamera_client",
 
     aidl: {
@@ -93,6 +93,7 @@
         "libgui",
         "libcamera_metadata",
         "libnativewindow",
+        "lib-platform-compat-native-api",
     ],
 
     include_dirs: [
@@ -141,14 +142,15 @@
 filegroup {
     name: "libcamera_client_aidl",
     srcs: [
+        "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceUser.aidl",
-        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionCallback.aidl",
         "aidl/android/hardware/camera2/ICameraInjectionSession.aidl",
+        "aidl/android/hardware/camera2/ICameraOfflineSession.aidl",
     ],
     path: "aidl",
 }
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 604dbb8..2244682 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,11 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion)
+        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
+        bool forceSlowJpegMode)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion);
+            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 24c9108..9ae4607 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -23,6 +23,7 @@
 #include <cutils/properties.h>
 
 #include <android/hardware/ICameraService.h>
+#include <com/android/internal/compat/IPlatformCompatNative.h>
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -161,7 +162,8 @@
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const String16& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion)
+                                               int clientUid, int clientPid, int targetSdkVersion,
+                                               bool overrideToPortrait, bool forceSlowJpegMode)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -171,8 +173,11 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
+        ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
+                overrideToPortrait, forceSlowJpegMode);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                                               clientPid, targetSdkVersion, /*out*/ &c->mCamera);
+                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
+                 /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -273,10 +278,11 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
+        bool overrideToPortrait,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index d1aa36a..9e9793d 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -130,6 +130,12 @@
         return err;
     }
 
+    int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+        return err;
+    }
+
     mWidth = width;
     mHeight = height;
     mFormat = format;
@@ -146,6 +152,7 @@
     mHistogramCounts = std::move(histogramCounts);
     mDynamicRangeProfile = dynamicRangeProfile;
     mStreamUseCase = streamUseCase;
+    mColorSpace = colorSpace;
 
     return OK;
 }
@@ -238,6 +245,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+        ALOGE("%s: Failed to write color space", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
@@ -259,17 +271,20 @@
         mApiLevel(0),
         mIsNdk(false),
         mLatencyMs(-1),
+        mLogId(0),
         mMaxPreviewFps(0),
         mSessionType(0),
         mInternalReconfigure(0),
         mRequestCount(0),
         mResultErrorCount(0),
         mDeviceError(false),
-        mVideoStabilizationMode(-1) {}
+        mVideoStabilizationMode(-1),
+        mSessionIndex(0),
+        mCameraExtensionSessionStats() {}
 
 CameraSessionStats::CameraSessionStats(const String16& cameraId,
         int facing, int newCameraState, const String16& clientName,
-        int apiLevel, bool isNdk, int32_t latencyMs) :
+        int apiLevel, bool isNdk, int32_t latencyMs, int64_t logId) :
                 mCameraId(cameraId),
                 mFacing(facing),
                 mNewCameraState(newCameraState),
@@ -277,13 +292,16 @@
                 mApiLevel(apiLevel),
                 mIsNdk(isNdk),
                 mLatencyMs(latencyMs),
+                mLogId(logId),
                 mMaxPreviewFps(0),
                 mSessionType(0),
                 mInternalReconfigure(0),
                 mRequestCount(0),
                 mResultErrorCount(0),
                 mDeviceError(0),
-                mVideoStabilizationMode(-1) {}
+                mVideoStabilizationMode(-1),
+                mSessionIndex(0),
+                mCameraExtensionSessionStats() {}
 
 status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
     if (parcel == NULL) {
@@ -335,6 +353,12 @@
         return err;
     }
 
+    int64_t logId;
+    if ((err = parcel->readInt64(&logId)) != OK) {
+        ALOGE("%s: Failed to read log ID from parcel", __FUNCTION__);
+        return err;
+    }
+
     float maxPreviewFps;
     if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
         ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
@@ -389,6 +413,18 @@
         return err;
     }
 
+    int32_t sessionIdx;
+    if ((err = parcel->readInt32(&sessionIdx)) != OK) {
+        ALOGE("%s: Failed to read session index from parcel", __FUNCTION__);
+        return err;
+    }
+
+    CameraExtensionSessionStats extStats{};
+    if ((err = extStats.readFromParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to read extension session stats from parcel", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = id;
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -396,6 +432,7 @@
     mApiLevel = apiLevel;
     mIsNdk = isNdk;
     mLatencyMs = latencyMs;
+    mLogId = logId;
     mMaxPreviewFps = maxPreviewFps;
     mSessionType = sessionType;
     mInternalReconfigure = internalReconfigure;
@@ -405,6 +442,8 @@
     mStreamStats = std::move(streamStats);
     mUserTag = userTag;
     mVideoStabilizationMode = videoStabilizationMode;
+    mSessionIndex = sessionIdx;
+    mCameraExtensionSessionStats = extStats;
 
     return OK;
 }
@@ -452,6 +491,11 @@
         return err;
     }
 
+    if ((err = parcel->writeInt64(mLogId)) != OK) {
+        ALOGE("%s: Failed to write log ID!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
         ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
         return err;
@@ -496,6 +540,17 @@
         ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
         return err;
     }
+
+    if ((err = parcel->writeInt32(mSessionIndex)) != OK) {
+        ALOGE("%s: Failed to write session index!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = mCameraExtensionSessionStats.writeToParcel(parcel)) != OK) {
+        ALOGE("%s: Failed to write extension session stats!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index be47898..bb880d1 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -52,7 +52,10 @@
     parcel->readInt64(&lastCompletedRegularFrameNumber);
     parcel->readInt64(&lastCompletedReprocessFrameNumber);
     parcel->readInt64(&lastCompletedZslFrameNumber);
-
+    parcel->readBool(&hasReadoutTimestamp);
+    if (hasReadoutTimestamp) {
+        parcel->readInt64(&readoutTimestamp);
+    }
     return OK;
 }
 
@@ -82,6 +85,10 @@
     parcel->writeInt64(lastCompletedRegularFrameNumber);
     parcel->writeInt64(lastCompletedReprocessFrameNumber);
     parcel->writeInt64(lastCompletedZslFrameNumber);
+    parcel->writeBool(hasReadoutTimestamp);
+    if (hasReadoutTimestamp) {
+        parcel->writeInt64(readoutTimestamp);
+    }
 
     return OK;
 }
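
Illustrative sketch (not from the patch): the hunks above serialize the new readout timestamp as an optional field -- a bool flag is always written, and the 64-bit value only when the flag is set, in the same order on the read and write paths. The ReadoutInfo struct below is hypothetical and merely mirrors that pattern; it assumes an Android tree for <binder/Parcel.h>.

    #include <binder/Parcel.h>
    #include <utils/Errors.h>

    // Hypothetical struct showing the optional-field parceling pattern.
    struct ReadoutInfo {
        bool hasReadoutTimestamp = false;
        int64_t readoutTimestamp = 0;

        android::status_t writeToParcel(android::Parcel* parcel) const {
            parcel->writeBool(hasReadoutTimestamp);
            if (hasReadoutTimestamp) {
                parcel->writeInt64(readoutTimestamp);   // written only when the flag is set
            }
            return android::OK;
        }

        android::status_t readFromParcel(const android::Parcel* parcel) {
            parcel->readBool(&hasReadoutTimestamp);     // read back in the same order as written
            if (hasReadoutTimestamp) {
                parcel->readInt64(&readoutTimestamp);
            }
            return android::OK;
        }
    };
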
diff --git a/camera/OWNERS b/camera/OWNERS
index 385c163..b705548 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -1,7 +1,11 @@
 # Bug component: 41727
 etalvala@google.com
 arakesh@google.com
+borgera@google.com
+bkukreja@google.com
 epeev@google.com
 jchowdhary@google.com
-shuzhenwang@google.com
+rdhanjal@google.com
 ruchamk@google.com
+shuzhenwang@google.com
+
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index b37803a..151b653 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -598,7 +598,6 @@
 status_t VendorTagDescriptor::setAsGlobalVendorTagDescriptor(const sp<VendorTagDescriptor>& desc) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptor = desc;
 
     vendor_tag_ops_t* opsPtr = NULL;
     if (desc != NULL) {
@@ -613,6 +612,9 @@
         ALOGE("%s: Could not set vendor tag descriptor, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptor = desc;
+
     return res;
 }
 
@@ -631,7 +633,6 @@
         const sp<VendorTagDescriptorCache>& cache) {
     status_t res = OK;
     Mutex::Autolock al(sLock);
-    sGlobalVendorTagDescriptorCache = cache;
 
     struct vendor_tag_cache_ops* opsPtr = NULL;
     if (cache != NULL) {
@@ -646,6 +647,9 @@
         ALOGE("%s: Could not set vendor tag cache, received error %s (%d)."
                 , __FUNCTION__, strerror(-res), res);
     }
+
+    sGlobalVendorTagDescriptorCache = cache;
+
     return res;
 }
 
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
new file mode 100644
index 0000000..1c81831
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Metrics specific to Extension Sessions (see CameraExtensionSession) for logging.
+ *
+ * Each Extension Session is mapped to one camera session internally, and will be sent to
+ * CameraServiceProxy with IDLE/CLOSE calls.
+ * @hide
+ */
+parcelable CameraExtensionSessionStats {
+    /**
+     * Value should match {@code CameraExtensionCharacteristics#EXTENSION_*}
+     */
+    @Backing(type="int")
+    enum Type {
+        EXTENSION_NONE = -1,
+        EXTENSION_AUTOMATIC = 0,
+        EXTENSION_FACE_RETOUCH = 1,
+        EXTENSION_BOKEH = 2,
+        EXTENSION_HDR = 3,
+        EXTENSION_NIGHT = 4
+    }
+
+    /**
+     * Key to uniquely identify the session this stat is associated with. The first call to
+     * 'ICameraService.reportExtensionSessionStats' should set this to an empty string.
+     * 'ICameraService.reportExtensionSessionStats' will return the key which should be used with
+     * the next calls.
+     */
+    String key;
+
+    /**
+     * Camera ID for which the stats is being reported.
+     */
+    String cameraId;
+
+    /**
+     * Package name of the client using the camera
+     */
+    String clientName;
+
+
+    /**
+     * Type of extension session requested by the app. Note that EXTENSION_AUTOMATIC is reported
+     * as such.
+     */
+    Type type = Type.EXTENSION_NONE;
+
+    /**
+     * true if advanced extensions are being used, false otherwise
+     */
+    boolean isAdvanced = false;
+}
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 1e748c7..f8e1631 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,6 +30,7 @@
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
 import android.hardware.CameraStatus;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the native camera service running in mediaserver.
@@ -67,7 +68,7 @@
     /**
      * Fetch basic camera information for a camera device
      */
-    CameraInfo getCameraInfo(int cameraId);
+    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
 
     /**
      * Default UID/PID values for non-privileged callers of
@@ -83,7 +84,9 @@
             int cameraId,
             String opPackageName,
             int clientUid, int clientPid,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait,
+            boolean forceSlowJpegMode);
 
     /**
      * Open a camera device through the new camera API
@@ -94,7 +97,8 @@
             String opPackageName,
             @nullable String featureId,
             int clientUid, int oomScoreOffset,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -135,7 +139,8 @@
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
      */
-    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion);
+    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -210,6 +215,26 @@
      */
     oneway void notifyDeviceStateChange(long newState);
 
+    /**
+     * Report Extension specific metrics to camera service for logging. This should only be called
+     * by CameraExtensionSession to log extension metrics. All calls after the first must set
+     * CameraExtensionSessionStats.key to the value returned by this function.
+     *
+     * Each subsequent call fully overwrites the existing CameraExtensionSessionStats for the
+     * current session, so the caller is responsible for keeping the stats complete.
+     *
+     * Due to the cameraservice and cameraservice_proxy architecture, there is no guarantee that
+     * {@code stats} will be logged immediately (or at all). CameraService will log whatever
+     * extension stats it has at the time of camera session closing which may be before the app
+     * process receives a session/device closed callback; so CameraExtensionSession
+     * should send metrics to the cameraservice periodically, and cameraservice must handle calls
+     * to this function from sessions that have not been logged yet and from sessions that have
+     * already been closed.
+     *
+     * @return the key that must be used to report updates to previously reported stats.
+     */
+    String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
+
     // Bitfield constants for notifyDeviceStateChange
     // All bits >= 32 are for custom vendor states
     // Written as ints since AIDL does not support long constants.
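
Illustrative sketch (not from the patch): the reportExtensionSessionStats() comment above describes a small handshake -- the first report carries an empty key, and every later report must reuse the key the service returned. The ExtensionSessionStats struct, the free reportExtensionSessionStats() function, and the "session-key-0" value below are hypothetical stand-ins for the AIDL-generated binder interface, not the real API.

    #include <string>

    struct ExtensionSessionStats {      // hypothetical mirror of the parcelable
        std::string key;                // "" on the very first report
        std::string cameraId;
        std::string clientName;
        int type = -1;                  // EXTENSION_NONE
        bool isAdvanced = false;
    };

    // Hypothetical stand-in for ICameraService::reportExtensionSessionStats().
    std::string reportExtensionSessionStats(const ExtensionSessionStats& stats) {
        // Dummy behavior: allocate a key on the first call, echo it back afterwards.
        return stats.key.empty() ? std::string("session-key-0") : stats.key;
    }

    // Periodic reporting as the comment recommends: each call fully overwrites the
    // previously reported stats for this session, so always send a complete snapshot
    // and keep reusing the key returned by the first call.
    void reportSnapshot(ExtensionSessionStats& stats) {
        stats.key = reportExtensionSessionStats(stats);
    }
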
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 88783fb..4faa6b4 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -17,6 +17,7 @@
 package android.hardware;
 
 import android.hardware.CameraSessionStats;
+import android.hardware.CameraExtensionSessionStats;
 
 /**
  * Binder interface for the camera service proxy running in system_server.
@@ -46,7 +47,14 @@
     int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
 
     /**
+     * Returns the necessary autoframing override for the top activity which
+     * will be one of ({@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_FALSE},
+     * {@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_TRUE}).
+     */
+    int getAutoframingOverride(String packageName);
+
+    /**
      * Checks if the camera has been disabled via device policy.
      */
-    boolean isCameraDisabled();
+    boolean isCameraDisabled(int userId);
 }
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 11d4960..da4484a 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -26,8 +26,8 @@
 #include <system/camera_metadata.h>
 #include <utils/String8.h>
 
-namespace android {
 
+namespace android {
 
 const int OutputConfiguration::INVALID_ROTATION = -1;
 const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -81,6 +81,10 @@
     return mDynamicRangeProfile;
 }
 
+int32_t OutputConfiguration::getColorSpace() const {
+    return mColorSpace;
+}
+
 int64_t OutputConfiguration::getStreamUseCase() const {
     return mStreamUseCase;
 }
@@ -93,6 +97,10 @@
     return mMirrorMode;
 }
 
+bool OutputConfiguration::useReadoutTimestamp() const {
+    return mUseReadoutTimestamp;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -103,9 +111,11 @@
         mIsShared(false),
         mIsMultiResolution(false),
         mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
         mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-        mMirrorMode(MIRROR_MODE_AUTO) {
+        mMirrorMode(MIRROR_MODE_AUTO),
+        mUseReadoutTimestamp(false) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -191,6 +201,11 @@
         ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
         return err;
     }
+    int32_t colorSpace;
+    if ((err = parcel->readInt32(&colorSpace)) != OK) {
+        ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+        return err;
+    }
 
     int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -210,6 +225,12 @@
         return err;
     }
 
+    int useReadoutTimestamp = 0;
+    if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
+        ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -221,6 +242,7 @@
     mStreamUseCase = streamUseCase;
     mTimestampBase = timestampBase;
     mMirrorMode = mirrorMode;
+    mUseReadoutTimestamp = useReadoutTimestamp != 0;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -230,13 +252,14 @@
 
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
     mDynamicRangeProfile = dynamicProfile;
+    mColorSpace = colorSpace;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
-          ", timestampBase = %d, mirrorMode = %d",
+          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
           String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
-          mMirrorMode);
+          mMirrorMode, mUseReadoutTimestamp);
 
     return err;
 }
@@ -252,9 +275,11 @@
     mPhysicalCameraId = physicalId;
     mIsMultiResolution = false;
     mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
     mTimestampBase = TIMESTAMP_BASE_DEFAULT;
     mMirrorMode = MIRROR_MODE_AUTO;
+    mUseReadoutTimestamp = false;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -265,9 +290,10 @@
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
     mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
     mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+    mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-    mMirrorMode(MIRROR_MODE_AUTO) { }
+    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -317,6 +343,9 @@
     err = parcel->writeInt64(mDynamicRangeProfile);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mColorSpace);
+    if (err != OK) return err;
+
     err = parcel->writeInt64(mStreamUseCase);
     if (err != OK) return err;
 
@@ -326,6 +355,9 @@
     err = parcel->writeInt32(mMirrorMode);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
+    if (err != OK) return err;
+
     return OK;
 }
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..8472562 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -37,13 +37,14 @@
         "libui",
         "libgui",
         "libbinder",
+        "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -59,6 +60,6 @@
     init_rc: ["cameraserver.rc"],
 
     vintf_fragments: [
-        "manifest_android.frameworks.cameraservice.service@2.2.xml",
+        "manifest_android.frameworks.cameraservice.service.xml",
     ],
 }
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index 8f51458..e307653 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -5,3 +5,5 @@
     ioprio rt 4
     task_profiles CameraServiceCapacity MaxPerformance
     rlimit rtprio 10 10
+    onrestart class_restart cameraWatchdog
+    interface aidl android.frameworks.cameraservice.service.ICameraService/default
diff --git a/camera/cameraserver/main_cameraserver.cpp b/camera/cameraserver/main_cameraserver.cpp
index cef8ef5..c494732 100644
--- a/camera/cameraserver/main_cameraserver.cpp
+++ b/camera/cameraserver/main_cameraserver.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include "CameraService.h"
+#include <android/binder_process.h>
 #include <hidl/HidlTransportSupport.h>
 
 using namespace android;
@@ -26,15 +27,21 @@
 {
     signal(SIGPIPE, SIG_IGN);
 
-    // Set 5 threads for HIDL calls. Now cameraserver will serve HIDL calls in
-    // addition to consuming them from the Camera HAL as well.
+    // Set 5 threads for HIDL calls. Now cameraserver will serve HIDL calls.
     hardware::configureRpcThreadpool(5, /*willjoin*/ false);
 
+    // Set 5 threads for VNDK AIDL calls. Now cameraserver will serve
+    // VNDK AIDL calls in addition to consuming them from the Camera HAL as well.
+    ABinderProcess_setThreadPoolMaxThreadCount(5);
+
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
     ALOGI("ServiceManager: %p", sm.get());
     CameraService::instantiate();
     ALOGI("ServiceManager: %p done instantiate", sm.get());
     ProcessState::self()->startThreadPool();
+    ABinderProcess_startThreadPool();
+
     IPCThreadState::self()->joinThreadPool();
+    ABinderProcess_joinThreadPool();
 }
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
new file mode 100644
index 0000000..f7e455f
--- /dev/null
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
@@ -0,0 +1,20 @@
+<manifest version="1.0" type="framework">
+    <hal format="hidl" max-level="7">
+        <name>android.frameworks.cameraservice.service</name>
+        <transport>hwbinder</transport>
+        <version>2.2</version>
+        <interface>
+            <name>ICameraService</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+
+    <hal format="aidl">
+        <name>android.frameworks.cameraservice.service</name>
+        <version>1</version>
+        <interface>
+            <name>ICameraService</name>
+            <instance>default</instance>
+        </interface>
+    </hal>
+</manifest>
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
deleted file mode 100644
index eeafc91..0000000
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service@2.2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest version="1.0" type="framework">
-    <hal>
-        <name>android.frameworks.cameraservice.service</name>
-        <transport>hwbinder</transport>
-        <version>2.2</version>
-        <interface>
-            <name>ICameraService</name>
-            <instance>default</instance>
-        </interface>
-    </hal>
-</manifest>
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 58ccd69..21b57af 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int, int,
+        int, const String16&, int, int, int, bool, bool,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,7 +81,8 @@
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
                                 const String16& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion);
+                                int clientUid, int clientPid, int targetSdkVersion,
+                                bool overrideToPortrait, bool forceSlowJpegMode);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 8e53968..b20dc1b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -119,7 +119,8 @@
 
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion);
+                                 int clientUid, int clientPid, int targetSdkVersion,
+                                 bool overrideToPortrait, bool forceSlowJpegMode);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
@@ -127,6 +128,7 @@
     static int           getNumberOfCameras();
 
     static status_t      getCameraInfo(int cameraId,
+                                       bool overrideToPortrait,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
 
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index aaa88b2..071bc73 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -20,6 +20,7 @@
 #include <binder/Parcelable.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/hardware/CameraExtensionSessionStats.h>
 
 namespace android {
 namespace hardware {
@@ -67,22 +68,26 @@
     int64_t mDynamicRangeProfile;
     // Stream use case
     int64_t mStreamUseCase;
+    // Color space
+    int32_t mColorSpace;
 
     CameraStreamStats() :
             mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
             mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
             mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
-            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+            mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
     CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
             int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
-            int streamUseCase)
+            int streamUseCase, int32_t colorSpace)
             : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
               mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
               mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
               mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
               mDynamicRangeProfile(dynamicRangeProfile),
-              mStreamUseCase(streamUseCase) {}
+              mStreamUseCase(streamUseCase),
+              mColorSpace(colorSpace) {}
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
@@ -124,6 +129,22 @@
     bool mIsNdk;
     // latency in ms for camera open, close, or session creation.
     int mLatencyMs;
+
+    /*
+     * A randomly generated identifier to map the open/active/idle/close stats to each other after
+     * being logged. Every 'open' event will have a newly generated id which will be logged with
+     * active/idle/closed that correspond to the particular 'open' event.
+     *
+     * This ID is not meant to be globally unique forever. Probabilistically, this ID can be
+     * safely considered unique across all logs from one android build for 48 to 72 hours from
+     * its generation. Chances of identifier collisions are significant past a week or two.
+     *
+     * NOTE: There are no guarantees that the identifiers will be unique. The probability of
+     * collision within a short timeframe is low, but any system consuming these identifiers at
+     * scale should handle identifier collisions, potentially even from the same device.
+     */
+    int64_t mLogId;
+
     float mMaxPreviewFps;
 
     // Session info and statistics
@@ -138,11 +159,15 @@
     std::vector<CameraStreamStats> mStreamStats;
     String16 mUserTag;
     int mVideoStabilizationMode;
+    int mSessionIndex;
+
+    CameraExtensionSessionStats mCameraExtensionSessionStats;
 
     // Constructors
     CameraSessionStats();
     CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
-            const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs);
+                       const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs,
+                       int64_t logId);
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
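
Illustrative sketch (not from the patch): the mLogId comment above only specifies the identifier's properties (randomly generated, 64-bit, collisions tolerated); the actual generator is not part of this diff. The generateLogId() helper below is a hypothetical example of how such an ID could be produced.

    #include <cstdint>
    #include <random>

    // Hypothetical helper: returns a randomly generated 64-bit log identifier with
    // the properties described for mLogId. Consumers must still tolerate collisions.
    int64_t generateLogId() {
        std::random_device seed;
        std::mt19937_64 rng(seed());
        return static_cast<int64_t>(rng());
    }
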
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index f163c1e..de534ab 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -103,6 +103,17 @@
      */
     int64_t lastCompletedZslFrameNumber;
 
+    /**
+     * Whether the readoutTimestamp variable is valid and should be used.
+     */
+    bool hasReadoutTimestamp;
+
+    /**
+     * The readout timestamp of the capture. Its value is equal to the
+     * start-of-exposure timestamp plus the exposure time (and a possible fixed
+     * offset due to sensor crop).
+     */
+    int64_t readoutTimestamp;
 
     /**
      * Constructor initializes object as invalid by setting requestId to be -1.
@@ -118,7 +129,9 @@
           errorPhysicalCameraId(),
           lastCompletedRegularFrameNumber(-1),
           lastCompletedReprocessFrameNumber(-1),
-          lastCompletedZslFrameNumber(-1) {
+          lastCompletedZslFrameNumber(-1),
+          hasReadoutTimestamp(false),
+          readoutTimestamp(0) {
     }
 
     /**
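
Illustrative sketch (not from the patch): the readoutTimestamp comment in the header above defines it as the start-of-exposure timestamp plus the exposure time, possibly plus a fixed sensor-crop offset. A tiny worked example with made-up values:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t startOfExposureNs  = 1000000000;  // hypothetical start-of-exposure timestamp
        int64_t exposureTimeNs     =   33000000;  // hypothetical exposure time (~33 ms)
        int64_t sensorCropOffsetNs =     500000;  // hypothetical fixed readout offset
        int64_t readoutTimestampNs = startOfExposureNs + exposureTimeNs + sensorCropOffsetNs;
        std::printf("readoutTimestamp = %lld ns\n", (long long) readoutTimestampNs);
        return 0;
    }
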
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index b842885..16fddb5 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -43,7 +43,8 @@
         TIMESTAMP_BASE_SENSOR = 1,
         TIMESTAMP_BASE_MONOTONIC = 2,
         TIMESTAMP_BASE_REALTIME = 3,
-        TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4
+        TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4,
+        TIMESTAMP_BASE_MAX = TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
     };
     enum MirrorModeType {
         MIRROR_MODE_AUTO = 0,
@@ -59,6 +60,7 @@
     int                        getWidth() const;
     int                        getHeight() const;
     int64_t                    getDynamicRangeProfile() const;
+    int32_t                    getColorSpace() const;
     bool                       isDeferred() const;
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
@@ -66,6 +68,7 @@
     int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
     int                        getMirrorMode() const;
+    bool                       useReadoutTimestamp() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -109,9 +112,11 @@
                 mIsMultiResolution == other.mIsMultiResolution &&
                 sensorPixelModesUsedEqual(other) &&
                 mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mColorSpace == other.mColorSpace &&
                 mStreamUseCase == other.mStreamUseCase &&
                 mTimestampBase == other.mTimestampBase &&
-                mMirrorMode == other.mMirrorMode);
+                mMirrorMode == other.mMirrorMode &&
+                mUseReadoutTimestamp == other.mUseReadoutTimestamp);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -151,6 +156,9 @@
         if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
             return mDynamicRangeProfile < other.mDynamicRangeProfile;
         }
+        if (mColorSpace != other.mColorSpace) {
+            return mColorSpace < other.mColorSpace;
+        }
         if (mStreamUseCase != other.mStreamUseCase) {
             return mStreamUseCase < other.mStreamUseCase;
         }
@@ -160,6 +168,9 @@
         if (mMirrorMode != other.mMirrorMode) {
             return mMirrorMode < other.mMirrorMode;
         }
+        if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
+            return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
+        }
         return gbpsLessThan(other);
     }
 
@@ -185,9 +196,11 @@
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int64_t                    mDynamicRangeProfile;
+    int32_t                    mColorSpace;
     int64_t                    mStreamUseCase;
     int                        mTimestampBase;
     int                        mMirrorMode;
+    bool                       mUseReadoutTimestamp;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index fef873b..bfd02b3 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -141,6 +141,7 @@
     ],
 
     shared_libs: [
+        "libbinder_ndk",
         "libfmq",
         "libhidlbase",
         "libhardware",
@@ -151,15 +152,13 @@
         "libcutils",
         "libcamera_metadata",
         "libmediandk",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "libaidlcommonsupport",
         "libarect",
     ],
     // TODO: jchowdhary@, use header_libs instead b/131165718
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 9c98778..4387cc6 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -29,6 +29,7 @@
 #include "impl/ACameraCaptureSession.h"
 
 #include "impl/ACameraCaptureSession.inc"
+
 #include "NdkCameraCaptureSession.inc"
 
 using namespace android;
@@ -190,3 +191,38 @@
     }
     return session->updateOutputConfiguration(output);
 }
+
+EXPORT
+camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
+        ACameraCaptureSession* session, void *context,
+        ACameraCaptureSession_prepareCallback cb) {
+    ATRACE_CALL();
+    if (session == nullptr || cb == nullptr) {
+        ALOGE("%s: Error: session %p / callback %p is null", __FUNCTION__, session, cb);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    session->setWindowPreparedCallback(context, cb);
+    return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSession_prepareWindow(
+        ACameraCaptureSession* session,
+        ACameraWindowType *window) {
+    ATRACE_CALL();
+    if (session == nullptr || window == nullptr) {
+        ALOGE("%s: Error: session %p / window %p is null", __FUNCTION__, session, window);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+    return session->prepare(window);
+}
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 691996b..8211671 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -22,18 +22,11 @@
 #include <utils/Trace.h>
 
 #include <camera/NdkCameraDevice.h>
+
 #include "impl/ACameraCaptureSession.h"
 
 using namespace android::acam;
 
-bool areWindowTypesEqual(ACameraWindowType *a, ACameraWindowType *b) {
-#ifdef __ANDROID_VNDK__
-    return utils::isWindowNativeHandleEqual(a, b);
-#else
-    return a == b;
-#endif
-}
-
 EXPORT
 camera_status_t ACameraDevice_close(ACameraDevice* device) {
     ATRACE_CALL();
@@ -183,14 +176,15 @@
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_OPERATION;
     }
-    if (areWindowTypesEqual(out->mWindow, window)) {
+    if (out->isWindowEqual(window)) {
         ALOGE("%s: Error trying to add the same window associated with the output configuration",
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    auto insert = out->mSharedWindows.insert(window);
-    camera_status_t ret = (insert.second) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+
+    bool insert = out->addSharedWindow(window);
+    camera_status_t ret = (insert) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
     return ret;
 }
 
@@ -208,13 +202,13 @@
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_OPERATION;
     }
-    if (areWindowTypesEqual(out->mWindow, window)) {
+    if (out->isWindowEqual(window)) {
         ALOGE("%s: Error trying to remove the same window associated with the output configuration",
                 __FUNCTION__);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    auto remove = out->mSharedWindows.erase(window);
+    auto remove = out->removeSharedWindow(window);
     camera_status_t ret = (remove) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
     return ret;
 }
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 3d231a8..2de4a50 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -81,7 +81,7 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().registerAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -100,7 +100,7 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().unregisterAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -131,7 +131,7 @@
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
-    CameraManagerGlobal::getInstance().registerExtendedAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
@@ -154,7 +154,7 @@
                callback->onCameraAccessPrioritiesChanged);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance().unregisterExtendedAvailabilityCallback(callback);
+    CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 68db233..73439c7 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -23,7 +23,11 @@
 
 ACameraCaptureSession::~ACameraCaptureSession() {
     ALOGV("~ACameraCaptureSession: %p notify device end of life", this);
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev != nullptr && !dev->isClosed()) {
         dev->lockDeviceForSessionOps();
         {
@@ -50,7 +54,11 @@
         mClosedByApp = true;
     }
 
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev != nullptr) {
         dev->lockDeviceForSessionOps();
     }
@@ -75,7 +83,11 @@
 
 camera_status_t
 ACameraCaptureSession::stopRepeating() {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -93,7 +105,11 @@
 
 camera_status_t
 ACameraCaptureSession::abortCaptures() {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -110,7 +126,11 @@
 }
 
 camera_status_t ACameraCaptureSession::updateOutputConfiguration(ACaptureSessionOutput *output) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -126,10 +146,35 @@
     return ret;
 }
 
+camera_status_t ACameraCaptureSession::prepare(ACameraWindowType* window) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+    sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+    if (dev == nullptr) {
+        ALOGE("Error: Device associated with session %p has been closed!", this);
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    camera_status_t ret;
+    dev->lockDeviceForSessionOps();
+    {
+        Mutex::Autolock _l(mSessionLock);
+        ret = dev->prepareLocked(window);
+    }
+    dev->unlockDevice();
+    return ret;
+}
+
 ACameraDevice*
 ACameraCaptureSession::getDevice() {
     Mutex::Autolock _l(mSessionLock);
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return nullptr;
@@ -143,6 +188,17 @@
     mIsClosed = true;
 }
 
+#ifdef __ANDROID_VNDK__
+std::shared_ptr<acam::CameraDevice>
+ACameraCaptureSession::getDevicePtr() {
+    std::shared_ptr<acam::CameraDevice> device = mDevice.lock();
+    if (device == nullptr || device->isClosed()) {
+        ALOGW("Device is closed but session %d is not notified", mId);
+        return nullptr;
+    }
+    return device;
+}
+#else
 sp<acam::CameraDevice>
 ACameraCaptureSession::getDeviceSp() {
     sp<acam::CameraDevice> device = mDevice.promote();
@@ -152,5 +208,4 @@
     }
     return device;
 }
-
-
+#endif
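
The session-side hunks above replace the device back-pointer with a weak reference (std::weak_ptr under __ANDROID_VNDK__, android::wp otherwise) that every operation promotes before use, reporting the session as closed when the device is gone. A minimal, self-contained sketch of that promotion pattern, with illustrative Device/Session stand-ins rather than the real acam classes:

    #include <cstdio>
    #include <memory>

    struct Device {
        bool closed = false;
        bool isClosed() const { return closed; }
    };

    struct Session {
        std::weak_ptr<Device> mDevice;

        // Mirrors getDevicePtr()/getDeviceSp(): promote the weak reference and
        // treat a dead or already-closed device as "no device".
        std::shared_ptr<Device> getDevice() {
            std::shared_ptr<Device> dev = mDevice.lock();
            if (dev == nullptr || dev->isClosed()) {
                return nullptr;
            }
            return dev;
        }

        int stopRepeating() {
            std::shared_ptr<Device> dev = getDevice();
            if (dev == nullptr) {
                std::puts("device gone: would return ACAMERA_ERROR_SESSION_CLOSED");
                return -1;
            }
            // ... would lock the device for session ops and forward the call ...
            return 0;
        }
    };

    int main() {
        auto device = std::make_shared<Device>();
        Session session{device};
        session.stopRepeating();  // succeeds while the device is alive
        device.reset();           // last strong reference released elsewhere
        session.stopRepeating();  // promotion fails; session reports it is closed
    }

Under __ANDROID_VNDK__ the real code uses std::weak_ptr::lock(); the framework build uses android::wp::promote(), but the shape of the check is the same.
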
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 08a9226..88135ba 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -47,6 +47,21 @@
         return mWindow > other.mWindow;
     }
 
+    inline bool isWindowEqual(ACameraWindowType* window) const {
+        return mWindow == window;
+    }
+
+    // returns true if the window was successfully added, false otherwise.
+    inline bool addSharedWindow(ACameraWindowType* window) {
+        auto ret = mSharedWindows.insert(window);
+        return ret.second;
+    }
+
+    // returns the number of elements removed.
+    inline size_t removeSharedWindow(ACameraWindowType* window) {
+        return mSharedWindows.erase(window);
+    }
+
     ACameraWindowType* mWindow;
     std::set<ACameraWindowType *> mSharedWindows;
     bool           mIsShared;
@@ -60,11 +75,31 @@
 };
 
 /**
+ * Capture session window-prepared callback, set via {@link ACameraCaptureSession_setWindowPreparedCallback}
+ */
+typedef struct ACameraCaptureSession_prepareCallbacks {
+    /// optional application context. This will be passed in the context
+    /// parameter of the {@link onWindowPrepared} callback.
+    void*                               context;
+
+    ACameraCaptureSession_prepareCallback onWindowPrepared;
+} ACameraCaptureSession_prepareCallbacks;
+
+/**
  * ACameraCaptureSession opaque struct definition
  * Leave outside of android namespace because it's NDK struct
  */
 struct ACameraCaptureSession : public RefBase {
   public:
+#ifdef __ANDROID_VNDK__
+    ACameraCaptureSession(
+            int id,
+            const ACaptureSessionOutputContainer* outputs,
+            const ACameraCaptureSession_stateCallbacks* cb,
+            std::weak_ptr<android::acam::CameraDevice> device) :
+            mId(id), mOutput(*outputs), mUserSessionCallback(*cb),
+            mDevice(std::move(device)) {}
+#else
     ACameraCaptureSession(
             int id,
             const ACaptureSessionOutputContainer* outputs,
@@ -72,6 +107,7 @@
             android::acam::CameraDevice* device) :
             mId(id), mOutput(*outputs), mUserSessionCallback(*cb),
             mDevice(device) {}
+#endif
 
     // This can be called in app calling close() or after some app callback is finished
     // Make sure the caller does not hold device or session lock!
@@ -105,6 +141,14 @@
 
     camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
 
+    void setWindowPreparedCallback(void *context,
+            ACameraCaptureSession_prepareCallback cb) {
+        Mutex::Autolock _l(mSessionLock);
+        mPreparedCb.context = context;
+        mPreparedCb.onWindowPrepared = cb;
+    }
+    camera_status_t prepare(ACameraWindowType *window);
+
     ACameraDevice* getDevice();
 
   private:
@@ -114,14 +158,24 @@
     // or a new session is replacing this session.
     void closeByDevice();
 
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<android::acam::CameraDevice> getDevicePtr();
+#else
     sp<android::acam::CameraDevice> getDeviceSp();
+#endif
 
     const int mId;
     const ACaptureSessionOutputContainer mOutput;
     const ACameraCaptureSession_stateCallbacks mUserSessionCallback;
+#ifdef __ANDROID_VNDK__
+    const std::weak_ptr<android::acam::CameraDevice> mDevice;
+#else
     const wp<android::acam::CameraDevice> mDevice;
+#endif
+
     bool  mIsClosed = false;
     bool  mClosedByApp = false;
+    ACameraCaptureSession_prepareCallbacks mPreparedCb;
     Mutex mSessionLock;
 };
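
For reference, the addSharedWindow()/removeSharedWindow() helpers introduced above simply expose std::set::insert()/erase() results, which NdkCameraDevice.cpp maps to ACAMERA_OK or ACAMERA_ERROR_INVALID_PARAMETER. A small self-contained sketch of that mapping, with plain int* values standing in for ACameraWindowType*:

    #include <cassert>
    #include <set>

    int main() {
        std::set<int*> sharedWindows;
        int a = 0, b = 0;

        // insert().second is true only for a newly added element -> ACAMERA_OK
        assert(sharedWindows.insert(&a).second == true);
        // inserting the same window again fails -> ACAMERA_ERROR_INVALID_PARAMETER
        assert(sharedWindows.insert(&a).second == false);

        // erase() returns the number of elements removed: 1 -> ACAMERA_OK
        assert(sharedWindows.erase(&a) == 1);
        // removing a window that was never shared: 0 -> ACAMERA_ERROR_INVALID_PARAMETER
        assert(sharedWindows.erase(&b) == 0);
        return 0;
    }
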
 
diff --git a/camera/ndk/impl/ACameraCaptureSession.inc b/camera/ndk/impl/ACameraCaptureSession.inc
index 86bf8a5..da535f8 100644
--- a/camera/ndk/impl/ACameraCaptureSession.inc
+++ b/camera/ndk/impl/ACameraCaptureSession.inc
@@ -15,9 +15,8 @@
  */
 
 #include "ACameraCaptureSession.h"
-
 #ifdef __ANDROID_VNDK__
-#include "ndk_vendor/impl/ACameraDeviceVendor.inc"
+#include <ndk_vendor/impl/ACameraDeviceVendor.inc>
 #else
 #include "ACameraDevice.inc"
 #endif
@@ -30,7 +29,11 @@
         /*optional*/T* cbs,
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
@@ -52,7 +55,11 @@
         /*optional*/T* cbs,
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
+#ifdef __ANDROID_VNDK__
+    std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
     sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
     if (dev == nullptr) {
         ALOGE("Error: Device associated with session %p has been closed!", this);
         return ACAMERA_ERROR_SESSION_CLOSED;
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 7997768..1dae0f9 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -342,6 +342,58 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    if (window == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    int32_t streamId = -1;
+    for (auto& kvPair : mConfiguredOutputs) {
+        if (window == kvPair.second.first) {
+            streamId = kvPair.first;
+            break;
+        }
+    }
+    if (streamId < 0) {
+        ALOGE("Error: Invalid output configuration");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    auto remoteRet = mRemote->prepare(streamId);
+    if (!remoteRet.isOk()) {
+        // TODO:(b/259735869) Do this check for all other binder calls in the
+        // ndk as well.
+        if (remoteRet.exceptionCode() != EX_SERVICE_SPECIFIC) {
+            ALOGE("Camera device %s failed to prepare output window %p: %s", getId(), window,
+                    remoteRet.toString8().string());
+            return ACAMERA_ERROR_UNKNOWN;
+
+        }
+        switch (remoteRet.serviceSpecificErrorCode()) {
+            case hardware::ICameraService::ERROR_INVALID_OPERATION:
+                ALOGE("Camera device %s invalid operation: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_OPERATION;
+                break;
+            case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+                ALOGE("Camera device %s invalid input argument: %s", getId(),
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_INVALID_PARAMETER;
+                break;
+            default:
+                ALOGE("Camera device %s failed to prepare output window %p: %s", getId(), window,
+                        remoteRet.toString8().string());
+                return ACAMERA_ERROR_UNKNOWN;
+        }
+    }
+
+    return ACAMERA_OK;
+}
+
 camera_status_t
 CameraDevice::allocateCaptureRequest(
         const ACaptureRequest* request, /*out*/sp<CaptureRequest>& outReq) {
@@ -919,6 +971,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -992,6 +1045,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
         {
             sp<RefBase> obj;
             found = msg->findObject(kSessionSpKey, &obj);
@@ -1034,6 +1088,26 @@
                     (*onState)(context, session.get());
                     break;
                 }
+                case kWhatPreparedCb:
+                {
+                    ACameraCaptureSession_prepareCallback onWindowPrepared;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onWindowPrepared);
+                    if (!found) {
+                        ALOGE("%s: Cannot find window prepared callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onWindowPrepared == nullptr) {
+                        return;
+                    }
+                    ACameraWindowType* anw;
+                    found = msg->findPointer(kAnwKey, (void**) &anw);
+                    if (!found) {
+                        ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
+                        return;
+                    }
+                    (*onWindowPrepared)(context, anw, session.get());
+                    break;
+                }
                 case kWhatCaptureStart:
                 {
                     ACameraCaptureSession_captureCallback_start onStart;
@@ -1412,7 +1486,6 @@
     while (it != mSequenceLastFrameNumberMap.end()) {
         int sequenceId = it->first;
         int64_t lastFrameNumber = it->second.lastFrameNumber;
-        bool hasCallback = true;
 
         if (mRemote == nullptr) {
             ALOGW("Camera %s closed while checking sequence complete", getId());
@@ -1425,7 +1498,6 @@
             // This should not happen because we always register callback (with nullptr inside)
             if (mSequenceCallbackMap.count(sequenceId) == 0) {
                 ALOGW("No callback found for sequenceId %d", sequenceId);
-                hasCallback = false;
             }
 
             if (lastFrameNumber <= completedFrameNumber) {
@@ -1731,8 +1803,36 @@
 }
 
 binder::Status
-CameraDevice::ServiceCallback::onPrepared(int) {
-    // Prepare not yet implemented in NDK
+CameraDevice::ServiceCallback::onPrepared(int streamId) {
+    ALOGV("%s: callback for stream id %d", __FUNCTION__, streamId);
+    binder::Status ret = binder::Status::ok();
+    sp<CameraDevice> dev = mDevice.promote();
+    if (dev == nullptr) {
+        return ret; // device has been closed
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ret;
+    }
+    auto it = dev->mConfiguredOutputs.find(streamId);
+    if (it == dev->mConfiguredOutputs.end()) {
+        ALOGE("%s: stream id %d does not exist", __FUNCTION__ , streamId);
+        return ret;
+    }
+    sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
+    if (session == nullptr) {
+        ALOGE("%s: Session is dead already", __FUNCTION__ );
+        return ret;
+    }
+    // We've found the window corresponding to the surface id.
+    ACameraWindowType *window = it->second.first;
+    sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
+    msg->setPointer(kContextKey, session->mPreparedCb.context);
+    msg->setPointer(kAnwKey, window);
+    msg->setObject(kSessionSpKey, session);
+    msg->setPointer(kCallbackFpKey, (void *)session->mPreparedCb.onWindowPrepared);
+    dev->postSessionMsgAndCleanup(msg);
+
     return binder::Status::ok();
 }
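
In the ACameraDevice.cpp hunks above, ServiceCallback::onPrepared() runs on a binder thread, so it packs the app's prepare callback pointer, its context, and the prepared window into a kWhatPreparedCb AMessage and lets the device handler invoke the callback on its own thread. The sketch below only illustrates that hand-off idea with a plain worker thread and queue; it is not the AMessage/ALooper machinery the patch actually uses:

    #include <condition_variable>
    #include <cstdio>
    #include <functional>
    #include <mutex>
    #include <queue>
    #include <thread>

    using PrepareCallback = void (*)(void* context, void* window);

    struct CallbackDispatcher {
        std::queue<std::function<void()>> q;
        std::mutex m;
        std::condition_variable cv;
        bool done = false;
        std::thread worker{[this] { run(); }};

        // Called on the "binder" thread: only marshals the callback and its arguments.
        void post(PrepareCallback cb, void* context, void* window) {
            std::lock_guard<std::mutex> lk(m);
            q.push([=] { if (cb) cb(context, window); });
            cv.notify_one();
        }

        // Handler thread: drains the queue and invokes the app callback here.
        void run() {
            std::unique_lock<std::mutex> lk(m);
            while (!done || !q.empty()) {
                cv.wait(lk, [this] { return done || !q.empty(); });
                while (!q.empty()) {
                    auto task = std::move(q.front());
                    q.pop();
                    lk.unlock();
                    task();  // app callback runs off the posting thread
                    lk.lock();
                }
            }
        }

        ~CallbackDispatcher() {
            { std::lock_guard<std::mutex> lk(m); done = true; }
            cv.notify_one();
            worker.join();
        }
    };

    int main() {
        CallbackDispatcher dispatcher;
        int window = 0;  // stands in for the prepared ACameraWindowType*
        dispatcher.post([](void*, void* w) { std::printf("onWindowPrepared(%p)\n", w); },
                        nullptr, &window);
    }
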
 
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 17988fe..382d6ce 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -151,6 +151,8 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
+    camera_status_t prepareLocked(ACameraWindowType *window);
+
     camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
 
@@ -222,7 +224,8 @@
         kWhatLogicalCaptureFail, // onLogicalCameraCaptureFailed
         kWhatCaptureSeqEnd,    // onCaptureSequenceCompleted
         kWhatCaptureSeqAbort,  // onCaptureSequenceAborted
-        kWhatCaptureBufferLost,// onCaptureBufferLost
+        kWhatCaptureBufferLost, // onCaptureBufferLost
+        kWhatPreparedCb, // onWindowPrepared
         // Internal cleanup
         kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
     };
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5892f1a..837b5be 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -38,17 +38,16 @@
 const char* CameraManagerGlobal::kContextKey    = "CallbackContext";
 const nsecs_t CameraManagerGlobal::kCallbackDrainTimeout = 5000000; // 5 ms
 Mutex                CameraManagerGlobal::sLock;
-CameraManagerGlobal* CameraManagerGlobal::sInstance = nullptr;
+wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
 
-CameraManagerGlobal&
-CameraManagerGlobal::getInstance() {
+sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
-    CameraManagerGlobal* instance = sInstance;
+    sp<CameraManagerGlobal> instance = sInstance.promote();
     if (instance == nullptr) {
         instance = new CameraManagerGlobal();
         sInstance = instance;
     }
-    return *instance;
+    return instance;
 }
 
 CameraManagerGlobal::~CameraManagerGlobal() {
@@ -637,7 +636,7 @@
     Mutex::Autolock _l(mLock);
 
     std::vector<String8> idList;
-    CameraManagerGlobal::getInstance().getCameraIdList(&idList);
+    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
@@ -687,15 +686,16 @@
         const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
     Mutex::Autolock _l(mLock);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
+
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
     binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
-            targetSdkVersion, &rawMetadata);
+            targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -732,7 +732,7 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
@@ -747,7 +747,7 @@
     binder::Status serviceRet = cs->connectDevice(
             callbacks, String16(cameraId), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*out*/&deviceRemote);
+            targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index d53d809..0dd79da 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -46,7 +46,7 @@
  */
 class CameraManagerGlobal final : public RefBase {
   public:
-    static CameraManagerGlobal& getInstance();
+    static sp<CameraManagerGlobal> getInstance();
     sp<hardware::ICameraService> getCameraService();
 
     void registerAvailabilityCallback(
@@ -257,7 +257,7 @@
 
     // For the singleton instance
     static Mutex sLock;
-    static CameraManagerGlobal* sInstance;
+    static wp<CameraManagerGlobal> sInstance;
     CameraManagerGlobal() {};
     ~CameraManagerGlobal();
 };
@@ -271,7 +271,7 @@
  */
 struct ACameraManager {
     ACameraManager() :
-            mGlobalManager(&(android::acam::CameraManagerGlobal::getInstance())) {}
+            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     ~ACameraManager();
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
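
The ACameraManager changes above turn CameraManagerGlobal from a never-freed raw-pointer singleton into one that is held through wp<> and handed out as sp<>, so it is created on demand and destroyed once the last ACameraManager releases it. A self-contained sketch of that pattern using std::shared_ptr/std::weak_ptr in place of sp<>/wp<>, with "Global" as an illustrative stand-in for the real class:

    #include <memory>
    #include <mutex>

    class Global {
      public:
        static std::shared_ptr<Global> getInstance() {
            std::lock_guard<std::mutex> lock(sLock);
            std::shared_ptr<Global> instance = sInstance.lock();  // like wp<>::promote()
            if (instance == nullptr) {
                instance = std::shared_ptr<Global>(new Global());
                sInstance = instance;  // the global keeps only a weak reference
            }
            return instance;
        }

      private:
        Global() = default;
        static std::mutex sLock;
        static std::weak_ptr<Global> sInstance;
    };

    std::mutex Global::sLock;
    std::weak_ptr<Global> Global::sInstance;

    int main() {
        auto a = Global::getInstance();  // first caller creates the instance
        auto b = Global::getInstance();  // same object while 'a' is alive
        a.reset();
        b.reset();                       // last strong reference gone: instance destroyed
        auto c = Global::getInstance();  // a fresh instance is created on demand
    }
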
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 05124c0..4995dc4 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -537,6 +537,8 @@
         case ACAMERA_CONTROL_ENABLE_ZSL:
         case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
         case ACAMERA_CONTROL_ZOOM_RATIO:
+        case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
+        case ACAMERA_CONTROL_AUTOFRAMING:
         case ACAMERA_EDGE_MODE:
         case ACAMERA_FLASH_MODE:
         case ACAMERA_HOT_PIXEL_MODE:
@@ -585,6 +587,7 @@
     ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
     ANDROID_CONTROL_AE_PRECAPTURE_ID,
     ANDROID_CONTROL_AF_TRIGGER_ID,
+    ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
     ANDROID_DEMOSAIC_MODE,
     ANDROID_EDGE_STRENGTH,
     ANDROID_FLASH_FIRING_POWER,
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index b0fd00c..099c5c5 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -99,6 +99,34 @@
     ACameraCaptureSession_stateCallback onActive;
 } ACameraCaptureSession_stateCallbacks;
 
+/**
+ * The definition of camera capture session onWindowPrepared callback.
+ *
+ * <p>This callback is called when the buffer pre-allocation for an output window Surface is
+ * complete. </p>
+ *
+ * <p>Buffer pre-allocation for an output window is started by
+ * {@link ACameraCaptureSession_prepareWindow}
+ * call. While allocation is underway, the output must not be used in a capture request.
+ * Once this callback is called, the output provided can be used as a target for a
+ * capture request. In case of an error during pre-allocation (such as running out of
+ * suitable memory), this callback is still invoked after the error is encountered, though some
+ * buffers may not have been successfully pre-allocated.</p>
+ *
+ * Introduced in API 34.
+ *
+ * @param context The optional app-provided context pointer that was included in
+ *        the {@link ACameraCaptureSession_setWindowPreparedCallback} method
+ *        call.
+ * @param window The window that {@link ACameraCaptureSession_prepareWindow} was called on.
+ * @param session The camera capture session on which {@link ACameraCaptureSession_prepareWindow}
+ *                was called.
+ */
+typedef void (*ACameraCaptureSession_prepareCallback)(
+        void *context,
+        ACameraWindowType *window,
+        ACameraCaptureSession *session);
+
 /// Enum for describing error reason in {@link ACameraCaptureFailure}
 enum {
     /**
@@ -165,7 +193,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param timestamp The timestamp when the capture is started. This timestmap will match
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
  *                  {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
  *                  {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
  */
@@ -200,7 +228,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ACameraCaptureFailure} desribes the capture failure. The memory is
+ * @param failure The {@link ACameraCaptureFailure} describes the capture failure. The memory is
  *                managed by camera framework. Do not access this pointer after this callback
  *                returns.
  */
@@ -412,7 +440,7 @@
  * and any repeating requests are stopped (as if {@link ACameraCaptureSession_stopRepeating} was
  * called). However, any in-progress capture requests submitted to the session will be completed as
  * normal; once all captures have completed and the session has been torn down,
- * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the seesion
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the session
  * will be removed from memory.</p>
  *
  * <p>Closing a session is idempotent; closing more than once has no effect.</p>
@@ -499,7 +527,7 @@
  *
  * <p>Repeating burst requests are a simple way for an application to
  * maintain a preview or other continuous stream of frames where each
- * request is different in a predicatable way, without having to continually
+ * request is different in a predictable way, without having to continually
  * submit requests through {@link ACameraCaptureSession_capture}.</p>
  *
  * <p>To stop the repeating capture, call {@link ACameraCaptureSession_stopRepeating}. Any
@@ -710,7 +738,7 @@
  *                capture request sent by application, so the address is different to what
  *                application sent but the content will match. This request will be freed by
  *                framework immediately after this callback returns.
- * @param failure The {@link ALogicalCameraCaptureFailure} desribes the capture failure. The memory
+ * @param failure The {@link ALogicalCameraCaptureFailure} describes the capture failure. The memory
  *                is managed by camera framework. Do not access this pointer after this callback
  *                returns.
  */
@@ -989,6 +1017,92 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
 
+/**
+ * Set the callback that is called when the output window for which the client has requested
+ * pre-allocation of buffers through the {@link ACameraCaptureSession_prepareWindow} call has
+ * completed the pre-allocation of buffers.
+ * @param session the ACameraCaptureSession on which ACameraCaptureSession_prepareWindow will be called.
+ * @param context optional application provided context. This will be passed into the context
+ *        parameter of the {@link onWindowPrepared} callback.
+ * @param callback the callback to be called when the output window's buffer pre-allocation is
+ *        complete.
+ * @return <ul><li> {@link ACAMERA_OK} if the method succeeds</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or callback is
+ *              NULL</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} the camera service encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ */
+camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
+    ACameraCaptureSession* session,
+    void *context,
+    ACameraCaptureSession_prepareCallback callback) __INTRODUCED_IN(34);
+
+/**
+ *
+ * <p>Pre-allocate all buffers for an output window.</p>
+ *
+ * <p>Normally, the image buffers for a given output window are allocated on-demand,
+ * to minimize startup latency and memory overhead.</p>
+ *
+ * <p>However, in some cases, it may be desirable for the buffers to be allocated before
+ * any requests targeting the window are actually submitted to the device. Large buffers
+ * may take some time to allocate, which can result in delays in submitting requests until
+ * sufficient buffers are allocated to reach steady-state behavior. Such delays can cause
+ * bursts to take longer than desired, or cause skips or stutters in preview output.</p>
+ *
+ * <p>The ACameraCaptureSession_prepareWindow() call can be used to perform this pre-allocation.
+ * It may only be called for a given output window before that window is used as a target for a
+ * request. The number of buffers allocated is the sum of the count needed by the consumer providing
+ * the output window, and the maximum number needed by the camera device to fill its pipeline.
+ * Since this may be a larger number than what is actually required for steady-state operation,
+ * using this call may result in higher memory consumption than the normal on-demand behavior
+ * results in. This method will also delay the time to first output to a given Surface, in exchange
+ * for smoother frame rate once the allocation is complete.</p>
+ *
+ * <p>For example, an application that creates an
+ * {@link AImageReader} with a maxImages argument of 10,
+ * but only uses 3 simultaneous {@link AImage}s at once, would normally only cause those 3 images
+ * to be allocated (plus what is needed by the camera device for smooth operation).  But using
+ * ACameraCaptureSession_prepareWindow() on the {@link AImageReader}'s window will result in all 10
+ * {@link AImage}s being allocated. So applications using this method should take care to request
+ * only the number of buffers actually necessary for their application.</p>
+ *
+ * <p>If the same output window is used in consecutive sessions (without closing the first
+ * session explicitly), then its already-allocated buffers are carried over, and if it was
+ * used as a target of a capture request in the first session, prepare cannot be called on it
+ * in the second session. If it is, {@link ACAMERA_ERROR_INVALID_PARAMETER} will
+ * be returned by the method.</p>
+ *
+ * <p>Once allocation is complete, {@link ACameraCaptureSession_prepareCallback#onWindowPrepared}
+ * will be invoked with the output provided to this method. Between the prepare call and the
+ * {@link ACameraCaptureSession_prepareCallback#onWindowPrepared} call,
+ * the output provided to prepare must not be used as a target of a capture request submitted
+ * to this session.</p>
+ *
+ * <p>{@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}
+ * devices cannot pre-allocate output buffers; for those devices,
+ * {@link ACameraCaptureSession_prepareCallback#onWindowPrepared} will be immediately called,
+ * and no pre-allocation is done.</p>
+ *
+ * @param session the {@link ACameraCaptureSession} that needs to prepare output buffers.
+ * @param window the {@link ACameraWindowType} for which the output buffers need to be prepared.
+ *
+ * @return <ul><li>
+ *             {@link ACAMERA_OK} if the method succeeds</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or window is
+ *              NULL, or if the session has not been configured with the window</li>
+ *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ */
+camera_status_t ACameraCaptureSession_prepareWindow(
+    ACameraCaptureSession* session,
+    ACameraWindowType *window) __INTRODUCED_IN(34);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
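
A hedged usage sketch for the two declarations above, assuming a capture session whose output container already includes the AImageReader's window; PrepareState and preAllocateReaderBuffers are illustrative names, not part of the NDK:

    #include <android/log.h>
    #include <camera/NdkCameraCaptureSession.h>
    #include <media/NdkImageReader.h>

    struct PrepareState {
        bool windowReady = false;
    };

    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        auto* state = static_cast<PrepareState*>(context);
        state->windowReady = true;
        __android_log_print(ANDROID_LOG_INFO, "PrepareDemo",
                            "window %p prepared for session %p", window, session);
        // Only from this point on should capture requests target 'window'.
    }

    camera_status_t preAllocateReaderBuffers(ACameraCaptureSession* session,
                                             AImageReader* reader,
                                             PrepareState* state) {
        ANativeWindow* window = nullptr;  // ACameraWindowType is ANativeWindow in NDK builds
        if (AImageReader_getWindow(reader, &window) != AMEDIA_OK || window == nullptr) {
            return ACAMERA_ERROR_UNKNOWN;
        }
        camera_status_t status = ACameraCaptureSession_setWindowPreparedCallback(
                session, state, onWindowPrepared);
        if (status != ACAMERA_OK) {
            return status;
        }
        // Kicks off buffer pre-allocation; completion is reported via onWindowPrepared().
        return ACameraCaptureSession_prepareWindow(session, window);
    }
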
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 7be4bd3..de10eb3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -113,7 +113,7 @@
  * @param context The optional context in {@link ACameraDevice_StateCallbacks} will be
  *                passed to this callback.
  * @param device The {@link ACameraDevice} that is being disconnected.
- * @param error The error code describes the cause of this error callback. See the folowing
+ * @param error The error code describes the cause of this error callback. See the following
  *              links for more detail.
  *
  * @see ERROR_CAMERA_IN_USE
@@ -447,8 +447,8 @@
  *   returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
  *   before creating a Surface from the SurfaceTexture with <a href=
  *   "http://developer.android.com/reference/android/view/Surface.html#Surface(android.graphics.SurfaceTexture)">
- *   Surface\#Surface(SurfaceTextrue)</a>. If the size is not set by the application, it will be set to be the
- *   smallest supported size less than 1080p, by the camera device.</li>
+ *   Surface\#Surface(SurfaceTexture)</a>. If the size is not set by the application, it will be
+ *   set to be the smallest supported size less than 1080p, by the camera device.</li>
  *
  * <li>For recording with <a href=
  *     "http://developer.android.com/reference/android/media/MediaCodec.html">
@@ -470,18 +470,6 @@
  *   <a href="http://developer.android.com/reference/android/media/CamcorderProfile.html">
  *    CamcorderProfiles</a>.</li>
  *
- * <li>For efficient YUV processing with <a href=
- *   "http://developer.android.com/reference/android/renderscript/package-summary.html">
- *   RenderScript</a>:
- *   Create a RenderScript
- *   <a href="http://developer.android.com/reference/android/renderscript/Allocation.html">
- *   Allocation</a> with a supported YUV
- *   type, the IO_INPUT flag, and one of the YUV output sizes returned by
- *   {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
- *   Then obtain the Surface with
- *   <a href="http://developer.android.com/reference/android/renderscript/Allocation.html#getSurface()">
- *   Allocation#getSurface}</a>.</li>
- *
  * <li>For access to RAW, uncompressed YUV, or compressed JPEG data in the application: Create an
  *   {@link AImageReader} object using the {@link AImageReader_new} method with one of the supported
  *   output formats given by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}. Then obtain a
@@ -599,7 +587,7 @@
  * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Video recording with maximum-size video snapshot</td> </tr>
  * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Standard video recording plus maximum-resolution in-app processing.</td> </tr>
  * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Preview plus two-input maximum-resolution in-app processing.</td> </tr>
@@ -641,7 +629,7 @@
  * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
  * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
  * </table><br>
  * </p>
  *
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 26db7f2..88063d6 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -97,7 +97,7 @@
     ACAMERA_ERROR_CAMERA_SERVICE        = ACAMERA_ERROR_BASE - 6,
 
     /**
-     * The {@link ACameraCaptureSession} has been closed and cannnot perform any operation other
+     * The {@link ACameraCaptureSession} has been closed and cannot perform any operation other
      * than {@link ACameraCaptureSession_close}.
      */
     ACAMERA_ERROR_SESSION_CLOSED        = ACAMERA_ERROR_BASE - 7,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..b4f3bf1 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
  * Query the capabilities of a camera device. These capabilities are
  * immutable for a given camera.
  *
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
  *
  * <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
  * characteristics.</p>
@@ -217,7 +218,7 @@
  * @param manager the {@link ACameraManager} of interest.
  * @param cameraId the ID string of the camera device of interest.
  * @param characteristics the output {@link ACameraMetadata} will be filled here if the method call
- *        succeeeds.
+ *        succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..cf29736 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in
+     * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * , and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
@@ -141,9 +144,11 @@
     /**
      * The tag identifying the entry.
      *
-     * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+     * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * , and defines how the
      * entry should be interpreted and which parts of the API provide it.
-     * See {@link NdkCameraMetadataTags.h} for more details. </p>
+     * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+     * for more details. </p>
      */
     uint32_t tag;
 
@@ -185,7 +190,7 @@
  * @param metadata the {@link ACameraMetadata} of interest.
  * @param tag the tag value of the camera metadata entry to be get.
  * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- *        call succeeeds.
+ *        call succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index b6f8552..bd679e5 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -74,6 +74,8 @@
     ACAMERA_HEIC_INFO,
     ACAMERA_AUTOMOTIVE,
     ACAMERA_AUTOMOTIVE_LENS,
+    ACAMERA_EXTENSION,
+    ACAMERA_JPEGR,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -119,6 +121,8 @@
     ACAMERA_HEIC_INFO_START        = ACAMERA_HEIC_INFO         << 16,
     ACAMERA_AUTOMOTIVE_START       = ACAMERA_AUTOMOTIVE        << 16,
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
+    ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
+    ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -541,7 +545,9 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability,
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
      * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -750,7 +756,10 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -953,7 +962,10 @@
      * mode.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -2044,6 +2056,175 @@
      */
     ACAMERA_CONTROL_ZOOM_RATIO =                                // float
             ACAMERA_CONTROL_START + 47,
+    /**
+     * <p>The desired CaptureRequest settings override with which certain keys are
+     * applied earlier so that they can take effect sooner.</p>
+     *
+     * <p>Type: int32 (acamera_metadata_enum_android_control_settings_override_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>There are some CaptureRequest keys which can be applied earlier than others
+     * when controls within a CaptureRequest aren't required to take effect at the same time.
+     * One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+     * As soon as the camera device receives the CaptureRequest, it can apply the requested
+     * zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+     * latency.</p>
+     * <p>This key's value in the capture result reflects whether the controls for this capture
+     * are overridden "by" a newer request. This means that if a capture request turns on
+     * settings override, the capture result of an earlier request will contain the key value
+     * of ZOOM. On the other hand, if a capture request has settings override turned on,
+     * but all newer requests have it turned off, the key's value in the capture result will
+     * be OFF because this capture isn't overridden by a newer capture. In the two examples
+     * below, the capture results columns illustrate the settingsOverride values in different
+     * scenarios.</p>
+     * <p>Assuming the zoom settings override can speed up zoom by 1 frame, the example below
+     * illustrates the speed-up at the start of a capture session:</p>
+     * <pre><code>Camera session created
+     * Request 1 (zoom=1.0x, override=ZOOM) -&gt;
+     * Request 2 (zoom=1.2x, override=ZOOM) -&gt;
+     * Request 3 (zoom=1.4x, override=ZOOM) -&gt;  Result 1 (zoom=1.2x, override=ZOOM)
+     * Request 4 (zoom=1.6x, override=ZOOM) -&gt;  Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=ZOOM) -&gt;  Result 3 (zoom=1.6x, override=ZOOM)
+     *                                      -&gt;  Result 4 (zoom=1.8x, override=ZOOM)
+     *                                      -&gt;  Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>The application can turn on settings override and use zoom as normal. The example
+     * shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+     * values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).</p>
+     * <p>The application must make sure the settings override doesn't interfere with user
+     * journeys requiring simultaneous application of all controls in CaptureRequest on the
+     * requested output targets. For example, if the application takes a still capture using
+     * CameraCaptureSession#capture, and the repeating request immediately sets a different
+     * zoom value using override, the inflight still capture could have its zoom value
+     * overwritten unexpectedly.</p>
+     * <p>So the application is strongly recommended to turn off settingsOverride when taking
+     * still/burst captures, and turn it back on when there is only repeating viewfinder
+     * request and no inflight still/burst captures.</p>
+     * <p>Below is the example demonstrating the transitions in and out of the
+     * settings override:</p>
+     * <pre><code>Request 1 (zoom=1.0x, override=OFF)
+     * Request 2 (zoom=1.2x, override=OFF)
+     * Request 3 (zoom=1.4x, override=ZOOM)  -&gt; Result 1 (zoom=1.0x, override=OFF)
+     * Request 4 (zoom=1.6x, override=ZOOM)  -&gt; Result 2 (zoom=1.4x, override=ZOOM)
+     * Request 5 (zoom=1.8x, override=OFF)   -&gt; Result 3 (zoom=1.6x, override=ZOOM)
+     *                                       -&gt; Result 4 (zoom=1.6x, override=OFF)
+     *                                       -&gt; Result 5 (zoom=1.8x, override=OFF)
+     * </code></pre>
+     * <p>This example shows that:</p>
+     * <ul>
+     * <li>The application "ramps in" settings override by setting the control to ZOOM.
+     * In the example, request #3 enables zoom settings override. Because the camera device
+     * can speed up applying zoom by 1 frame, the outputs of request #2 have 1.4x zoom, the
+     * value specified in request #3.</li>
+     * <li>The application "ramps out" of settings override by setting the control to OFF. In
+     * the example, request #5 changes the override to OFF. Because request #4's zoom
+     * takes effect in result #3, result #4's zoom remains the same until the new value takes
+     * effect in result #5.</li>
+     * </ul>
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE =                         // int32 (acamera_metadata_enum_android_control_settings_override_t)
+            ACAMERA_CONTROL_START + 49,
+    /**
+     * <p>List of available settings overrides supported by the camera device that can
+     * be used to speed up certain controls.</p>
+     *
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>When not all controls within a CaptureRequest are required to take effect
+     * at the same time on the outputs, the camera device may apply certain request keys sooner
+     * to improve latency. This list contains such supported settings overrides. Each settings
+     * override corresponds to a set of CaptureRequest keys that can be sped up when applying.</p>
+     * <p>A supported settings override can be passed in via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">CaptureRequest#CONTROL_SETTINGS_OVERRIDE</a>, and the
+     * CaptureRequest keys corresponding to the override are applied as soon as possible, not
+     * bound by per-frame synchronization. See ACAMERA_CONTROL_SETTINGS_OVERRIDE for the
+     * CaptureRequest keys for each override.</p>
+     * <p>OFF is always included in this list.</p>
+     *
+     * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
+     */
+    ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES =              // int32[n]
+            ACAMERA_CONTROL_START + 50,
+    /**
+     * <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     *   <li>ACaptureRequest</li>
+     * </ul></p>
+     *
+     * <p>Auto-framing is a special mode provided by the camera device to dynamically crop, zoom
+     * or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+     * portion of the viewport. It is primarily designed to support video calling in
+     * situations where the user isn't directly in front of the device, especially for
+     * wide-angle cameras.
+     * ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO in CaptureResult will be used
+     * to denote the coordinates of the auto-framed region.
+     * Zoom and video stabilization controls are disabled when auto-framing is enabled. The 3A
+     * regions must map the screen coordinates into the scaler crop returned from the capture
+     * result instead of using the active array sensor.</p>
+     *
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_AUTOFRAMING =                               // byte (acamera_metadata_enum_android_control_autoframing_t)
+            ACAMERA_CONTROL_START + 52,
+    /**
+     * <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_available_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Will be <code>false</code> if auto-framing is not available.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE =                     // byte (acamera_metadata_enum_android_control_autoframing_available_t)
+            ACAMERA_CONTROL_START + 53,
+    /**
+     * <p>Current state of auto-framing.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_control_autoframing_state_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>When the camera doesn't have auto-framing available (i.e.
+     * <code>ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE</code> == false) or it is not enabled (i.e.
+     * <code>ACAMERA_CONTROL_AUTOFRAMING</code> == OFF), the state will always be INACTIVE.
+     * Other states indicate the current auto-framing state:</p>
+     * <ul>
+     * <li>When <code>ACAMERA_CONTROL_AUTOFRAMING</code> is set to ON, auto-framing will take
+     * place. While the frame is aligning itself to center the object (doing things like
+     * zooming in, zooming out or panning), the state will be FRAMING.</li>
+     * <li>When field of view is not being adjusted anymore and has reached a stable state, the
+     * state will be CONVERGED.</li>
+     * </ul>
+     *
+     * @see ACAMERA_CONTROL_AUTOFRAMING
+     * @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE =                         // byte (acamera_metadata_enum_android_control_autoframing_state_t)
+            ACAMERA_CONTROL_START + 54,
     ACAMERA_CONTROL_END,
 
     /**
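
Following the ACAMERA_CONTROL_SETTINGS_OVERRIDE documentation above, here is a hedged sketch of ramping the zoom override in and out around still captures. It assumes the preview ACaptureRequest is already configured with its targets, that ZOOM appears in ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES for the device, and that the ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF/ZOOM enum values are available (they are defined further down in this header):

    #include <camera/NdkCameraCaptureSession.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    static camera_status_t setZoomWithOverride(ACaptureRequest* previewRequest, float zoomRatio,
                                               bool overrideOn) {
        const int32_t overrideMode = overrideOn ? ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM
                                                : ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF;
        camera_status_t status = ACaptureRequest_setEntry_i32(
                previewRequest, ACAMERA_CONTROL_SETTINGS_OVERRIDE, 1, &overrideMode);
        if (status != ACAMERA_OK) {
            return status;
        }
        return ACaptureRequest_setEntry_float(previewRequest, ACAMERA_CONTROL_ZOOM_RATIO, 1,
                                              &zoomRatio);
    }

    static camera_status_t updatePreview(ACameraCaptureSession* session,
                                         ACaptureRequest* previewRequest, float zoomRatio,
                                         bool stillCaptureInFlight) {
        // Per the docs above: ramp out of the override while a still/burst capture is in
        // flight so its controls are not overwritten by newer preview requests.
        camera_status_t status = setZoomWithOverride(previewRequest, zoomRatio,
                                                     /*overrideOn=*/!stillCaptureInFlight);
        if (status != ACAMERA_OK) {
            return status;
        }
        return ACameraCaptureSession_setRepeatingRequest(session, nullptr, 1, &previewRequest,
                                                         nullptr);
    }
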
@@ -2198,6 +2379,10 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#turnOnTorchWithStrengthLevel">CameraManager#turnOnTorchWithStrengthLevel</a>.
      * If this value is equal to 1, flashlight brightness control is not supported.
      * The value for this key will be null for devices with no flash unit.</p>
+     * <p>The maximum value is guaranteed to be safe to use for an indefinite duration in
+     * terms of device flashlight lifespan, but may be too bright for comfort for many
+     * use cases. Use the default torch brightness value to avoid problems with an
+     * over-bright flashlight.</p>
      */
     ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL =                 // int32
             ACAMERA_FLASH_INFO_START + 2,
@@ -3516,6 +3701,26 @@
      */
     ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP =      // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
             ACAMERA_REQUEST_START + 19,
+    /**
+     * <p>A list of all possible color space profiles supported by a camera device.</p>
+     *
+     * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+     * profile. If a camera does not support the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+     * capability, the dynamic range profile will always be
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+     * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+     * a color space.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP =        // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+            ACAMERA_REQUEST_START + 21,
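
A hedged sketch of scanning the profile map above for a particular color space. The (colorSpace, imageFormat, dynamicRangeProfile) ordering of each 3-tuple is an assumption inferred from the description; the helper name is illustrative.

    // Illustrative sketch: check whether DISPLAY_P3 appears anywhere in the map.
    // Assumption: each int64 3-tuple is (colorSpace, imageFormat, dynamicRangeProfile).
    #include <camera/NdkCameraMetadata.h>

    static bool supportsDisplayP3(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) != ACAMERA_OK) {
            return false;  // key not advertised by this device
        }
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            if (entry.data.i64[i] ==
                    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
                return true;
            }
        }
        return false;
    }
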
     ACAMERA_REQUEST_END,
 
     /**
@@ -3543,9 +3748,9 @@
      * <p>Output streams use this rectangle to produce their output, cropping to a smaller region
      * if necessary to maintain the stream's aspect ratio, then scaling the sensor input to
      * match the output's configured resolution.</p>
-     * <p>The crop region is applied after the RAW to other color space (e.g. YUV)
-     * conversion. Since raw streams (e.g. RAW16) don't have the conversion stage, they are not
-     * croppable. The crop region will be ignored by raw streams.</p>
+     * <p>The crop region is usually applied after the RAW to other color space (e.g. YUV)
+     * conversion. As a result, RAW streams are not croppable unless supported by the
+     * camera device. See ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES#CROPPED_RAW for details.</p>
      * <p>For non-raw streams, any additional per-stream cropping will be done to maximize the
      * final pixel area of the stream.</p>
      * <p>For example, if the crop region is set to a 4:3 aspect ratio, then 4:3 streams will use
@@ -3626,7 +3831,9 @@
      * ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
      * <p>For camera devices with the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,</p>
+     * <p>ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
      * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
      * coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -3636,6 +3843,7 @@
      * @see ACAMERA_CONTROL_ZOOM_RATIO
      * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+     * @see ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
      * @see ACAMERA_SCALER_CROPPING_TYPE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
@@ -4279,8 +4487,8 @@
      * <p>The guaranteed stream combinations related to stream use case for a camera device with
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
      * capability is documented in the camera device
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
-     * application is strongly recommended to use one of the guaranteed stream combinations.
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>. The application is strongly recommended to use one of the guaranteed stream
+     * combinations.
      * If the application creates a session with a stream combination not in the guaranteed
      * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
      * the camera device may ignore some stream use cases due to hardware constraints
@@ -4291,6 +4499,59 @@
      */
     ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
             ACAMERA_SCALER_START + 25,
+    /**
+     * <p>The region of the sensor that corresponds to the RAW read out for this
+     * capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
+     *
+     * <p>Type: int32[4]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+     * </ul></p>
+     *
+     * <p>The coordinate system follows that of ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.</p>
+     * <p>This CaptureResult key will be set when the corresponding CaptureRequest has a RAW target
+     * with stream use case set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW">CameraMetadata#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW</a>,
+     * otherwise it will be {@code null}.
+     * The value of this key specifies the region of the sensor used for the RAW capture and can
+     * be used to calculate the corresponding field of view of RAW streams.
+     * This field of view will always be &gt;= the field of view of (processed) non-RAW streams for the
+     * capture. Note: The region specified may not necessarily be centered.</p>
+     * <p>For example: Assume a camera device has a pre correction active array size of
+     * {@code {0, 0, 1500, 2000}}. If the RAW_CROP_REGION is {@code {500, 375, 1500, 1125}}, that
+     * corresponds to a centered crop of 1/4th of the full field of view RAW stream.</p>
+     * <p>The metadata keys which describe properties of RAW frames:</p>
+     * <ul>
+     * <li>ACAMERA_STATISTICS_HOT_PIXEL_MAP</li>
+     * <li>android.statistics.lensShadingCorrectionMap</li>
+     * <li>ACAMERA_LENS_DISTORTION</li>
+     * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+     * <li>ACAMERA_LENS_POSE_ROTATION</li>
+     * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+     * </ul>
+     * <p>should be interpreted in the coordinate system of the effective field of view after
+     * the RAW crop. In this coordinate system,
+     * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to
+     * the top left corner of the cropped RAW frame and
+     * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+     * the bottom right corner. Client applications must use the values of the keys
+     * in the CaptureResult metadata if present.</p>
+     * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+     * use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
+     *
+     * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+     */
+    ACAMERA_SCALER_RAW_CROP_REGION =                            // int32[4]
+            ACAMERA_SCALER_START + 26,
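
A hedged sketch of comparing the cropped RAW readout against the full pre-correction active array, e.g. to derive the relative field of view. It assumes both rectangles use the (left, top, width, height) representation stated for the other sensor-array keys in this header; the helper name is illustrative.

    // Illustrative sketch: linear field-of-view fraction of the cropped RAW readout.
    #include <camera/NdkCameraMetadata.h>

    static float rawCropFovFraction(const ACameraMetadata* chars,
                                    const ACameraMetadata* result) {
        ACameraMetadata_const_entry crop, array;
        if (ACameraMetadata_getConstEntry(result, ACAMERA_SCALER_RAW_CROP_REGION,
                                          &crop) != ACAMERA_OK || crop.count < 4 ||
            ACameraMetadata_getConstEntry(chars,
                ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
                &array) != ACAMERA_OK || array.count < 4) {
            return 1.0f;  // no crop reported: full field of view
        }
        // Width ratio of the cropped readout vs. the full pre-correction array
        // (assumed (left, top, width, height) layout).
        return static_cast<float>(crop.data.i32[2]) / static_cast<float>(array.data.i32[2]);
    }
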
     ACAMERA_SCALER_END,
 
     /**
@@ -5113,13 +5374,10 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * When operating in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
-     * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability would typically perform pixel binning in order to improve low light
+     * would typically perform pixel binning in order to improve low light
      * performance, noise reduction etc. However, in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
-     * mode (supported only
-     * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+     * mode, sensors typically operate in unbinned mode allowing for a larger image size.
      * The stream configurations supported in
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
      * mode are also different from those of
@@ -5133,7 +5391,36 @@
      * <code>android.scaler.streamConfigurationMap</code>
      * must not be mixed in the same CaptureRequest. In other words, these outputs are
      * exclusive to each other.
-     * This key does not need to be set for reprocess requests.</p>
+     * This key does not need to be set for reprocess requests.
+     * This key will be present on devices supporting the
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability. It may also be present on devices which do not support the aforementioned
+     * capability. In that case:</p>
+     * <ul>
+     * <li>
+     * <p>The mandatory stream combinations listed in
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
+     *   would not apply.</p>
+     * </li>
+     * <li>
+     * <p>The bayer pattern of {@code RAW} streams when
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+     *   is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+     * </li>
+     * <li>
+     * <p>The following keys will always be present:</p>
+     * <ul>
+     * <li>android.scaler.streamConfigurationMapMaximumResolution</li>
+     * <li>ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * <li>ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * <li>ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+     * </ul>
+     * </li>
+     * </ul>
+     *
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      */
     ACAMERA_SENSOR_PIXEL_MODE =                                 // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
             ACAMERA_SENSOR_START + 32,
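
A minimal sketch of requesting the unbinned mode described above; the helper name is illustrative. Whether the request is honored depends on the device advertising ACAMERA_SENSOR_PIXEL_MODE among its available capture request keys.

    // Illustrative sketch: ask for a full-resolution (typically unbinned) capture.
    #include <camera/NdkCaptureRequest.h>

    static camera_status_t requestMaximumResolution(ACaptureRequest* request) {
        const uint8_t mode = ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION;
        return ACaptureRequest_setEntry_u8(request, ACAMERA_SENSOR_PIXEL_MODE, 1, &mode);
    }
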
@@ -5478,7 +5765,8 @@
      * counterparts.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -5510,7 +5798,8 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
      * @see ACAMERA_SENSOR_PIXEL_MODE
@@ -5538,7 +5827,8 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
      * This key will only be present for devices which advertise the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
@@ -5563,12 +5853,27 @@
      * to improve various aspects of imaging such as noise reduction, low light
      * performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
      * 3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
-     * the same color filter.</p>
-     * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
-     * will have a regular bayer pattern.</p>
-     * <p>This key will not be present for sensors which don't have the
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
-     * capability.</p>
+     * the same color filter.
+     * If the device has the
+     * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability:</p>
+     * <ul>
+     * <li>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW
+     *   images will have a regular bayer pattern.</li>
+     * </ul>
+     * <p>If the device does not have the
+     * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+     * capability:</p>
+     * <ul>
+     * <li>This key will be present if
+     *   <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+     *   lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>, since RAW
+     *   images may not necessarily have a regular bayer pattern when
+     *   <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a> is set to
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</li>
+     * </ul>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
      */
     ACAMERA_SENSOR_INFO_BINNING_FACTOR =                        // int32[2]
             ACAMERA_SENSOR_INFO_START + 14,
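
A small sketch of probing the key above; if it is absent, RAW output keeps a regular Bayer pattern for the device/mode combination, as described. The helper name is illustrative.

    // Illustrative sketch: fetch the pixel-group dimensions under one color filter.
    #include <camera/NdkCameraMetadata.h>

    static bool getBinningFactor(const ACameraMetadata* chars, int32_t outFactor[2]) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_BINNING_FACTOR,
                                          &entry) != ACAMERA_OK || entry.count < 2) {
            return false;  // key not present: regular Bayer pattern
        }
        outFactor[0] = entry.data.i32[0];
        outFactor[1] = entry.data.i32[1];
        return true;
    }
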
@@ -7318,6 +7623,145 @@
             ACAMERA_AUTOMOTIVE_LENS_START,
     ACAMERA_AUTOMOTIVE_LENS_END,
 
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+     * <p>If the camera device supports Jpeg/R, it will support the same stream combinations with
+     * Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported
+     * by the device are determined by the device's hardware level and capabilities.</p>
+     * <p>All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+     * Configuring JPEG and Jpeg/R streams at the same time is not supported.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEGR format as OUTPUT only.</p>
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS =      // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t)
+            ACAMERA_JPEGR_START,
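
A minimal sketch of enumerating the (format, width, height, input?) tuples above; the helper name is illustrative.

    // Illustrative sketch: print every advertised Jpeg/R output size.
    #include <camera/NdkCameraMetadata.h>
    #include <cstdio>

    static void listJpegRSizes(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, &entry) != ACAMERA_OK) {
            return;  // Jpeg/R not supported on this device
        }
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            if (entry.data.i32[i + 3] ==
                    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT) {
                std::printf("Jpeg/R output: %dx%d\n",
                            entry.data.i32[i + 1], entry.data.i32[i + 2]);
            }
        }
    }
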
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This should correspond to the frame duration when only that
+     * stream is active, with all processing (typically in android.*.mode)
+     * set to either OFF or FAST.</p>
+     * <p>When multiple streams are used in a request, the minimum frame
+     * duration will be max(individual stream min durations).</p>
+     * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+     * calculating the max frame rate.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     * @see ACAMERA_SENSOR_FRAME_DURATION
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS =        // int64[4*n]
+            ACAMERA_JPEGR_START + 1,
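
The max(individual stream min durations) rule above is simple arithmetic; a minimal helper with an illustrative name makes the intent explicit.

    // Illustrative sketch: the request-level minimum frame duration is the largest of
    // the per-stream minimum durations.
    #include <cstddef>
    #include <cstdint>

    static int64_t requestMinFrameDurationNs(const int64_t* streamMinDurationsNs,
                                             size_t count) {
        int64_t result = 0;
        for (size_t i = 0; i < count; i++) {
            if (streamMinDurationsNs[i] > result) {
                result = streamMinDurationsNs[i];
            }
        }
        return result;
    }
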
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams.</p>
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>A stall duration is how much extra time would get added
+     * to the normal minimum frame duration for a repeating request
+     * that has streams with non-zero stall.</p>
+     * <p>This functions similarly to
+     * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for Jpeg/R
+     * streams.</p>
+     * <p>All Jpeg/R output stream formats may have a nonzero stall
+     * duration.</p>
+     *
+     * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS =            // int64[4*n]
+            ACAMERA_JPEGR_START + 2,
+    /**
+     * <p>The available Jpeg/R stream
+     * configurations that this camera device supports
+     * (i.e. format, width, height, output/input stream).</p>
+     *
+     * <p>Type: int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS for details.</p>
+     * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+     * AIMAGE_FORMAT_JPEG_R format as OUTPUT only.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int32[n*4] (acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t)
+            ACAMERA_JPEGR_START + 3,
+    /**
+     * <p>This lists the minimum frame duration for each
+     * format/size combination for Jpeg/R output formats for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 4,
+    /**
+     * <p>This lists the maximum stall duration for each
+     * output format/size combination for Jpeg/R streams for CaptureRequests where
+     * ACAMERA_SENSOR_PIXEL_MODE is set to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+     *
+     * @see ACAMERA_SENSOR_PIXEL_MODE
+     *
+     * <p>Type: int64[4*n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Refer to ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS for details.</p>
+     *
+     * @see ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS
+     */
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION = 
+                                                                // int64[4*n]
+            ACAMERA_JPEGR_START + 5,
+    ACAMERA_JPEGR_END,
+
 } acamera_metadata_tag_t;
 
 /**
@@ -7534,7 +7978,7 @@
     /**
      * <p>An external flash has been turned on.</p>
      * <p>It informs the camera device that an external flash has been turned on, and that
-     * metering (and continuous focus if active) should be quickly recaculated to account
+     * metering (and continuous focus if active) should be quickly recalculated to account
      * for the external flash. Otherwise, this mode acts like ON.</p>
      * <p>When the external flash is turned off, AE mode should be changed to one of the
      * other available AE modes.</p>
@@ -8471,6 +8915,82 @@
 
 } acamera_metadata_enum_android_control_extended_scene_mode_t;
 
+// ACAMERA_CONTROL_SETTINGS_OVERRIDE
+typedef enum acamera_metadata_enum_acamera_control_settings_override {
+    /**
+     * <p>No keys are applied sooner than the other keys when applying CaptureRequest
+     * settings to the camera device. This is the default value.</p>
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF                            = 0,
+
+    /**
+     * <p>Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+     * zoom related keys are:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_ZOOM_RATIO</li>
+     * <li>ACAMERA_SCALER_CROP_REGION</li>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * </ul>
+     * <p>Even though ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+     * and ACAMERA_CONTROL_AF_REGIONS are not directly zoom related, applications
+     * typically scale these regions together with ACAMERA_SCALER_CROP_REGION to have a
+     * consistent mapping within the current field of view. In this aspect, they are
+     * related to ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO.</p>
+     *
+     * @see ACAMERA_CONTROL_AE_REGIONS
+     * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
+     * @see ACAMERA_CONTROL_ZOOM_RATIO
+     * @see ACAMERA_SCALER_CROP_REGION
+     */
+    ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM                           = 1,
+
+} acamera_metadata_enum_android_control_settings_override_t;
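
A minimal sketch of selecting the zoom override described above; the helper name is illustrative, and the int32 type of the ACAMERA_CONTROL_SETTINGS_OVERRIDE tag is assumed from its declaration earlier in this header. This is only meaningful on devices that list ZOOM among their available settings overrides.

    // Illustrative sketch: apply zoom-related keys ahead of the rest of the request.
    #include <camera/NdkCaptureRequest.h>

    static camera_status_t preferFastZoom(ACaptureRequest* request) {
        const int32_t overrideMode = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
        return ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE,
                                            1, &overrideMode);
    }
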
+
+// ACAMERA_CONTROL_AUTOFRAMING
+typedef enum acamera_metadata_enum_acamera_control_autoframing {
+    /**
+     * <p>Disable autoframing.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_OFF                                  = 0,
+
+    /**
+     * <p>Enable autoframing to keep people in the frame's field of view.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_ON                                   = 1,
+
+} acamera_metadata_enum_android_control_autoframing_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_available {
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_FALSE                      = 0,
+
+    ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE_TRUE                       = 1,
+
+} acamera_metadata_enum_android_control_autoframing_available_t;
+
+// ACAMERA_CONTROL_AUTOFRAMING_STATE
+typedef enum acamera_metadata_enum_acamera_control_autoframing_state {
+    /**
+     * <p>Auto-framing is inactive.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_INACTIVE                       = 0,
+
+    /**
+     * <p>Auto-framing is in progress - either zooming in, zooming out or panning is taking place.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_FRAMING                        = 1,
+
+    /**
+     * <p>Auto-framing has reached a stable state (frame/fov is not being adjusted). The state
+     * may transition back to FRAMING if the scene changes.</p>
+     */
+    ACAMERA_CONTROL_AUTOFRAMING_STATE_CONVERGED                      = 2,
+
+} acamera_metadata_enum_android_control_autoframing_state_t;
+
 
 
 // ACAMERA_EDGE_MODE
@@ -9211,24 +9731,25 @@
      * camera's crop region is set to maximum size, the FOV of the physical streams for the
      * ultrawide lens will be the same as the logical stream, by making the crop region
      * smaller than its active array size to compensate for the smaller focal length.</p>
-     * <p>There are two ways for the application to capture RAW images from a logical camera
-     * with RAW capability:</p>
+     * <p>For a logical camera, typically the underlying physical cameras have different RAW
+     * capabilities (such as resolution or CFA pattern). There are two ways for the
+     * application to capture RAW images from the logical camera:</p>
      * <ul>
-     * <li>Because the underlying physical cameras may have different RAW capabilities (such
-     * as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
-     * is configured, the camera device makes sure the default active physical camera remains
-     * active and does not switch to other physical cameras. (One exception is that, if the
-     * logical camera consists of identical image sensors and advertises multiple focalLength
-     * due to different lenses, the camera device may generate RAW images from different
-     * physical cameras based on the focalLength being set by the application.) This
-     * backward-compatible approach usually results in loss of optical zoom, to telephoto
-     * lens or to ultrawide lens.</li>
-     * <li>Alternatively, to take advantage of the full zoomRatio range of the logical camera,
-     * the application should use <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
-     * to capture RAW images from the currently active physical camera. Because different
-     * physical camera may have different RAW characteristics, the application needs to use
-     * the characteristics and result metadata of the active physical camera for the
-     * relevant RAW metadata.</li>
+     * <li>If the logical camera has RAW capability, the application can create and use RAW
+     * streams in the same way as before. If a RAW stream is configured, to maintain
+     * backward compatibility, the camera device makes sure the default active physical
+     * camera remains active and does not switch to other physical cameras. (One exception
+     * is that, if the logical camera consists of identical image sensors and advertises
+     * multiple focalLength due to different lenses, the camera device may generate RAW
+     * images from different physical cameras based on the focalLength being set by the
+     * application.) This backward-compatible approach usually results in loss of optical
+     * zoom to the telephoto or ultrawide lens.</li>
+     * <li>Alternatively, if supported by the device,
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
+     * can be used to capture RAW images from one of the underlying physical cameras
+     * (depending on the current zoom level). Because different physical cameras may have
+     * different RAW characteristics, the application needs to use the characteristics
+     * and result metadata of the active physical camera for the relevant RAW metadata.</li>
      * </ul>
      * <p>The capture request and result metadata tags required for backward compatible camera
      * functionalities will be solely based on the logical camera capability. On the other
@@ -9345,9 +9866,10 @@
      * </ul>
      * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
      * lists all of the supported stream use cases.</p>
-     * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
-     * mandatory stream combinations involving stream use cases, which can also be queried
-     * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+     * <p>Refer to
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">CameraDevice#stream-use-case-capability-additional-guaranteed-configurations</a>
+     * for the mandatory stream combinations involving stream use cases, which can also be
+     * queried via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
      */
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE           = 19,
 
@@ -9444,6 +9966,31 @@
 
 } acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
 
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+    /**
+     * <p>Default value, when not explicitly specified. The camera device will choose the color
+     * space to employ.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED   = -1,
+
+    /**
+     * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB          = 0,
+
+    /**
+     * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3    = 7,
+
+    /**
+     * <p>RGB color space BT.2100 standardized as Hybrid Log Gamma encoding.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG    = 16,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
 
 // ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
@@ -9697,6 +10244,30 @@
      */
     ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL             = 0x5,
 
+    /**
+     * <p>Cropped RAW stream when the client chooses to crop the field of view.</p>
+     * <p>Certain types of image sensors can run in binned modes in order to improve the
+     * signal-to-noise ratio while capturing frames. However, at certain zoom levels and/or when
+     * other scene conditions are deemed fit, the camera sub-system may choose to un-bin and
+     * remosaic the sensor's output. This results in a RAW frame which is cropped in field
+     * of view and yet has the same number of pixels as full field of view RAW, thereby
+     * improving image detail.</p>
+     * <p>The resultant field of view of the RAW stream will be greater than or equal to that
+     * of croppable non-RAW streams. The effective crop region for this RAW stream will be
+     * reflected in the CaptureResult key ACAMERA_SCALER_RAW_CROP_REGION.</p>
+     * <p>If this stream use case is set on a non-RAW stream, i.e. not one of:</p>
+     * <ul>
+     * <li>{@link AIMAGE_FORMAT_RAW16 RAW_SENSOR}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW10 RAW10}</li>
+     * <li>{@link AIMAGE_FORMAT_RAW12 RAW12}</li>
+     * </ul>
+     * <p>the session configuration is not guaranteed to succeed.</p>
+     * <p>This stream use case may not be supported on some devices.</p>
+     *
+     * @see ACAMERA_SCALER_RAW_CROP_REGION
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW            = 0x6,
+
 } acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
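
Since this use case may not be supported everywhere, a client would typically scan ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES before attaching it to a RAW output; a minimal sketch with an illustrative helper name follows.

    // Illustrative sketch: check for CROPPED_RAW in the advertised stream use cases.
    #include <camera/NdkCameraMetadata.h>

    static bool supportsCroppedRaw(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES,
                                          &entry) != ACAMERA_OK) {
            return false;
        }
        for (uint32_t i = 0; i < entry.count; i++) {
            if (entry.data.i64[i] == ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) {
                return true;
            }
        }
        return false;
    }
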
 
 
@@ -9860,16 +10431,12 @@
 // ACAMERA_SENSOR_PIXEL_MODE
 typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
     /**
-     * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
-     * supported unless a camera device advertises
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+     * <p>This is the default sensor pixel mode.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_DEFAULT                                = 0,
 
     /**
-     * <p>This sensor pixel mode is offered by devices with capability
-     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
-     * In this mode, sensors typically do not bin pixels, as a result can offer larger
+     * <p>In this mode, sensors typically do not bin pixels and, as a result, can offer larger
      * image sizes.</p>
      */
     ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION                     = 1,
@@ -10162,7 +10729,8 @@
      * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
      * better.</p>
      * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession}
+     * documentation are guaranteed to be supported.</p>
      * <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
      * support for color image capture. The only exception is that the device may
      * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -10188,7 +10756,8 @@
     /**
      * <p>This camera device is capable of supporting advanced imaging applications.</p>
      * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
-     * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * {@link ACameraDevice_createCaptureSession}
+     * documentation are guaranteed to be supported.</p>
      * <p>A <code>FULL</code> device will support below capabilities:</p>
      * <ul>
      * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10215,7 +10784,9 @@
 
     /**
      * <p>This camera device is running in backward compatibility mode.</p>
-     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.</p>
+     * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the
+     * {@link ACameraDevice_createCaptureSession}
+     * documentation are supported.</p>
      * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
      * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
      * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -10237,7 +10808,9 @@
      * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
      * FULL-level capabilities.</p>
      * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
-     * <code>LIMITED</code> tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+     * <code>LIMITED</code> tables in the
+     * {@link ACameraDevice_createCaptureSession}
+     * documentation are guaranteed to be supported.</p>
      * <p>The following additional capabilities are guaranteed to be supported:</p>
      * <ul>
      * <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10622,6 +11195,26 @@
 
 
 
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT       = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t;
+
+// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations_maximum_resolution {
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                      = 0,
+
+    ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                      = 1,
+
+} acamera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t;
+
+
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index d83c5b3..dc18544 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -148,7 +148,7 @@
  * @param request the {@link ACaptureRequest} of interest.
  * @param tag the tag value of the camera metadata entry to be get.
  * @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- *        call succeeeds.
+ *        call succeeds.
  *
  * @return <ul>
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index b3977ff..4c54658 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -12,6 +12,8 @@
     ACameraCaptureSession_logicalCamera_setRepeatingRequest; # introduced=29
     ACameraCaptureSession_logicalCamera_setRepeatingRequestV2; # introduced=33
     ACameraCaptureSession_stopRepeating;
+    ACameraCaptureSession_setWindowPreparedCallback; # introduced=34
+    ACameraCaptureSession_prepareWindow; # introduced=34
     ACameraCaptureSession_updateSharedOutput; # introduced=28
     ACameraDevice_close;
     ACameraDevice_createCaptureRequest;
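
The two symbols added above are the new surface-preparation entry points. The sketch below shows the intended call order only; the exact signatures and the callback parameter order are assumptions based on the symbol names, so consult NdkCameraCaptureSession.h for the authoritative declarations.

    // Illustrative sketch (signatures assumed; see NdkCameraCaptureSession.h).
    #include <camera/NdkCameraCaptureSession.h>

    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        // Buffers for `window` are now pre-allocated; streaming to it is cheap to start.
        (void) context; (void) window; (void) session;
    }

    static void prepareOutput(ACameraCaptureSession* session, ACameraWindowType* window) {
        ACameraCaptureSession_setWindowPreparedCallback(session, /*context=*/nullptr,
                                                        onWindowPrepared);
        ACameraCaptureSession_prepareWindow(session, window);  // asynchronous; wait for callback
    }
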
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index 5a1af79..45098c3 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -14,9 +14,14 @@
  * limitations under the License.
  */
 
-#include <string>
 #include "utils.h"
 
+#include <android/binder_auto_utils.h>
+#include <string>
+#include <set>
+
+using ::android::acam::utils::native_handle_ptr_wrapper;
+
 struct ACaptureSessionOutput {
     explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
             const char* physicalCameraId = "") :
@@ -38,8 +43,23 @@
         return mWindow > other.mWindow;
     }
 
-    android::acam::utils::native_handle_ptr_wrapper mWindow;
-    std::set<android::acam::utils::native_handle_ptr_wrapper> mSharedWindows;
+    inline bool isWindowEqual(ACameraWindowType* window) const {
+        return mWindow == native_handle_ptr_wrapper(window);
+    }
+
+    // returns true if the window was successfully added, false otherwise.
+    inline bool addSharedWindow(ACameraWindowType* window) {
+        auto ret = mSharedWindows.insert(window);
+        return ret.second;
+    }
+
+    // returns the number of elements removed.
+    inline size_t removeSharedWindow(ACameraWindowType* window) {
+        return mSharedWindows.erase(window);
+    }
+
+    native_handle_ptr_wrapper mWindow;
+    std::set<native_handle_ptr_wrapper> mSharedWindows;
     bool           mIsShared;
     int            mRotation = CAMERA3_STREAM_ROTATION_0;
     std::string mPhysicalCameraId;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 0a57590..87102e4 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -17,27 +17,34 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraDeviceVendor"
 
-#include <vector>
-#include <inttypes.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <CameraMetadata.h>
-
-#include "ndk_vendor/impl/ACameraDevice.h"
 #include "ACameraCaptureSession.h"
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
+#include "ndk_vendor/impl/ACameraDevice.h"
 #include "utils.h"
+#include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <inttypes.h>
+#include <map>
+#include <utility>
+#include <vector>
 
-#define CHECK_TRANSACTION_AND_RET(remoteRet, status, callName) \
-    if (!remoteRet.isOk()) { \
-        ALOGE("%s: Transaction error during %s call %s", __FUNCTION__, callName, \
-                  remoteRet.description().c_str()); \
-        return ACAMERA_ERROR_UNKNOWN; \
-    } \
-    if (status != Status::NO_ERROR) { \
-        ALOGE("%s: %s call failed", __FUNCTION__, callName); \
-        return utils::convertFromHidl(status); \
+#define CHECK_TRANSACTION_AND_RET(ret, callName)                                            \
+    if (!ret.isOk()) {                                                                      \
+        if (ret.getExceptionCode() != EX_SERVICE_SPECIFIC) {                                \
+            ALOGE("%s: Transaction error during %s call %d", __FUNCTION__, callName,        \
+                                ret.getExceptionCode());                                    \
+            return ACAMERA_ERROR_UNKNOWN;                                                   \
+        } else {                                                                            \
+            Status errStatus = static_cast<Status>(ret.getServiceSpecificError());          \
+            std::string errorMsg =                                                          \
+                    aidl::android::frameworks::cameraservice::common::toString(errStatus);  \
+            ALOGE("%s: %s call failed: %s", __FUNCTION__, callName, errorMsg.c_str());      \
+            return utils::convertFromAidl(errStatus);                                       \
+        }                                                                                   \
     }
 
 using namespace android;
@@ -49,10 +56,10 @@
 namespace android {
 namespace acam {
 
-using HCameraMetadata = frameworks::cameraservice::device::V2_0::CameraMetadata;
-using OutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
-using SessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
-using hardware::Void;
+using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using ::ndk::ScopedAStatus;
 
 // Static member definitions
 const char* CameraDevice::kContextKey        = "Context";
@@ -81,7 +88,6 @@
         mCameraId(id),
         mAppCallbacks(*cb),
         mChars(std::move(chars)),
-        mServiceCallback(new ServiceCallback(this)),
         mWrapper(wrapper),
         mInError(false),
         mError(ACAMERA_OK),
@@ -125,8 +131,11 @@
 
 CameraDevice::~CameraDevice() { }
 
-void
-CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
+void CameraDevice::init() {
+    mServiceCallback = ndk::SharedRefBase::make<ServiceCallback>(weak_from_this());
+}
+
+void CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
     msg->post();
     msg.clear();
     sp<AMessage> cleanupMsg = new AMessage(kWhatCleanUpSessions, mHandler);
@@ -134,8 +143,7 @@
 }
 
 // TODO: cached created request?
-camera_status_t
-CameraDevice::createCaptureRequest(
+camera_status_t CameraDevice::createCaptureRequest(
         ACameraDevice_request_template templateId,
         const ACameraIdList* physicalCameraIdList,
         ACaptureRequest** request) const {
@@ -147,20 +155,16 @@
     if (mRemote == nullptr) {
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    CameraMetadata rawRequest;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->createDefaultRequest(
-        utils::convertToHidl(templateId),
-        [&status, &rawRequest](auto s, const hidl_vec<uint8_t> &metadata) {
-            status = s;
-            if (status == Status::NO_ERROR && utils::convertFromHidlCloned(metadata, &rawRequest)) {
-            } else {
-                ALOGE("%s: Couldn't create default request", __FUNCTION__);
-            }
-        });
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "createDefaultRequest()")
+
+    AidlCameraMetadata aidlMetadata;
+    ScopedAStatus remoteRet = mRemote->createDefaultRequest(
+            utils::convertToAidl(templateId), &aidlMetadata);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "createDefaultRequest()")
+
+    camera_metadata_t* rawRequest;
+    utils::cloneFromAidl(aidlMetadata, &rawRequest);
     ACaptureRequest* outReq = new ACaptureRequest();
-    outReq->settings = new ACameraMetadata(rawRequest.release(), ACameraMetadata::ACM_REQUEST);
+    outReq->settings = new ACameraMetadata(rawRequest, ACameraMetadata::ACM_REQUEST);
     if (physicalCameraIdList != nullptr) {
         for (auto i = 0; i < physicalCameraIdList->numCameras; i++) {
             outReq->physicalSettings.emplace(physicalCameraIdList->cameraIds[i],
@@ -172,9 +176,8 @@
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::createCaptureSession(
-        const ACaptureSessionOutputContainer*       outputs,
+camera_status_t CameraDevice::createCaptureSession(
+        const ACaptureSessionOutputContainer* outputs,
         const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
@@ -199,7 +202,7 @@
     }
 
     ACameraCaptureSession* newSession = new ACameraCaptureSession(
-            mNextSessionId++, outputs, callbacks, this);
+            mNextSessionId++, outputs, callbacks, weak_from_this());
 
     // set new session as current session
     newSession->incStrong((void *) ACameraDevice_createCaptureSession);
@@ -225,41 +228,39 @@
     sessionConfig.outputStreams.resize(sessionOutputContainer->mOutputs.size());
     size_t index = 0;
     for (const auto& output : sessionOutputContainer->mOutputs) {
-        sessionConfig.outputStreams[index].rotation = utils::convertToHidl(output.mRotation);
-        sessionConfig.outputStreams[index].windowGroupId = -1;
-        sessionConfig.outputStreams[index].windowHandles.resize(output.mSharedWindows.size() + 1);
-        sessionConfig.outputStreams[index].windowHandles[0] = output.mWindow;
-        sessionConfig.outputStreams[index].physicalCameraId = output.mPhysicalCameraId;
+        OutputConfiguration& outputStream = sessionConfig.outputStreams[index];
+        outputStream.rotation = utils::convertToAidl(output.mRotation);
+        outputStream.windowGroupId = -1;
+        outputStream.windowHandles.resize(output.mSharedWindows.size() + 1);
+        outputStream.windowHandles[0] = std::move(dupToAidl(output.mWindow));
+        outputStream.physicalCameraId = output.mPhysicalCameraId;
         index++;
     }
 
     bool configSupported = false;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->isSessionConfigurationSupported(sessionConfig,
-        [&status, &configSupported](auto s, auto supported) {
-            status = s;
-            configSupported = supported;
-        });
-
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "isSessionConfigurationSupported()");
+    ScopedAStatus remoteRet = mRemote->isSessionConfigurationSupported(
+            sessionConfig, &configSupported);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "isSessionConfigurationSupported()")
     return configSupported ? ACAMERA_OK : ACAMERA_ERROR_STREAM_CONFIGURE_FAIL;
 }
 
 static void addMetadataToPhysicalCameraSettings(const CameraMetadata *metadata,
         const std::string &cameraId, PhysicalCameraSettings *physicalCameraSettings) {
-    CameraMetadata metadataCopy = *metadata;
-    camera_metadata_t *camera_metadata = metadataCopy.release();
-    HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(camera_metadata, &hCameraMetadata, /*shouldOwn*/ true);
-    physicalCameraSettings->settings.metadata(std::move(hCameraMetadata));
+    const camera_metadata_t* cameraMetadata = metadata->getAndLock();
+    AidlCameraMetadata aidlCameraMetadata;
+    utils::convertToAidl(cameraMetadata, &aidlCameraMetadata);
+    metadata->unlock(cameraMetadata);
+    physicalCameraSettings->settings.set<CaptureMetadataInfo::metadata>(
+            std::move(aidlCameraMetadata));
     physicalCameraSettings->id = cameraId;
 }
 
 void CameraDevice::addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
         sp<CaptureRequest> &req) {
     req->mPhysicalCameraSettings.resize(1 + aCaptureRequest->physicalSettings.size());
-    addMetadataToPhysicalCameraSettings(&(aCaptureRequest->settings->getInternalData()), getId(),
-                    &(req->mPhysicalCameraSettings[0]));
+    addMetadataToPhysicalCameraSettings(
+            &(aCaptureRequest->settings->getInternalData()),
+            getId(), &(req->mPhysicalCameraSettings[0]));
     size_t i = 1;
     for (auto &physicalSetting : aCaptureRequest->physicalSettings) {
         addMetadataToPhysicalCameraSettings(&(physicalSetting.second->getInternalData()),
@@ -285,7 +286,7 @@
 
     int32_t streamId = -1;
     for (auto& kvPair : mConfiguredOutputs) {
-        if (utils::isWindowNativeHandleEqual(kvPair.second.first, output->mWindow)) {
+        if (kvPair.second.first == output->mWindow) {
             streamId = kvPair.first;
             break;
         }
@@ -295,56 +296,86 @@
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
 
-    OutputConfigurationWrapper outConfigW;
-    OutputConfiguration &outConfig = outConfigW.mOutputConfiguration;
-    outConfig.rotation = utils::convertToHidl(output->mRotation);
+    OutputConfiguration outConfig;
+    outConfig.rotation = utils::convertToAidl(output->mRotation);
     outConfig.windowHandles.resize(output->mSharedWindows.size() + 1);
-    outConfig.windowHandles[0] = output->mWindow;
+    outConfig.windowHandles[0] = std::move(dupToAidl(output->mWindow));
     outConfig.physicalCameraId = output->mPhysicalCameraId;
     int i = 1;
     for (auto& anw : output->mSharedWindows) {
-        outConfig.windowHandles[i++] = anw;
+        outConfig.windowHandles[i++] = std::move(dupToAidl(anw));
     }
 
-    auto remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
+    auto remoteRet = mRemote->updateOutputConfiguration(streamId,
+                                                        outConfig);
+
     if (!remoteRet.isOk()) {
-        ALOGE("%s: Transaction error in updating OutputConfiguration: %s", __FUNCTION__,
-              remoteRet.description().c_str());
-        return ACAMERA_ERROR_UNKNOWN;
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status st = static_cast<Status>(remoteRet.getServiceSpecificError());
+            switch (st) {
+                case Status::NO_ERROR:
+                    break;
+                case Status::INVALID_OPERATION:
+                    ALOGE("Camera device %s invalid operation", getId());
+                    return ACAMERA_ERROR_INVALID_OPERATION;
+                case Status::ALREADY_EXISTS:
+                    ALOGE("Camera device %s output surface already exists", getId());
+                    return ACAMERA_ERROR_INVALID_PARAMETER;
+                case Status::ILLEGAL_ARGUMENT:
+                    ALOGE("Camera device %s invalid input argument", getId());
+                    return ACAMERA_ERROR_INVALID_PARAMETER;
+                default:
+                    ALOGE("Camera device %s failed to add shared output", getId());
+                    return ACAMERA_ERROR_UNKNOWN;
+            }
+        } else {
+            ALOGE("%s: Transaction error in updating OutputConfiguration: %d", __FUNCTION__,
+                remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
 
-    switch (remoteRet) {
-            case Status::NO_ERROR:
-                break;
-            case Status::INVALID_OPERATION:
-                ALOGE("Camera device %s invalid operation", getId());
-                return ACAMERA_ERROR_INVALID_OPERATION;
-            case Status::ALREADY_EXISTS:
-                ALOGE("Camera device %s output surface already exists", getId());
-                return ACAMERA_ERROR_INVALID_PARAMETER;
-            case Status::ILLEGAL_ARGUMENT:
-                ALOGE("Camera device %s invalid input argument", getId());
-                return ACAMERA_ERROR_INVALID_PARAMETER;
-            default:
-                ALOGE("Camera device %s failed to add shared output", getId());
-                return ACAMERA_ERROR_UNKNOWN;
-    }
-
-    mConfiguredOutputs[streamId] =
-            std::move(std::make_pair(std::move(output->mWindow), std::move(outConfigW)));
-
+    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow,
+                                        std::move(outConfig));
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::allocateCaptureRequestLocked(
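+// Forwards ICameraDeviceUser::prepare() for the stream backing |window| so the
+// service can pre-allocate its buffers; completion is reported via onPrepared().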
+camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    if (window == nullptr) {
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    int32_t streamId = -1;
+    for (auto& kvPair : mConfiguredOutputs) {
+        if (window == kvPair.second.first) {
+            streamId = kvPair.first;
+            break;
+        }
+    }
+    if (streamId < 0) {
+        ALOGE("Error: Invalid output configuration");
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    auto remoteRet = mRemote->prepare(streamId);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "prepare()")
+    return ACAMERA_OK;
+}
+
+camera_status_t CameraDevice::allocateCaptureRequestLocked(
         const ACaptureRequest* request, /*out*/sp<CaptureRequest> &outReq) {
     sp<CaptureRequest> req(new CaptureRequest());
     req->mCaptureRequest.physicalCameraSettings.resize(1 + request->physicalSettings.size());
 
     size_t index = 0;
     allocateOneCaptureRequestMetadata(
-            req->mCaptureRequest.physicalCameraSettings[index++], mCameraId, request->settings);
+            req->mCaptureRequest.physicalCameraSettings[index++],
+            mCameraId, request->settings);
 
     for (auto& physicalEntry : request->physicalSettings) {
         allocateOneCaptureRequestMetadata(
@@ -354,19 +385,20 @@
 
     std::vector<int32_t> requestStreamIdxList;
     std::vector<int32_t> requestSurfaceIdxList;
-    for (auto outputTarget : request->targets->mOutputs) {
-        const native_handle_t* anw = outputTarget.mWindow;
+
+    for (auto& outputTarget : request->targets->mOutputs) {
+        native_handle_ptr_wrapper anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
         for (const auto& kvPair : mConfiguredOutputs) {
             int streamId = kvPair.first;
-            const OutputConfigurationWrapper& outConfig = kvPair.second.second;
-            const auto& windowHandles = outConfig.mOutputConfiguration.windowHandles;
+            const OutputConfiguration& outConfig = kvPair.second.second;
+            const auto& windowHandles = outConfig.windowHandles;
             for (int surfaceId = 0; surfaceId < (int) windowHandles.size(); surfaceId++) {
-                // If two native handles are equivalent, so are their surfaces.
-                if (utils::isWindowNativeHandleEqual(windowHandles[surfaceId].getNativeHandle(),
-                                                      anw)) {
+                // If two window handles point to the same native window,
+                // they have the same surfaces.
+                if (utils::isWindowNativeHandleEqual(anw, windowHandles[surfaceId])) {
                     found = true;
                     requestStreamIdxList.push_back(streamId);
                     requestSurfaceIdxList.push_back(surfaceId);
@@ -378,7 +410,7 @@
             }
         }
         if (!found) {
-            ALOGE("Unconfigured output target %p in capture request!", anw);
+            ALOGE("Unconfigured output target %p in capture request!", anw.mWindow);
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
@@ -395,54 +427,57 @@
         PhysicalCameraSettings& cameraSettings,
         const std::string& id, const sp<ACameraMetadata>& metadata) {
     cameraSettings.id = id;
-    // TODO: Do we really need to copy the metadata here ?
-    CameraMetadata metadataCopy = metadata->getInternalData();
-    camera_metadata_t *cameraMetadata = metadataCopy.release();
-    HCameraMetadata hCameraMetadata;
-    utils::convertToHidl(cameraMetadata, &hCameraMetadata, true);
-    if (metadata != nullptr) {
-        if (hCameraMetadata.data() != nullptr &&
-            mCaptureRequestMetadataQueue != nullptr &&
-            mCaptureRequestMetadataQueue->write(
-                reinterpret_cast<const uint8_t *>(hCameraMetadata.data()),
-                hCameraMetadata.size())) {
-            // The metadata field of the union would've been destructued, so no need
-            // to re-size it.
-            cameraSettings.settings.fmqMetadataSize(hCameraMetadata.size());
-        } else {
-            ALOGE("Fmq write capture result failed, falling back to hwbinder");
-            cameraSettings.settings.metadata(std::move(hCameraMetadata));
-        }
+
+    if (metadata == nullptr) {
+        return;
+    }
+
+    const camera_metadata_t* cameraMetadata = metadata->getInternalData().getAndLock();
+    AidlCameraMetadata aidlCameraMetadata;
+    utils::convertToAidl(cameraMetadata, &aidlCameraMetadata);
+    metadata->getInternalData().unlock(cameraMetadata);
+
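+    // Prefer the capture request FMQ: write the raw metadata bytes and send only
+    // their size over binder. Fall back to embedding the full metadata in the
+    // parcel if the queue is missing or the write fails.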
+    if (aidlCameraMetadata.metadata.data() != nullptr &&
+        mCaptureRequestMetadataQueue != nullptr &&
+        mCaptureRequestMetadataQueue->write(
+                reinterpret_cast<const int8_t*>(aidlCameraMetadata.metadata.data()),
+                aidlCameraMetadata.metadata.size())) {
+        cameraSettings.settings.set<CaptureMetadataInfo::fmqMetadataSize>(
+                aidlCameraMetadata.metadata.size());
+    } else {
+        ALOGE("Fmq write capture result failed, falling back to hwbinder");
+        cameraSettings.settings.set<CaptureMetadataInfo::metadata>(std::move(aidlCameraMetadata));
     }
 }
 
 
-ACaptureRequest*
-CameraDevice::allocateACaptureRequest(sp<CaptureRequest>& req, const char* deviceId) {
+ACaptureRequest* CameraDevice::allocateACaptureRequest(sp<CaptureRequest>& req,
+                                                       const char* deviceId) {
     ACaptureRequest* pRequest = new ACaptureRequest();
     for (size_t i = 0; i < req->mPhysicalCameraSettings.size(); i++) {
         const std::string& id = req->mPhysicalCameraSettings[i].id;
-        CameraMetadata clone;
-        utils::convertFromHidlCloned(req->mPhysicalCameraSettings[i].settings.metadata(), &clone);
-        camera_metadata_t *clonep = clone.release();
+        camera_metadata_t* clone;
+        AidlCameraMetadata& aidlCameraMetadata = req->mPhysicalCameraSettings[i].settings
+                                                         .get<CaptureMetadataInfo::metadata>();
+        utils::cloneFromAidl(aidlCameraMetadata, &clone);
+
         if (id == deviceId) {
-            pRequest->settings = new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
+            pRequest->settings = new ACameraMetadata(clone, ACameraMetadata::ACM_REQUEST);
         } else {
             pRequest->physicalSettings[req->mPhysicalCameraSettings[i].id] =
-                    new ACameraMetadata(clonep, ACameraMetadata::ACM_REQUEST);
+                    new ACameraMetadata(clone, ACameraMetadata::ACM_REQUEST);
         }
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        const native_handle_t* anw = req->mSurfaceList[i];
+        native_handle_ptr_wrapper anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
-        pRequest->targets->mOutputs.insert(outputTarget);
+        pRequest->targets->mOutputs.insert(std::move(outputTarget));
     }
     return pRequest;
 }
 
-void
-CameraDevice::freeACaptureRequest(ACaptureRequest* req) {
+void CameraDevice::freeACaptureRequest(ACaptureRequest* req) {
     if (req == nullptr) {
         return;
     }
@@ -459,7 +494,7 @@
     }
 
     if (mCurrentSession != session) {
-        // Session has been replaced by other seesion or device is closed
+        // Session has been replaced by other session or device is closed
         return;
     }
     mCurrentSession = nullptr;
@@ -471,8 +506,8 @@
         return;
     }
 
-    // No new session, unconfigure now
-    // Note: The unconfiguration of session won't be accounted for session
+    // No new session, un-configure now
+    // Note: The un-configuration of the session won't be accounted for in session
     // latency because a stream configuration with 0 streams won't ever become
     // active.
     nsecs_t startTimeNs = systemTime();
@@ -494,8 +529,8 @@
         ALOGD("%s: binder disconnect reached", __FUNCTION__);
         auto ret = mRemote->disconnect();
         if (!ret.isOk()) {
-            ALOGE("%s: Transaction error while disconnecting device %s", __FUNCTION__,
-                  ret.description().c_str());
+            ALOGE("%s: Transaction error while disconnecting device %d", __FUNCTION__,
+                  ret.getExceptionCode());
         }
     }
     mRemote = nullptr;
@@ -505,8 +540,7 @@
     }
 }
 
-camera_status_t
-CameraDevice::stopRepeatingLocked() {
+camera_status_t CameraDevice::stopRepeatingLocked() {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
@@ -517,20 +551,14 @@
         mRepeatingSequenceId = REQUEST_ID_NONE;
 
         int64_t lastFrameNumber;
-        Status status = Status::UNKNOWN_ERROR;
-        auto remoteRet = mRemote->cancelRepeatingRequest(
-                [&status, &lastFrameNumber](Status s, auto frameNumber) {
-                    status = s;
-                    lastFrameNumber = frameNumber;
-                });
-        CHECK_TRANSACTION_AND_RET(remoteRet, status, "cancelRepeatingRequest()");
+        ScopedAStatus remoteRet = mRemote->cancelRepeatingRequest(&lastFrameNumber);
+        CHECK_TRANSACTION_AND_RET(remoteRet, "cancelRepeatingRequest()");
         checkRepeatingSequenceCompleteLocked(repeatingSequenceId, lastFrameNumber);
     }
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::flushLocked(ACameraCaptureSession* session) {
+camera_status_t CameraDevice::flushLocked(ACameraCaptureSession* session) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s abort captures failed! ret %d", getId(), ret);
@@ -571,20 +599,15 @@
     }
 
     int64_t lastFrameNumber;
-    Status status = Status::UNKNOWN_ERROR;
-    auto remoteRet = mRemote->flush([&status, &lastFrameNumber](auto s, auto frameNumber) {
-                                        status = s;
-                                        lastFrameNumber = frameNumber;
-                                    });
-    CHECK_TRANSACTION_AND_RET(remoteRet, status, "flush()")
+    ScopedAStatus remoteRet = mRemote->flush(&lastFrameNumber);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "flush()")
     if (mRepeatingSequenceId != REQUEST_ID_NONE) {
         checkRepeatingSequenceCompleteLocked(mRepeatingSequenceId, lastFrameNumber);
     }
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::waitUntilIdleLocked() {
+camera_status_t CameraDevice::waitUntilIdleLocked() {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Wait until camera %s idle failed! ret %d", getId(), ret);
@@ -597,13 +620,13 @@
     }
 
     auto remoteRet = mRemote->waitUntilIdle();
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "waitUntilIdle()")
+    CHECK_TRANSACTION_AND_RET(remoteRet, "waitUntilIdle()")
     return ACAMERA_OK;
 }
 
-camera_status_t
-CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
-        const ACaptureRequest* sessionParameters, nsecs_t startTimeNs) {
+camera_status_t CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+                                                     const ACaptureRequest* sessionParameters,
+                                                     nsecs_t startTimeNs) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -614,31 +637,37 @@
         return ret;
     }
 
-    std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> outputSet;
-    for (auto outConfig : outputs->mOutputs) {
-        const native_handle_t* anw = outConfig.mWindow;
-        OutputConfigurationWrapper outConfigInsertW;
-        OutputConfiguration &outConfigInsert = outConfigInsertW.mOutputConfiguration;
-        outConfigInsert.rotation = utils::convertToHidl(outConfig.mRotation);
+    std::map<native_handle_ptr_wrapper, OutputConfiguration> handleToConfig;
+    for (const auto& outConfig : outputs->mOutputs) {
+        native_handle_ptr_wrapper anw = outConfig.mWindow;
+        OutputConfiguration outConfigInsert;
+        outConfigInsert.rotation = utils::convertToAidl(outConfig.mRotation);
         outConfigInsert.windowGroupId = -1;
         outConfigInsert.windowHandles.resize(outConfig.mSharedWindows.size() + 1);
-        outConfigInsert.windowHandles[0] = anw;
+        outConfigInsert.windowHandles[0] = std::move(dupToAidl(anw));
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
-        native_handle_ptr_wrapper wrap(anw);
-
-        outputSet.emplace(std::make_pair(std::move(anw), std::move(outConfigInsertW)));
+        handleToConfig.insert({anw, std::move(outConfigInsert)});
     }
-    std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> addSet = outputSet;
+
+    std::set<native_handle_ptr_wrapper> addSet;
+    for (auto& kvPair : handleToConfig) {
+        addSet.insert(kvPair.first);
+    }
+
     std::vector<int32_t> deleteList;
 
     // Determine which streams need to be created, which to be deleted
     for (auto& kvPair : mConfiguredOutputs) {
         int32_t streamId = kvPair.first;
         auto& outputPair = kvPair.second;
-        if (outputSet.count(outputPair) == 0) {
-            deleteList.push_back(streamId); // Need to delete a no longer needed stream
+        auto& anw = outputPair.first;
+        auto& configuredOutput = outputPair.second;
+
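+        // A configured stream survives only if the same window is requested again
+        // with an identical OutputConfiguration; otherwise it is deleted and, if
+        // still requested, re-created below.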
+        auto itr = handleToConfig.find(anw);
+        if (itr == handleToConfig.end() || (itr->second) != configuredOutput) {
+            deleteList.push_back(streamId);
         } else {
-            addSet.erase(outputPair);        // No need to add already existing stream
+            addSet.erase(anw);
         }
     }
 
@@ -673,106 +702,96 @@
     mIdle = true;
 
     auto remoteRet = mRemote->beginConfigure();
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "beginConfigure()")
+    CHECK_TRANSACTION_AND_RET(remoteRet, "beginConfigure()")
 
     // delete to-be-deleted streams
     for (auto streamId : deleteList) {
         remoteRet = mRemote->deleteStream(streamId);
-        CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "deleteStream()")
+        CHECK_TRANSACTION_AND_RET(remoteRet, "deleteStream()")
         mConfiguredOutputs.erase(streamId);
     }
 
     // add new streams
-    for (const auto &outputPair : addSet) {
-        int streamId;
-        Status status = Status::UNKNOWN_ERROR;
-        auto ret = mRemote->createStream(outputPair.second,
-                                         [&status, &streamId](Status s, auto stream_id) {
-                                             status = s;
-                                             streamId = stream_id;
-                                         });
-        CHECK_TRANSACTION_AND_RET(ret, status, "createStream()")
-        mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
+    for (const auto &anw : addSet) {
+        int32_t streamId;
+        auto itr = handleToConfig.find(anw);
+        remoteRet = mRemote->createStream(itr->second, &streamId);
+        CHECK_TRANSACTION_AND_RET(remoteRet, "createStream()")
+        mConfiguredOutputs.insert(std::make_pair(streamId,
+                                                 std::make_pair(anw,
+                                                                std::move(itr->second))));
+        handleToConfig.erase(itr);
     }
 
-    CameraMetadata params;
-    HCameraMetadata hidlParams;
+    AidlCameraMetadata aidlParams;
     if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
-        params.append(sessionParameters->settings->getInternalData());
-        const camera_metadata_t *params_metadata = params.getAndLock();
-        utils::convertToHidl(params_metadata, &hidlParams);
-        params.unlock(params_metadata);
+        const CameraMetadata &params = sessionParameters->settings->getInternalData();
+        const camera_metadata_t* paramsMetadata = params.getAndLock();
+        utils::convertToAidl(paramsMetadata, &aidlParams);
+        params.unlock(paramsMetadata);
     }
-    remoteRet = mRemote->endConfigure_2_1(StreamConfigurationMode::NORMAL_MODE,
-                                          hidlParams, startTimeNs);
-    CHECK_TRANSACTION_AND_RET(remoteRet, remoteRet, "endConfigure()")
+    remoteRet = mRemote->endConfigure(StreamConfigurationMode::NORMAL_MODE,
+                                      aidlParams, startTimeNs);
+    CHECK_TRANSACTION_AND_RET(remoteRet, "endConfigure()")
     return ACAMERA_OK;
 }
 
-void
-CameraDevice::setRemoteDevice(sp<ICameraDeviceUser> remote) {
+void CameraDevice::setRemoteDevice(std::shared_ptr<ICameraDeviceUser> remote) {
     Mutex::Autolock _l(mDeviceLock);
-    mRemote = remote;
+    mRemote = std::move(remote);
 }
 
-bool
-CameraDevice::setDeviceMetadataQueues() {
+bool CameraDevice::setDeviceMetadataQueues() {
         if (mRemote == nullptr) {
           ALOGE("mRemote must not be null while trying to fetch metadata queues");
           return false;
         }
         std::shared_ptr<RequestMetadataQueue> &reqQueue = mCaptureRequestMetadataQueue;
-        auto ret =
-            mRemote->getCaptureRequestMetadataQueue(
-                [&reqQueue](const auto &mqDescriptor) {
-                    reqQueue = std::make_shared<RequestMetadataQueue>(mqDescriptor);
-                    if (!reqQueue->isValid() || reqQueue->availableToWrite() <=0) {
-                        ALOGE("Empty fmq from cameraserver");
-                        reqQueue = nullptr;
-                    }
-                });
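+        // AIDL returns the queue descriptor through an out parameter instead of a
+        // callback, so construct the AidlMessageQueue from it directly.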
+        MQDescriptor<int8_t, SynchronizedReadWrite> reqMqDescriptor;
+        ScopedAStatus ret = mRemote->getCaptureRequestMetadataQueue(&reqMqDescriptor);
         if (!ret.isOk()) {
             ALOGE("Transaction error trying to get capture request metadata queue");
             return false;
         }
+        reqQueue = std::make_shared<RequestMetadataQueue>(reqMqDescriptor);
+        if (!reqQueue->isValid() || reqQueue->availableToWrite() <= 0) {
+            ALOGE("Empty fmq from cameraserver");
+            reqQueue = nullptr;
+        }
+
+        MQDescriptor<int8_t, SynchronizedReadWrite> resMqDescriptor;
         std::shared_ptr<ResultMetadataQueue> &resQueue = mCaptureResultMetadataQueue;
-        ret =
-                mRemote->getCaptureResultMetadataQueue(
-                        [&resQueue](const auto &mqDescriptor) {
-                            resQueue = std::make_shared<ResultMetadataQueue>(mqDescriptor);
-                            if (!resQueue->isValid() || resQueue->availableToWrite() <=0) {
-                                ALOGE("Empty fmq from cameraserver");
-                            }
-                        });
+        ret = mRemote->getCaptureResultMetadataQueue(&resMqDescriptor);
         if (!ret.isOk()) {
             ALOGE("Transaction error trying to get capture result metadata queue");
             return false;
         }
+        resQueue = std::make_shared<ResultMetadataQueue>(resMqDescriptor);
+        if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+            ALOGE("Empty fmq from cameraserver");
+        }
+
         return true;
 }
 
-camera_status_t
-CameraDevice::checkCameraClosedOrErrorLocked() const {
+camera_status_t CameraDevice::checkCameraClosedOrErrorLocked() const {
     if (mRemote == nullptr) {
         ALOGE("%s: camera device already closed", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    if (mInError) {// triggered by onDeviceError
-        ALOGE("%s: camera device has encountered a serious error", __FUNCTION__);
+    if (mInError) { // triggered by onDeviceError
+        ALOGE("%s: camera device has encountered a serious error: %d", __FUNCTION__, mError);
         return mError;
     }
     return ACAMERA_OK;
 }
 
-void
-CameraDevice::setCameraDeviceErrorLocked(camera_status_t error) {
+void CameraDevice::setCameraDeviceErrorLocked(camera_status_t error) {
     mInError = true;
     mError = error;
-    return;
 }
 
-void
-CameraDevice::FrameNumberTracker::updateTracker(int64_t frameNumber, bool isError) {
+void CameraDevice::FrameNumberTracker::updateTracker(int64_t frameNumber, bool isError) {
     ALOGV("updateTracker frame %" PRId64 " isError %d", frameNumber, isError);
     if (isError) {
         mFutureErrorSet.insert(frameNumber);
@@ -791,8 +810,7 @@
     update();
 }
 
-void
-CameraDevice::FrameNumberTracker::update() {
+void CameraDevice::FrameNumberTracker::update() {
     for (auto it = mFutureErrorSet.begin(); it != mFutureErrorSet.end();) {
         int64_t errorFrameNumber = *it;
         if (errorFrameNumber == mCompletedFrameNumber + 1) {
@@ -811,10 +829,8 @@
     ALOGV("Update complete frame %" PRId64, mCompletedFrameNumber);
 }
 
-void
-CameraDevice::onCaptureErrorLocked(
-        ErrorCode errorCode,
-        const CaptureResultExtras& resultExtras) {
+void CameraDevice::onCaptureErrorLocked(ErrorCode errorCode,
+                                        const CaptureResultExtras& resultExtras) {
     int sequenceId = resultExtras.requestId;
     int64_t frameNumber = resultExtras.frameNumber;
     int32_t burstId = resultExtras.burstId;
@@ -826,7 +842,7 @@
         return;
     }
 
-    CallbackHolder cbh = (*it).second;
+    CallbackHolder cbh = it->second;
     sp<ACameraCaptureSession> session = cbh.mSession;
     if ((size_t) burstId >= cbh.mRequests.size()) {
         ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -852,7 +868,7 @@
         // them and try to match the surfaces in the corresponding
         // CaptureRequest.
         const auto& errorWindowHandles =
-                outputPairIt->second.second.mOutputConfiguration.windowHandles;
+                outputPairIt->second.second.windowHandles;
         for (const auto& errorWindowHandle : errorWindowHandles) {
             for (const auto &requestStreamAndWindowId :
                         request->mCaptureRequest.streamAndWindowIds) {
@@ -869,11 +885,11 @@
                 }
 
                 const auto &requestWindowHandles =
-                        requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
-                if (utils::isWindowNativeHandleEqual(
-                        requestWindowHandles[requestWindowId], errorWindowHandle)) {
-                    const native_handle_t* anw =
-                            requestWindowHandles[requestWindowId].getNativeHandle();
+                        requestSurfacePairIt->second.second.windowHandles;
+
+                if (requestWindowHandles[requestWindowId] == errorWindowHandle) {
+                    const native_handle_t* anw = makeFromAidl(
+                            requestWindowHandles[requestWindowId]);
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -898,14 +914,16 @@
         failure->sequenceId  = sequenceId;
         failure->wasImageCaptured = (errorCode == ErrorCode::CAMERA_RESULT);
 
-        sp<AMessage> msg = new AMessage(cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureFail :
-                kWhatCaptureFail, mHandler);
+        sp<AMessage> msg = new AMessage(cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureFail
+                                                                     : kWhatCaptureFail,
+                                        mHandler);
         msg->setPointer(kContextKey, cbh.mContext);
         msg->setObject(kSessionSpKey, session);
         if (cbh.mIsLogicalCameraCallback) {
-            if (resultExtras.errorPhysicalCameraId.size() > 0) {
-                msg->setString(kFailingPhysicalCameraId, resultExtras.errorPhysicalCameraId.c_str(),
-                        resultExtras.errorPhysicalCameraId.size());
+            if (!resultExtras.errorPhysicalCameraId.empty()) {
+                msg->setString(kFailingPhysicalCameraId,
+                               resultExtras.errorPhysicalCameraId.c_str(),
+                               resultExtras.errorPhysicalCameraId.size());
             }
             msg->setPointer(kCallbackFpKey, (void*) cbh.mOnLogicalCameraCaptureFailed);
         } else {
@@ -919,7 +937,6 @@
         mFrameNumberTracker.updateTracker(frameNumber, /*isError*/true);
         checkAndFireSequenceCompleteLocked();
     }
-    return;
 }
 
 CameraDevice::CallbackHandler::CallbackHandler(const char *id) : mId(id) { }
@@ -939,6 +956,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
             ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
             break;
         case kWhatCleanUpSessions:
@@ -1012,6 +1030,7 @@
         case kWhatCaptureSeqEnd:
         case kWhatCaptureSeqAbort:
         case kWhatCaptureBufferLost:
+        case kWhatPreparedCb:
         {
             sp<RefBase> obj;
             found = msg->findObject(kSessionSpKey, &obj);
@@ -1020,7 +1039,7 @@
                 return;
             }
             sp<ACameraCaptureSession> session(static_cast<ACameraCaptureSession*>(obj.get()));
-            mCachedSessions.push(session);
+            mCachedSessions.push_back(session);
             sp<CaptureRequest> requestSp = nullptr;
             const char *id_cstr = mId.c_str();
             switch (msg->what()) {
@@ -1055,6 +1074,26 @@
                     (*onState)(context, session.get());
                     break;
                 }
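+                // Delivers the app's ACameraCaptureSession_prepareCallback with the
+                // native window whose buffers finished pre-allocation.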
+                case kWhatPreparedCb:
+                {
+                    ACameraCaptureSession_prepareCallback onWindowPrepared;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onWindowPrepared);
+                    if (!found) {
+                        ALOGE("%s: Cannot find state callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onWindowPrepared == nullptr) {
+                        return;
+                    }
+                    native_handle_t* anw;
+                    found = msg->findPointer(kAnwKey, (void**) &anw);
+                    if (!found) {
+                        ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
+                        return;
+                    }
+                    (*onWindowPrepared)(context, anw, session.get());
+                    break;
+                }
                 case kWhatCaptureStart:
                 {
                     ACameraCaptureSession_captureCallback_start onStart;
@@ -1167,7 +1206,8 @@
                         clone.update(ANDROID_SYNC_FRAME_NUMBER,
                                 &physicalResult->mFrameNumber, /*data_count*/1);
                         sp<ACameraMetadata> metadata =
-                                new ACameraMetadata(clone.release(), ACameraMetadata::ACM_RESULT);
+                                new ACameraMetadata(clone.release(),
+                                                    ACameraMetadata::ACM_RESULT);
                         physicalMetadataCopy.push_back(metadata);
                     }
                     std::vector<const char*> physicalCameraIdPtrs;
@@ -1302,7 +1342,7 @@
                         return;
                     }
 
-                    const native_handle_t* anw;
+                    native_handle_t* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
@@ -1319,6 +1359,7 @@
                     ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
                     (*onBufferLost)(context, session.get(), request, anw, frameNumber);
                     freeACaptureRequest(request);
+                    native_handle_delete(anw); // clean up anw as it was copied from AIDL
                     break;
                 }
             }
@@ -1329,10 +1370,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_captureCallbacks* cbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(false),
         mIsLogicalCameraCallback(false) {
@@ -1346,10 +1387,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(false),
         mIsLogicalCameraCallback(true) {
@@ -1363,10 +1404,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_captureCallbacksV2* cbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(true),
         mIsLogicalCameraCallback(false) {
@@ -1380,10 +1421,10 @@
 
 CameraDevice::CallbackHolder::CallbackHolder(
         sp<ACameraCaptureSession>          session,
-        const Vector<sp<CaptureRequest> >& requests,
+        std::vector<sp<CaptureRequest>>  requests,
         bool                               isRepeating,
         ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
-        mSession(session), mRequests(requests),
+        mSession(std::move(session)), mRequests(std::move(requests)),
         mIsRepeating(isRepeating),
         mIs2Callback(true),
         mIsLogicalCameraCallback(true) {
@@ -1501,23 +1542,21 @@
 /**
   * Camera service callback implementation
   */
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onDeviceError(
-        ErrorCode errorCode,
-        const CaptureResultExtras& resultExtras) {
+ScopedAStatus CameraDevice::ServiceCallback::onDeviceError(
+        ErrorCode errorCode, const CaptureResultExtras& resultExtras) {
     ALOGD("Device error received, code %d, frame number %" PRId64 ", request ID %d, subseq ID %d"
             " physical camera ID %s", errorCode, resultExtras.frameNumber, resultExtras.requestId,
             resultExtras.burstId, resultExtras.errorPhysicalCameraId.c_str());
-    auto ret = Void();
-    sp<CameraDevice> dev = mDevice.promote();
+
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->mRemote == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     switch (errorCode) {
         case ErrorCode::CAMERA_DISCONNECTED:
@@ -1570,26 +1609,25 @@
             dev->onCaptureErrorLocked(errorCode, resultExtras);
             break;
     }
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onDeviceIdle() {
+ScopedAStatus CameraDevice::ServiceCallback::onDeviceIdle() {
     ALOGV("Camera is now idle");
-    auto ret = Void();
-    sp<CameraDevice> dev = mDevice.promote();
+
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->isClosed() || dev->mRemote == nullptr) {
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     if (dev->mIdle) {
         // Already in idle state. Possibly other thread did waitUntilIdle
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     if (dev->mCurrentSession != nullptr) {
@@ -1597,13 +1635,14 @@
         if (dev->mBusySession != dev->mCurrentSession) {
             ALOGE("Current session != busy session");
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
-            return ret;
+            return ScopedAStatus::ok();
         }
 
         sp<AMessage> msg = new AMessage(kWhatSessionStateCb, dev->mHandler);
         msg->setPointer(kContextKey, dev->mBusySession->mUserSessionCallback.context);
         msg->setObject(kSessionSpKey, dev->mBusySession);
-        msg->setPointer(kCallbackFpKey, (void*) dev->mBusySession->mUserSessionCallback.onReady);
+        msg->setPointer(kCallbackFpKey,
+                        (void*) dev->mBusySession->mUserSessionCallback.onReady);
         // Make sure we clear the sp first so the session destructor can
         // only happen on handler thread (where we don't hold device/session lock)
         dev->mBusySession.clear();
@@ -1611,22 +1650,20 @@
     }
     dev->mIdle = true;
     dev->mFlushing = false;
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onCaptureStarted(
-        const CaptureResultExtras& resultExtras,
-        uint64_t timestamp) {
-    auto ret = Void();
 
-    sp<CameraDevice> dev = mDevice.promote();
+
+        const CaptureResultExtras& resultExtras, int64_t timestamp) {
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->isClosed() || dev->mRemote == nullptr) {
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     int32_t sequenceId = resultExtras.requestId;
@@ -1635,7 +1672,7 @@
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
-        CallbackHolder cbh = (*it).second;
+        CallbackHolder &cbh = it->second;
         ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
         ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
         bool v2Callback = cbh.mIs2Callback;
@@ -1646,6 +1683,7 @@
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
         }
         sp<CaptureRequest> request = cbh.mRequests[burstId];
+        ALOGE("%s: request = %p", __FUNCTION__, request.get());
         sp<AMessage> msg = nullptr;
         if (v2Callback) {
             msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
@@ -1661,24 +1699,22 @@
         msg->setInt64(kFrameNumberKey, frameNumber);
         dev->postSessionMsgAndCleanup(msg);
     }
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onResultReceived(
-        const FmqSizeOrMetadata& resultMetadata,
+ScopedAStatus CameraDevice::ServiceCallback::onResultReceived(
+        const CaptureMetadataInfo& resultMetadata,
         const CaptureResultExtras& resultExtras,
-        const hidl_vec<PhysicalCaptureResultInfo>& physicalResultInfos) {
-    auto ret = Void();
+        const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
 
-    sp<CameraDevice> dev = mDevice.promote();
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
     int32_t sequenceId = resultExtras.requestId;
     int64_t frameNumber = resultExtras.frameNumber;
     int32_t burstId = resultExtras.burstId;
-    bool    isPartialResult = (resultExtras.partialResultCount < dev->mPartialResultCount);
+    bool isPartialResult = (resultExtras.partialResultCount < dev->mPartialResultCount);
 
     if (!isPartialResult) {
         ALOGV("SeqId %d frame %" PRId64 " result arrive.", sequenceId, frameNumber);
@@ -1686,7 +1722,7 @@
 
     Mutex::Autolock _l(dev->mDeviceLock);
     if (dev->mRemote == nullptr) {
-        return ret; // device has been disconnected
+        return ScopedAStatus::ok(); // device has been disconnected
     }
 
     if (dev->isClosed()) {
@@ -1694,7 +1730,7 @@
             dev->mFrameNumberTracker.updateTracker(frameNumber, /*isError*/false);
         }
         // early return to avoid callback sent to closed devices
-        return ret;
+        return ScopedAStatus::ok();
     }
 
     CameraMetadata metadataCopy;
@@ -1702,11 +1738,12 @@
             dev->mCaptureResultMetadataQueue.get(), &metadataCopy);
     if (status != ACAMERA_OK) {
         ALOGE("%s: result metadata couldn't be converted", __FUNCTION__);
-        return ret;
+        return ScopedAStatus::ok();
     }
 
-    metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize, /*data_count*/2);
-    metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /*data_count*/1);
+    metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize,
+                        /* data_count= */ 2);
+    metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /* data_count= */ 1);
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
@@ -1730,7 +1767,7 @@
                     &localPhysicalResult[i].physicalMetadata);
             if (status != ACAMERA_OK) {
                 ALOGE("%s: physical camera result metadata couldn't be converted", __FUNCTION__);
-                return ret;
+                return ScopedAStatus::ok();
             }
         }
         sp<ACameraPhysicalCaptureResultInfo> physicalResult(
@@ -1762,17 +1799,14 @@
         dev->checkAndFireSequenceCompleteLocked();
     }
 
-    return ret;
+    return ScopedAStatus::ok();
 }
 
-android::hardware::Return<void>
-CameraDevice::ServiceCallback::onRepeatingRequestError(
-        uint64_t lastFrameNumber, int32_t stoppedSequenceId) {
-    auto ret = Void();
-
-    sp<CameraDevice> dev = mDevice.promote();
+ScopedAStatus CameraDevice::ServiceCallback::onRepeatingRequestError(int64_t lastFrameNumber,
+                                                                     int32_t stoppedSequenceId) {
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
     if (dev == nullptr) {
-        return ret; // device has been closed
+        return ScopedAStatus::ok(); // device has been closed
     }
 
     Mutex::Autolock _l(dev->mDeviceLock);
@@ -1784,33 +1818,72 @@
 
     dev->checkRepeatingSequenceCompleteLocked(repeatingSequenceId, lastFrameNumber);
 
-    return ret;
+    return ScopedAStatus::ok();
+}
+
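+// Relays the service's stream-prepared notification: looks up the window for
+// |streamId| among the configured outputs and posts kWhatPreparedCb to the
+// callback handler so the app callback runs off the binder thread.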
+ScopedAStatus CameraDevice::ServiceCallback::onPrepared(int32_t streamId) {
+    ALOGV("%s: callback for stream id %d", __FUNCTION__, streamId);
+    std::shared_ptr<CameraDevice> dev = mDevice.lock();
+    if (dev == nullptr) {
+        return ScopedAStatus::ok();
+    }
+    Mutex::Autolock _l(dev->mDeviceLock);
+    if (dev->isClosed() || dev->mRemote == nullptr) {
+        return ScopedAStatus::ok();
+    }
+    auto it = dev->mConfiguredOutputs.find(streamId);
+    if (it == dev->mConfiguredOutputs.end()) {
+        ALOGE("%s: stream id %d does not exist", __FUNCTION__ , streamId);
+        return ScopedAStatus::ok();
+    }
+    sp<ACameraCaptureSession> session = dev->mCurrentSession.promote();
+    if (session == nullptr) {
+        ALOGE("%s: Session is dead already", __FUNCTION__ );
+        return ScopedAStatus::ok();
+    }
+    // We've found the window corresponding to the surface id.
+    const native_handle_t *anw = it->second.first.mWindow;
+    sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
+    msg->setPointer(kContextKey, session->mPreparedCb.context);
+    msg->setPointer(kAnwKey, (void *)anw);
+    msg->setObject(kSessionSpKey, session);
+    msg->setPointer(kCallbackFpKey, (void *)session->mPreparedCb.onWindowPrepared);
+    dev->postSessionMsgAndCleanup(msg);
+    return ScopedAStatus::ok();
 }
 
 camera_status_t CameraDevice::ServiceCallback::readOneResultMetadata(
-        const FmqSizeOrMetadata& fmqSizeOrMetadata, ResultMetadataQueue* metadataQueue,
+        const CaptureMetadataInfo& captureMetadataInfo, ResultMetadataQueue* metadataQueue,
         CameraMetadata* metadata) {
     if (metadataQueue == nullptr || metadata == nullptr) {
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
     bool converted;
-    HCameraMetadata hCameraMetadata;
-    if (fmqSizeOrMetadata.getDiscriminator() ==
-            FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
-        hCameraMetadata.resize(fmqSizeOrMetadata.fmqMetadataSize());
-        bool read = metadataQueue->read(
-                hCameraMetadata.data(), fmqSizeOrMetadata.fmqMetadataSize());
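+    // The result either carries the serialized metadata inline or just its size,
+    // in which case the bytes are read back from the result FMQ before being
+    // cloned into a CameraMetadata owned by the NDK side.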
+    AidlCameraMetadata aidlCameraMetadata;
+    std::vector<uint8_t>& metadataVec = aidlCameraMetadata.metadata;
+    camera_metadata_t* clonedMetadata;
+    if (captureMetadataInfo.getTag() == CaptureMetadataInfo::fmqMetadataSize) {
+        int64_t size = captureMetadataInfo.get<CaptureMetadataInfo::fmqMetadataSize>();
+        metadataVec.resize(size);
+        bool read = metadataQueue->read(reinterpret_cast<int8_t*>(metadataVec.data()), size);
         if (!read) {
             ALOGE("%s capture request settings could't be read from fmq", __FUNCTION__);
             return ACAMERA_ERROR_UNKNOWN;
         }
         // TODO: Do we actually need to clone here ?
-        converted = utils::convertFromHidlCloned(hCameraMetadata, metadata);
+        converted = utils::cloneFromAidl(aidlCameraMetadata, &clonedMetadata);
     } else {
-        converted = utils::convertFromHidlCloned(fmqSizeOrMetadata.metadata(), metadata);
+        const AidlCameraMetadata &embeddedMetadata =
+                captureMetadataInfo.get<CaptureMetadataInfo::metadata>();
+        converted = utils::cloneFromAidl(embeddedMetadata, &clonedMetadata);
     }
 
-    return converted ? ACAMERA_OK : ACAMERA_ERROR_UNKNOWN;
+    if (converted) {
+        *metadata = CameraMetadata(clonedMetadata);
+        return ACAMERA_OK;
+    }
+
+    return ACAMERA_ERROR_UNKNOWN;
 }
 
 } // namespace acam
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index c306206..6e0c772 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -16,54 +16,63 @@
 #ifndef _ACAMERA_DEVICE_H
 #define _ACAMERA_DEVICE_H
 
-#include <memory>
-#include <map>
-#include <set>
-#include <atomic>
-#include <utility>
-#include <vector>
-#include <utils/StrongPointer.h>
-#include <utils/Mutex.h>
-#include <utils/List.h>
-#include <utils/Vector.h>
-#include <android/frameworks/cameraservice/device/2.1/ICameraDeviceUser.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceCallback.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <fmq/MessageQueue.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <camera/NdkCameraManager.h>
-#include <camera/NdkCameraCaptureSession.h>
-
 #include "ACameraMetadata.h"
 #include "utils.h"
 
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceCallback.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureResultExtras.h>
+#include <aidl/android/frameworks/cameraservice/device/ErrorCode.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/ICameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <atomic>
+#include <camera/NdkCameraCaptureSession.h>
+#include <camera/NdkCameraManager.h>
+#include <fmq/AidlMessageQueue.h>
+#include <fmq/MessageQueue.h>
+#include <map>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <memory>
+#include <set>
+#include <utility>
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+#include <vector>
+
 namespace android {
 namespace acam {
 
-using ICameraDeviceCallback = frameworks::cameraservice::device::V2_0::ICameraDeviceCallback;
-using ICameraDeviceUser_2_0 = frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
-using ICameraDeviceUser = frameworks::cameraservice::device::V2_1::ICameraDeviceUser;
-using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
-using PhysicalCaptureResultInfo = frameworks::cameraservice::device::V2_0::PhysicalCaptureResultInfo;
-using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
-using SubmitInfo = frameworks::cameraservice::device::V2_0::SubmitInfo;
-using CaptureResultExtras = frameworks::cameraservice::device::V2_0::CaptureResultExtras;
-using ErrorCode = frameworks::cameraservice::device::V2_0::ErrorCode;
-using FmqSizeOrMetadata = frameworks::cameraservice::device::V2_0::FmqSizeOrMetadata;
-using StreamConfigurationMode = frameworks::cameraservice::device::V2_0::StreamConfigurationMode;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-using CameraStatusAndId = frameworks::cameraservice::service::V2_0::CameraStatusAndId;
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::device::BnCameraDeviceCallback;
+using ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using ::aidl::android::frameworks::cameraservice::device::ErrorCode;
+using ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+using ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::acam::utils::native_handle_ptr_wrapper;
 
-using hardware::hidl_vec;
-using hardware::hidl_string;
-using utils::native_handle_ptr_wrapper;
+
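+// Both request and result queues are AIDL fast message queues carrying the
+// serialized camera metadata as int8_t bytes.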
+using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+using RequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
 using utils::CaptureRequest;
-using utils::OutputConfigurationWrapper;
 
 // Wrap ACameraCaptureFailure so it can be ref-counted
 struct CameraCaptureFailure : public RefBase, public ACameraCaptureFailure { };
@@ -83,13 +92,16 @@
     int64_t mFrameNumber;
 };
 
-class CameraDevice final : public RefBase {
+class CameraDevice final : public std::enable_shared_from_this<CameraDevice> {
   public:
     CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars,
                   ACameraDevice* wrapper);
     ~CameraDevice();
 
+    // Called to initialize fields that require shared_ptr to `this`
+    void init();
+
     inline const char* getId() const { return mCameraId.c_str(); }
 
     camera_status_t createCaptureRequest(
@@ -107,30 +119,36 @@
             const ACaptureSessionOutputContainer* sessionOutputContainer) const;
 
     // Callbacks from camera service
-    class ServiceCallback : public ICameraDeviceCallback {
+    class ServiceCallback : public BnCameraDeviceCallback {
       public:
-        explicit ServiceCallback(CameraDevice* device) : mDevice(device) {}
-        android::hardware::Return<void> onDeviceError(ErrorCode errorCode,
-                           const CaptureResultExtras& resultExtras) override;
-        android::hardware::Return<void> onDeviceIdle() override;
-        android::hardware::Return<void> onCaptureStarted(const CaptureResultExtras& resultExtras,
-                              uint64_t timestamp) override;
-        android::hardware::Return<void> onResultReceived(const FmqSizeOrMetadata& result,
-                              const CaptureResultExtras& resultExtras,
-                              const hidl_vec<PhysicalCaptureResultInfo>& physicalResultInfos) override;
-        android::hardware::Return<void> onRepeatingRequestError(uint64_t lastFrameNumber,
-                int32_t stoppedSequenceId) override;
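+        // Holds only a weak_ptr back to the device so this binder callback
+        // object never keeps the CameraDevice alive on its own.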
+        explicit ServiceCallback(std::weak_ptr<CameraDevice> device) :
+              mDevice(std::move(device)) {}
+
+        ndk::ScopedAStatus onDeviceError(ErrorCode in_errorCode,
+                                         const CaptureResultExtras& in_resultExtras) override;
+        ndk::ScopedAStatus onDeviceIdle() override;
+
+        ndk::ScopedAStatus onCaptureStarted(const CaptureResultExtras& in_resultExtras,
+                                            int64_t in_timestamp) override;
+        ndk::ScopedAStatus onPrepared(int32_t in_streamId) override;
+        ndk::ScopedAStatus onRepeatingRequestError(int64_t in_lastFrameNumber,
+                                                   int32_t in_repeatingRequestId) override;
+        ndk::ScopedAStatus onResultReceived(const CaptureMetadataInfo& in_result,
+                                            const CaptureResultExtras& in_resultExtras,
+                                            const std::vector<PhysicalCaptureResultInfo>&
+                                                    in_physicalCaptureResultInfos) override;
+
       private:
-        camera_status_t readOneResultMetadata(const FmqSizeOrMetadata& fmqSizeOrMetadata,
+        camera_status_t readOneResultMetadata(const CaptureMetadataInfo& captureMetadataInfo,
                 ResultMetadataQueue* metadataQueue, CameraMetadata* metadata);
-        const wp<CameraDevice> mDevice;
+        const std::weak_ptr<CameraDevice> mDevice;
     };
-    inline sp<ICameraDeviceCallback> getServiceCallback() {
+    inline std::shared_ptr<BnCameraDeviceCallback> getServiceCallback() {
         return mServiceCallback;
     };
 
     // Camera device is only functional after remote being set
-    void setRemoteDevice(sp<ICameraDeviceUser> remote);
+    void setRemoteDevice(std::shared_ptr<ICameraDeviceUser> remote);
 
     bool setDeviceMetadataQueues();
     inline ACameraDevice* getWrapper() const { return mWrapper; };
@@ -179,6 +197,8 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
+    camera_status_t prepareLocked(ACameraWindowType *window);
+
     // Since this writes to ICameraDeviceUser's fmq, clients must take care that:
     //   a) This function is called serially.
     //   b) This function is called in accordance with ICameraDeviceUser.submitRequestList,
@@ -208,15 +228,15 @@
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
 
     mutable Mutex mDeviceLock;
-    const hidl_string mCameraId;                          // Camera ID
+    const std::string mCameraId;                          // Camera ID
     const ACameraDevice_StateCallbacks mAppCallbacks; // Callback to app
     const sp<ACameraMetadata> mChars;    // Camera characteristics
-    const sp<ServiceCallback> mServiceCallback;
+    std::shared_ptr<ServiceCallback> mServiceCallback;
     ACameraDevice* mWrapper;
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
-    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> mConfiguredOutputs;
+    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfiguration>> mConfiguredOutputs;
 
     // TODO: maybe a bool will suffice for synchronous implementation?
     std::atomic_bool mClosing;
@@ -232,7 +252,7 @@
     // This will avoid a busy session being deleted before it's back to idle state
     sp<ACameraCaptureSession> mBusySession;
 
-    sp<ICameraDeviceUser> mRemote;
+    std::shared_ptr<ICameraDeviceUser> mRemote;
 
     // Looper thread to handle callback to app
     sp<ALooper> mCbLooper;
@@ -252,7 +272,8 @@
         kWhatLogicalCaptureFail, // onLogicalCameraCaptureFailed
         kWhatCaptureSeqEnd,    // onCaptureSequenceCompleted
         kWhatCaptureSeqAbort,  // onCaptureSequenceAborted
-        kWhatCaptureBufferLost,// onCaptureBufferLost
+        kWhatCaptureBufferLost, // onCaptureBufferLost
+        kWhatPreparedCb, // onPrepared
         // Internal cleanup
         kWhatCleanUpSessions   // Cleanup cached sp<ACameraCaptureSession>
     };
@@ -281,7 +302,7 @@
         // This handler will cache all capture session sp until kWhatCleanUpSessions
         // is processed. This is used to guarantee the last session reference is always
         // being removed in callback thread without holding camera device lock
-        Vector<sp<ACameraCaptureSession>> mCachedSessions;
+        std::vector<sp<ACameraCaptureSession>> mCachedSessions;
     };
     sp<CallbackHandler> mHandler;
 
@@ -303,19 +324,19 @@
 
     struct CallbackHolder {
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest>>&  requests,
+                       std::vector<sp<CaptureRequest>>   requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_captureCallbacks* cbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest>>&  requests,
+                       std::vector<sp<CaptureRequest>>   requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest> >& requests,
+                       std::vector<sp<CaptureRequest> >  requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_captureCallbacksV2* cbs);
         CallbackHolder(sp<ACameraCaptureSession>          session,
-                       const Vector<sp<CaptureRequest> >& requests,
+                       std::vector<sp<CaptureRequest> >  requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
         void clearCallbacks() {
@@ -359,7 +380,7 @@
         }
 
         sp<ACameraCaptureSession>   mSession;
-        Vector<sp<CaptureRequest>>  mRequests;
+        std::vector<sp<CaptureRequest>>  mRequests;
         const bool                  mIsRepeating;
         const bool                  mIs2Callback;
         const bool                  mIsLogicalCameraCallback;
@@ -401,7 +422,7 @@
     // Misc variables
     int32_t mShadingMapSize[2];   // const after constructor
     int32_t mPartialResultCount;  // const after constructor
-    std::shared_ptr<ResultMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
+    std::shared_ptr<RequestMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
     std::shared_ptr<ResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
 };
 
@@ -415,7 +436,10 @@
 struct ACameraDevice {
     ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
                   sp<ACameraMetadata> chars) :
-            mDevice(new android::acam::CameraDevice(id, cb, std::move(chars), this)) {}
+            mDevice(std::make_shared<android::acam::CameraDevice>(id, cb,
+                                                                std::move(chars), this)) {
+        mDevice->init();
+    }
 
     ~ACameraDevice();
     /*******************
@@ -446,19 +470,20 @@
     /***********************
      * Device interal APIs *
      ***********************/
-    inline android::sp<android::acam::ICameraDeviceCallback> getServiceCallback() {
+    inline std::shared_ptr<android::acam::BnCameraDeviceCallback> getServiceCallback() {
         return mDevice->getServiceCallback();
     };
 
     // Camera device is only functional after remote being set
-    inline void setRemoteDevice(android::sp<android::acam::ICameraDeviceUser> remote) {
+    inline void setRemoteDevice(std::shared_ptr<
+            ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser> remote) {
         mDevice->setRemoteDevice(remote);
     }
     inline bool setDeviceMetadataQueues() {
         return mDevice->setDeviceMetadataQueues();
     }
   private:
-    android::sp<android::acam::CameraDevice> mDevice;
+    std::shared_ptr<android::acam::CameraDevice> mDevice;
 };
 
 #endif // _ACAMERA_DEVICE_H
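Reviewer note: the wrapper above now builds CameraDevice with std::make_shared and then calls init() as a separate step. A plausible reason (not stated in the change) is that shared_from_this()/weak_from_this() cannot be used inside a constructor, because no shared_ptr owns the object yet. A minimal sketch of that two-phase pattern, with illustrative names rather than the real CameraDevice API:

    #include <memory>

    // Illustrative only: stands in for the real CameraDevice.
    class Device : public std::enable_shared_from_this<Device> {
      public:
        Device() = default;   // weak_from_this() would still be empty here
        void init() {
            // Safe: a shared_ptr owns *this by now, so a weak self-reference
            // can be handed out to callbacks.
            mSelf = weak_from_this();
        }
      private:
        std::weak_ptr<Device> mSelf;
    };

    std::shared_ptr<Device> makeDevice() {
        auto device = std::make_shared<Device>();
        device->init();   // mirrors mDevice->init() in the ACameraDevice constructor
        return device;
    }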
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
index 8bd5a52..1e724eb 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -14,17 +14,14 @@
  * limitations under the License.
  */
 
-#include <vector>
-#include <inttypes.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <CameraMetadata.h>
-
-#include "ndk_vendor/impl/ACameraDevice.h"
 #include "ACameraCaptureSession.h"
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
 #include "utils.h"
+#include <CameraMetadata.h>
+#include <inttypes.h>
+#include <ndk_vendor/impl/ACameraDevice.h>
+#include <vector>
 
 using namespace android;
 
@@ -32,22 +29,22 @@
 namespace acam {
 
 template<class T>
-camera_status_t
-CameraDevice::captureLocked(
+camera_status_t CameraDevice::captureLocked(
         sp<ACameraCaptureSession> session,
         /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        int numRequests,
+        ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     return submitRequestsLocked(
             session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
 }
 
 template<class T>
-camera_status_t
-CameraDevice::setRepeatingRequestsLocked(
+camera_status_t CameraDevice::setRepeatingRequestsLocked(
         sp<ACameraCaptureSession> session,
         /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        int numRequests,
+        ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     return submitRequestsLocked(
             session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
@@ -56,11 +53,10 @@
 template<class T>
 camera_status_t CameraDevice::submitRequestsLocked(
         sp<ACameraCaptureSession> session,
-        /*optional*/T* cbs,
-        int numRequests, ACaptureRequest** requests,
+        /*optional*/T* cbs, int numRequests,
+        ACaptureRequest** requests,
         /*out*/int* captureSequenceId,
-        bool isRepeating)
-{
+        bool isRepeating) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
@@ -68,9 +64,10 @@
     }
 
     // Form two vectors of capture request, one for internal tracking
-    std::vector<frameworks::cameraservice::device::V2_0::CaptureRequest> requestList;
-    Vector<sp<CaptureRequest>> requestsV;
-    requestsV.setCapacity(numRequests);
+
+    std::vector<::aidl::android::frameworks::cameraservice::device::CaptureRequest> requestList;
+    std::vector<sp<CaptureRequest>> requestsV;
+    requestsV.reserve(numRequests);
     for (int i = 0; i < numRequests; i++) {
         sp<CaptureRequest> req;
         ret = allocateCaptureRequestLocked(requests[i], req);
@@ -87,7 +84,7 @@
             ALOGE("Capture request without output target cannot be submitted!");
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
-        requestList.push_back(utils::convertToHidl(req.get()));
+        requestList.push_back(utils::convertToAidl(req.get()));
         requestsV.push_back(req);
     }
     if (isRepeating) {
@@ -100,18 +97,20 @@
 
     SubmitInfo info;
     Status status;
-    auto remoteRet = mRemote->submitRequestList(requestList, isRepeating,
-                                                [&status, &info](auto s, auto &submitInfo) {
-                                                    status = s;
-                                                    info = submitInfo;
-                                                });
-    if (!remoteRet.isOk()) {
-        ALOGE("%s: Transaction error for submitRequestList call: %s", __FUNCTION__,
-              remoteRet.description().c_str());
+    ndk::ScopedAStatus remoteRet = mRemote->submitRequestList(requestList, isRepeating, &info);
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: submitRequestList call failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction error for submitRequestList call: %d", __FUNCTION__,
+                  remoteRet.getExceptionCode());
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
-    if (status != Status::NO_ERROR) {
-        return utils::convertFromHidl(status);
-    }
+
     int32_t sequenceId = info.requestId;
     int64_t lastFrameNumber = info.lastFrameNumber;
     if (sequenceId < 0) {
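Reviewer note: every binder call converted in this file (and in ACameraManager.cpp below) follows the same two-branch error handling: a service-specific exception carries a cameraservice Status code, anything else is treated as a transport failure. A condensed sketch of that pattern, assuming the generated Status enum plus the toString() and utils::convertFromAidl() helpers are in scope as they are here; the function name is illustrative:

    camera_status_t handleBinderError(const ndk::ScopedAStatus& remoteRet, const char* what) {
        if (remoteRet.isOk()) {
            return ACAMERA_OK;
        }
        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
            // The service reported a cameraservice-defined error code.
            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
            ALOGE("%s failed: %s", what, toString(errStatus).c_str());
            return utils::convertFromAidl(errStatus);
        }
        // Any other exception code means the transaction itself failed.
        ALOGE("%s: transaction error %d", what, remoteRet.getExceptionCode());
        return ACAMERA_ERROR_UNKNOWN;
    }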
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 77c934a..3aa7817 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -17,28 +17,29 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraManagerVendor"
 
-#include <memory>
-#include "ndk_vendor/impl/ACameraManager.h"
 #include "ACameraMetadata.h"
 #include "ndk_vendor/impl/ACameraDevice.h"
+#include "ndk_vendor/impl/ACameraManager.h"
 #include "utils.h"
+
 #include <CameraMetadata.h>
-#include <camera_metadata_hidden.h>
-
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
-
 #include <VendorTagDescriptor.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <camera_metadata_hidden.h>
+#include <cutils/properties.h>
+#include <memory>
+#include <utils/Vector.h>
 
 using namespace android::acam;
 
 namespace android {
 namespace acam {
 
-using frameworks::cameraservice::common::V2_0::ProviderIdAndVendorTagSections;
-using android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
-using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using ::ndk::ScopedAStatus;
 
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
@@ -47,52 +48,55 @@
 const char* CameraManagerGlobal::kContextKey    = "CallbackContext";
 const nsecs_t CameraManagerGlobal::kCallbackDrainTimeout = 5000000; // 5 ms
 Mutex                CameraManagerGlobal::sLock;
-CameraManagerGlobal* CameraManagerGlobal::sInstance = nullptr;
+std::weak_ptr<CameraManagerGlobal> CameraManagerGlobal::sInstance =
+        std::weak_ptr<CameraManagerGlobal>();
 
 /**
- * The vendor tag descriptor class that takes HIDL vendor tag information as
+ * The vendor tag descriptor class that takes AIDL vendor tag information as
  * input. Not part of vendor available VendorTagDescriptor class because that class is used by
  * default HAL implementation code as well.
+ *
+ * This is a class instead of a free-standing function because VendorTagDescriptor has some
+ * protected fields that need to be initialized during conversion.
  */
-class HidlVendorTagDescriptor : public VendorTagDescriptor {
+class AidlVendorTagDescriptor : public VendorTagDescriptor {
 public:
     /**
-     * Create a VendorTagDescriptor object from the HIDL VendorTagSection
+     * Create a VendorTagDescriptor object from the AIDL VendorTagSection
      * vector.
      *
      * Returns OK on success, or a negative error code.
      */
-    static status_t createDescriptorFromHidl(const hidl_vec<VendorTagSection>& vts,
+    static status_t createDescriptorFromAidl(const std::vector<VendorTagSection>& vts,
                                              /*out*/ sp<VendorTagDescriptor> *descriptor);
 };
 
-status_t HidlVendorTagDescriptor::createDescriptorFromHidl(const hidl_vec<VendorTagSection> &vts,
-                                                           sp<VendorTagDescriptor> *descriptor) {
-    int tagCount = 0;
+status_t AidlVendorTagDescriptor::createDescriptorFromAidl(const std::vector<VendorTagSection>& vts,
+                                                           sp<VendorTagDescriptor>* descriptor){
+    size_t tagCount = 0;
 
     for (size_t s = 0; s < vts.size(); s++) {
         tagCount += vts[s].tags.size();
     }
 
     if (tagCount < 0 || tagCount > INT32_MAX) {
-        ALOGE("%s: tag count %d from vendor tag sections is invalid.", __FUNCTION__, tagCount);
+        ALOGE("%s: tag count %zu from vendor tag sections is invalid.", __FUNCTION__, tagCount);
         return BAD_VALUE;
     }
 
-    Vector<uint32_t> tagArray;
-    LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount,
-            "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount);
+    std::vector<int64_t> tagArray;
+    tagArray.resize(tagCount);
 
-    sp<HidlVendorTagDescriptor> desc = new HidlVendorTagDescriptor();
+    sp<AidlVendorTagDescriptor> desc = new AidlVendorTagDescriptor();
     desc->mTagCount = tagCount;
 
-    KeyedVector<uint32_t, String8> tagToSectionMap;
+    std::map<int64_t, std::string> tagToSectionMap;
 
     int idx = 0;
     for (size_t s = 0; s < vts.size(); s++) {
         const VendorTagSection& section = vts[s];
         const char *sectionName = section.sectionName.c_str();
-        if (sectionName == NULL) {
+        if (sectionName == nullptr) {
             ALOGE("%s: no section name defined for vendor tag section %zu.", __FUNCTION__, s);
             return BAD_VALUE;
         }
@@ -106,15 +110,15 @@
                 return BAD_VALUE;
             }
 
-            tagArray.editItemAt(idx++) = section.tags[j].tagId;
+            tagArray[idx++] = section.tags[j].tagId;
 
             const char *tagName = section.tags[j].tagName.c_str();
-            if (tagName == NULL) {
+            if (tagName == nullptr) {
                 ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag);
                 return BAD_VALUE;
             }
             desc->mTagToNameMap.add(tag, String8(tagName));
-            tagToSectionMap.add(tag, sectionString);
+            tagToSectionMap.insert({tag, section.sectionName});
 
             int tagType = (int) section.tags[j].tagType;
             if (tagType < 0 || tagType >= NUM_TYPES) {
@@ -127,8 +131,12 @@
 
     for (size_t i = 0; i < tagArray.size(); ++i) {
         uint32_t tag = tagArray[i];
-        String8 sectionString = tagToSectionMap.valueFor(tag);
-
+        auto itr = tagToSectionMap.find(tag);
+        if (itr == tagToSectionMap.end()) {
+            ALOGE("%s: Couldn't find previously added tag in map.", __FUNCTION__);
+            return UNKNOWN_ERROR;
+        }
+        String8 sectionString = String8(itr->second.c_str());
         // Set up tag to section index map
         ssize_t index = desc->mSections.indexOf(sectionString);
         LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index);
@@ -147,35 +155,37 @@
     return OK;
 }
 
-CameraManagerGlobal&
-CameraManagerGlobal::getInstance() {
+std::shared_ptr<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
-    CameraManagerGlobal* instance = sInstance;
+    std::shared_ptr<CameraManagerGlobal> instance = sInstance.lock();
     if (instance == nullptr) {
-        instance = new CameraManagerGlobal();
+        instance = std::make_shared<CameraManagerGlobal>();
         sInstance = instance;
     }
-    return *instance;
+    return instance;
 }
 
 CameraManagerGlobal::~CameraManagerGlobal() {
-    // clear sInstance so next getInstance call knows to create a new one
     Mutex::Autolock _sl(sLock);
-    sInstance = nullptr;
     Mutex::Autolock _l(mLock);
     if (mCameraService != nullptr) {
-        mCameraService->unlinkToDeath(mDeathNotifier);
-        mCameraService->removeListener(mCameraServiceListener);
+        AIBinder_unlinkToDeath(mCameraService->asBinder().get(),
+                               mDeathRecipient.get(), this);
+        auto stat = mCameraService->removeListener(mCameraServiceListener);
+        if (!stat.isOk()) {
+            ALOGE("Failed to remove listener to camera service %d:%d", stat.getExceptionCode(),
+                  stat.getServiceSpecificError());
+        }
     }
-    mDeathNotifier.clear();
+
     if (mCbLooper != nullptr) {
         mCbLooper->unregisterHandler(mHandler->id());
         mCbLooper->stop();
     }
     mCbLooper.clear();
     mHandler.clear();
-    mCameraServiceListener.clear();
-    mCameraService.clear();
+    mCameraServiceListener.reset();
+    mCameraService.reset();
 }
 
 static bool isCameraServiceDisabled() {
@@ -188,23 +198,28 @@
     sp<VendorTagDescriptorCache> tagCache = new VendorTagDescriptorCache();
     Status status = Status::NO_ERROR;
     std::vector<ProviderIdAndVendorTagSections> providerIdsAndVts;
-    auto remoteRet = mCameraService->getCameraVendorTagSections([&status, &providerIdsAndVts]
-                                                                 (Status s,
-                                                                  auto &IdsAndVts) {
-                                                         status = s;
-                                                         providerIdsAndVts = IdsAndVts; });
+    ScopedAStatus remoteRet = mCameraService->getCameraVendorTagSections(&providerIdsAndVts);
 
-    if (!remoteRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("Failed to retrieve VendorTagSections %s", remoteRet.description().c_str());
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: Failed to retrieve VendorTagSections %s",
+                __FUNCTION__, toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Binder error when retrieving VendorTagSections: %d", __FUNCTION__,
+                remoteRet.getExceptionCode());
+        }
         return false;
     }
+
     // Convert each providers VendorTagSections into a VendorTagDescriptor and
     // add it to the cache
     for (auto &providerIdAndVts : providerIdsAndVts) {
         sp<VendorTagDescriptor> vendorTagDescriptor;
-        if (HidlVendorTagDescriptor::createDescriptorFromHidl(providerIdAndVts.vendorTagSections,
-                                                              &vendorTagDescriptor) != OK) {
-            ALOGE("Failed to convert from Hidl: VendorTagDescriptor");
+        status_t ret = AidlVendorTagDescriptor::createDescriptorFromAidl(
+                providerIdAndVts.vendorTagSections, &vendorTagDescriptor);
+        if (ret != OK) {
+            ALOGE("Failed to convert from Aidl: VendorTagDescriptor: %d", ret);
             return false;
         }
         tagCache->addVendorDescriptor(providerIdAndVts.providerId, vendorTagDescriptor);
@@ -213,101 +228,125 @@
     return true;
 }
 
-sp<ICameraService> CameraManagerGlobal::getCameraService() {
+std::shared_ptr<ICameraService> CameraManagerGlobal::getCameraService() {
     Mutex::Autolock _l(mLock);
-    if (mCameraService.get() == nullptr) {
-        if (isCameraServiceDisabled()) {
-            return mCameraService;
-        }
 
-        sp<ICameraService> cameraServiceBinder;
-        do {
-            cameraServiceBinder = ICameraService::getService();
-            if (cameraServiceBinder != nullptr) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
-        if (mDeathNotifier == nullptr) {
-            mDeathNotifier = new DeathNotifier(this);
-        }
-        cameraServiceBinder->linkToDeath(mDeathNotifier, 0);
-        mCameraService = cameraServiceBinder;
+    if (mCameraService != nullptr) {
+        // Camera service already set up. Return existing value.
+        return mCameraService;
+    }
 
-        // Setup looper thread to perfrom availiability callbacks
-        if (mCbLooper == nullptr) {
-            mCbLooper = new ALooper;
-            mCbLooper->setName("C2N-mgr-looper");
-            status_t err = mCbLooper->start(
-                    /*runOnCallingThread*/false,
-                    /*canCallJava*/       true,
-                    PRIORITY_DEFAULT);
-            if (err != OK) {
-                ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
-                        __FUNCTION__, strerror(-err), err);
-                mCbLooper.clear();
-                return nullptr;
-            }
-            if (mHandler == nullptr) {
-                mHandler = new CallbackHandler(this);
-            }
-            mCbLooper->registerHandler(mHandler);
-        }
+    if (isCameraServiceDisabled()) {
+        // Camera service is disabled; return nullptr.
+        return mCameraService;
+    }
 
-        // register ICameraServiceListener
-        if (mCameraServiceListener == nullptr) {
-            mCameraServiceListener = new CameraServiceListener(this);
-        }
-        hidl_vec<frameworks::cameraservice::service::V2_1::CameraStatusAndId> cameraStatuses{};
-        Status status = Status::NO_ERROR;
-        auto remoteRet = mCameraService->addListener_2_1(mCameraServiceListener,
-                                                     [&status, &cameraStatuses](Status s,
-                                                                                auto &retStatuses) {
-                                                         status = s;
-                                                         cameraStatuses = retStatuses;
-                                                     });
-        if (!remoteRet.isOk() || status != Status::NO_ERROR) {
-            ALOGE("Failed to add listener to camera service %s", remoteRet.description().c_str());
-        }
+    std::string serviceName = ICameraService::descriptor;
+    serviceName += "/default";
 
-        // Setup vendor tags
-        if (!setupVendorTags()) {
-            ALOGE("Unable to set up vendor tags");
+    bool isDeclared = AServiceManager_isDeclared(serviceName.c_str());
+    if (!isDeclared) {
+        ALOGE("%s: No ICameraService instance declared: %s", __FUNCTION__, serviceName.c_str());
+        return nullptr;
+    }
+
+    // Before doing anything more, make sure there is a binder threadpool alive.
+    // This is a no-op if the binder threadpool was already started by this process.
+    ABinderProcess_startThreadPool();
+
+    std::shared_ptr<ICameraService> cameraService =
+            ICameraService::fromBinder(ndk::SpAIBinder(
+                    AServiceManager_waitForService(serviceName.c_str())));
+    if (cameraService == nullptr) {
+        ALOGE("%s: Could not get ICameraService instance.", __FUNCTION__);
+        return nullptr;
+    }
+
+    if (mDeathRecipient.get() == nullptr) {
+        mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(
+                AIBinder_DeathRecipient_new(CameraManagerGlobal::binderDeathCallback));
+    }
+    AIBinder_linkToDeath(cameraService->asBinder().get(),
+                         mDeathRecipient.get(), /*cookie=*/ this);
+
+    mCameraService = cameraService;
+
+    // Setup looper thread to perform availability callbacks
+    if (mCbLooper == nullptr) {
+        mCbLooper = new ALooper;
+        mCbLooper->setName("C2N-mgr-looper");
+        status_t err = mCbLooper->start(
+                /*runOnCallingThread*/false,
+                /*canCallJava*/       true,
+                PRIORITY_DEFAULT);
+        if (err != OK) {
+            ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
+                    __FUNCTION__, strerror(-err), err);
+            mCbLooper.clear();
             return nullptr;
         }
+        if (mHandler == nullptr) {
+            mHandler = new CallbackHandler(weak_from_this());
+        }
+        mCbLooper->registerHandler(mHandler);
+    }
 
-        for (auto& c : cameraStatuses) {
-            onStatusChangedLocked(c.v2_0);
+    // register ICameraServiceListener
+    if (mCameraServiceListener == nullptr) {
+        mCameraServiceListener = ndk::SharedRefBase::make<CameraServiceListener>(weak_from_this());
+    }
 
-            for (auto& unavailablePhysicalId : c.unavailPhysicalCameraIds) {
-                PhysicalCameraStatusAndId statusAndId;
-                statusAndId.deviceStatus = CameraDeviceStatus::STATUS_NOT_PRESENT;
-                statusAndId.cameraId = c.v2_0.cameraId;
-                statusAndId.physicalCameraId = unavailablePhysicalId;
-                onStatusChangedLocked(statusAndId);
-            }
+    std::vector<CameraStatusAndId> cameraStatuses;
+    Status status = Status::NO_ERROR;
+    ScopedAStatus remoteRet = mCameraService->addListener(mCameraServiceListener,
+                                                          &cameraStatuses);
+
+    if (!remoteRet.isOk()) {
+        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+            ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
+                toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Transaction failed when adding listener to camera service: %d",
+                __FUNCTION__, remoteRet.getExceptionCode());
+        }
+    }
+
+    // Setup vendor tags
+    if (!setupVendorTags()) {
+        ALOGE("Unable to set up vendor tags");
+        return nullptr;
+    }
+
+    for (auto& csi : cameraStatuses) {
+        onStatusChangedLocked(csi.deviceStatus, csi.cameraId);
+
+        for (auto& unavailablePhysicalId : csi.unavailPhysicalCameraIds) {
+            onStatusChangedLocked(CameraDeviceStatus::STATUS_NOT_PRESENT,
+                                  csi.cameraId, unavailablePhysicalId);
         }
     }
     return mCameraService;
 }
 
-void CameraManagerGlobal::DeathNotifier::serviceDied(uint64_t cookie, const wp<IBase> &who) {
-    (void) cookie;
-    (void) who;
+void CameraManagerGlobal::binderDeathCallback(void* /*cookie*/) {
+    AutoMutex _l(sLock);
+
     ALOGE("Camera service binderDied!");
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
-    if (cm != nullptr) {
-        AutoMutex lock(cm->mLock);
-        for (auto& pair : cm->mDeviceStatusMap) {
-            CameraStatusAndId cameraStatusAndId;
-            cameraStatusAndId.cameraId = pair.first;
-            cameraStatusAndId.deviceStatus = pair.second.getStatus();
-            cm->onStatusChangedLocked(cameraStatusAndId);
-        }
-        cm->mCameraService.clear();
-        // TODO: consider adding re-connect call here?
+    std::shared_ptr<CameraManagerGlobal> instance = sInstance.lock();
+    if (instance == nullptr) {
+        return;
     }
+
+    // Remove cameraService from the static instance
+    AutoMutex lock(instance->mLock);
+    for (auto& pair : instance->mDeviceStatusMap) {
+        const auto &cameraId = pair.first;
+        const auto &deviceStatus = pair.second.getStatus();
+        instance->onStatusChangedLocked(deviceStatus, cameraId);
+    }
+    instance->mCameraService.reset();
+    // TODO: consider adding re-connect call here?
 }
 
 void CameraManagerGlobal::registerAvailabilityCallback(
@@ -359,41 +398,45 @@
 void CameraManagerGlobal::registerAvailCallback(const T *callback) {
     Mutex::Autolock _l(mLock);
     Callback cb(callback);
-    auto pair = mCallbacks.insert(cb);
+    auto res = mCallbacks.insert(cb);
+    if (!res.second) {
+        ALOGE("%s: Failed to register callback. Couldn't insert in map.", __FUNCTION__);
+        return;
+    }
     // Send initial callbacks if callback is newly registered
-    if (pair.second) {
-        for (auto& pair : mDeviceStatusMap) {
-            const hidl_string& cameraId = pair.first;
-            CameraDeviceStatus status = pair.second.getStatus();
+    for (auto& pair : mDeviceStatusMap) {
+        const std::string& cameraId = pair.first;
+        CameraDeviceStatus status = pair.second.getStatus();
 
+        {
             // Camera available/unavailable callback
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
             ACameraManager_AvailabilityCallback cbFunc = isStatusAvailable(status) ?
-                    cb.mAvailable : cb.mUnavailable;
+                                                         cb.mAvailable : cb.mUnavailable;
             msg->setPointer(kCallbackFpKey, (void *) cbFunc);
             msg->setPointer(kContextKey, cb.mContext);
             msg->setString(kCameraIdKey, AString(cameraId.c_str()));
             mPendingCallbackCnt++;
             msg->post();
+        }
 
-            // Physical camera unavailable callback
-            std::set<hidl_string> unavailPhysicalIds = pair.second.getUnavailablePhysicalIds();
-            for (const auto& physicalCameraId : unavailPhysicalIds) {
-                sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
-                ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
-                        cb.mPhysicalCamUnavailable;
-                msg->setPointer(kCallbackFpKey, (void *) cbFunc);
-                msg->setPointer(kContextKey, cb.mContext);
-                msg->setString(kCameraIdKey, AString(cameraId.c_str()));
-                msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
-                mPendingCallbackCnt++;
-                msg->post();
-            }
+        // Physical camera unavailable callback
+        std::set<std::string> unavailPhysicalIds = pair.second.getUnavailablePhysicalIds();
+        for (const auto& physicalCameraId : unavailPhysicalIds) {
+            sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
+            ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
+                    cb.mPhysicalCamUnavailable;
+            msg->setPointer(kCallbackFpKey, (void *) cbFunc);
+            msg->setPointer(kContextKey, cb.mContext);
+            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
+            msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
+            mPendingCallbackCnt++;
+            msg->post();
         }
     }
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<hidl_string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     auto cs = getCameraService();
     Mutex::Autolock _l(mLock);
@@ -475,6 +518,10 @@
                 ALOGE("%s: Cannot find camera callback fp!", __FUNCTION__);
                 return;
             }
+            if (cb == nullptr) {
+                // Physical camera callback is null
+                return;
+            }
             found = msg->findPointer(kContextKey, &context);
             if (!found) {
                 ALOGE("%s: Cannot find callback context!", __FUNCTION__);
@@ -500,33 +547,31 @@
 }
 
 void CameraManagerGlobal::CallbackHandler::notifyParent() {
-    sp<CameraManagerGlobal> parent = mParent.promote();
+    std::shared_ptr<CameraManagerGlobal> parent = mParent.lock();
     if (parent != nullptr) {
         parent->onCallbackCalled();
     }
 }
 
-hardware::Return<void> CameraManagerGlobal::CameraServiceListener::onStatusChanged(
-        const CameraStatusAndId &statusAndId) {
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
+ScopedAStatus CameraManagerGlobal::CameraServiceListener::onStatusChanged(
+        CameraDeviceStatus status, const std::string &cameraId) {
+    std::shared_ptr<CameraManagerGlobal> cm = mCameraManager.lock();
     if (cm != nullptr) {
-        cm->onStatusChanged(statusAndId);
+        cm->onStatusChanged(status, cameraId);
     } else {
         ALOGE("Cannot deliver status change. Global camera manager died");
     }
-    return Void();
+    return ScopedAStatus::ok();
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        const CameraStatusAndId &statusAndId) {
+        const CameraDeviceStatus &status, const std::string &cameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(statusAndId);
+    onStatusChangedLocked(status, cameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        const CameraStatusAndId &statusAndId) {
-    hidl_string cameraId = statusAndId.cameraId;
-    CameraDeviceStatus status = statusAndId.deviceStatus;
+        const CameraDeviceStatus &status, const std::string &cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -560,28 +605,28 @@
     }
 }
 
-hardware::Return<void> CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
-        const PhysicalCameraStatusAndId &statusAndId) {
-    sp<CameraManagerGlobal> cm = mCameraManager.promote();
+ScopedAStatus CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
+        CameraDeviceStatus in_status, const std::string& in_cameraId,
+        const std::string& in_physicalCameraId) {
+    std::shared_ptr<CameraManagerGlobal> cm = mCameraManager.lock();
     if (cm != nullptr) {
-        cm->onStatusChanged(statusAndId);
+        cm->onStatusChanged(in_status, in_cameraId, in_physicalCameraId);
     } else {
         ALOGE("Cannot deliver status change. Global camera manager died");
     }
-    return Void();
+    return ScopedAStatus::ok();
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        const PhysicalCameraStatusAndId &statusAndId) {
+        const CameraDeviceStatus &status, const std::string& cameraId,
+        const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(statusAndId);
+    onStatusChangedLocked(status, cameraId, physicalCameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        const PhysicalCameraStatusAndId &statusAndId) {
-    hidl_string cameraId = statusAndId.cameraId;
-    hidl_string physicalCameraId = statusAndId.physicalCameraId;
-    CameraDeviceStatus status = statusAndId.deviceStatus;
+        const CameraDeviceStatus &status, const std::string& cameraId,
+        const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -635,20 +680,20 @@
 }
 
 bool CameraManagerGlobal::CameraStatus::addUnavailablePhysicalId(
-        const hidl_string& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto result = unavailablePhysicalIds.insert(physicalCameraId);
     return result.second;
 }
 
 bool CameraManagerGlobal::CameraStatus::removeUnavailablePhysicalId(
-        const hidl_string& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto count = unavailablePhysicalIds.erase(physicalCameraId);
     return count > 0;
 }
 
-std::set<hidl_string> CameraManagerGlobal::CameraStatus::getUnavailablePhysicalIds() {
+std::set<std::string> CameraManagerGlobal::CameraStatus::getUnavailablePhysicalIds() {
     std::lock_guard<std::mutex> lock(mLock);
     return unavailablePhysicalIds;
 }
@@ -659,16 +704,15 @@
 /**
  * ACameraManger Implementation
  */
-camera_status_t
-ACameraManager::getCameraIdList(ACameraIdList** cameraIdList) {
+camera_status_t ACameraManager::getCameraIdList(ACameraIdList** cameraIdList) {
     Mutex::Autolock _l(mLock);
 
-    std::vector<hidl_string> idList;
-    CameraManagerGlobal::getInstance().getCameraIdList(&idList);
+    std::vector<std::string> idList;
+    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
-    if (!out) {
+    if (out == nullptr) {
         ALOGE("Allocate memory for ACameraIdList failed!");
         return ACAMERA_ERROR_NOT_ENOUGH_MEMORY;
     }
@@ -710,33 +754,37 @@
     }
 }
 
-camera_status_t ACameraManager::getCameraCharacteristics(
-        const char *cameraIdStr, sp<ACameraMetadata> *characteristics) {
+camera_status_t ACameraManager::getCameraCharacteristics(const char *cameraIdStr,
+                                                         sp<ACameraMetadata> *characteristics) {
+    using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
     Mutex::Autolock _l(mLock);
 
-    sp<ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    CameraMetadata rawMetadata;
-    Status status = Status::NO_ERROR;
-    auto serviceRet =
-        cs->getCameraCharacteristics(cameraIdStr,
-                                     [&status, &rawMetadata] (auto s ,
-                                                              const hidl_vec<uint8_t> &metadata) {
-                                          status = s;
-                                          if (status == Status::NO_ERROR) {
-                                              utils::convertFromHidlCloned(metadata, &rawMetadata);
-                                          }
-                                     });
-    if (!serviceRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("Get camera characteristics from camera service failed");
+    AidlCameraMetadata rawMetadata;
+    ScopedAStatus serviceRet = cs->getCameraCharacteristics(cameraIdStr, &rawMetadata);
+
+    if (!serviceRet.isOk()) {
+        if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
+            ALOGE("%s: Get camera characteristics from camera service failed: %s",
+                __FUNCTION__, toString(errStatus).c_str());
+        } else {
+            ALOGE("%s: Transaction error when getting camera "
+                  "characteristics from camera service: %d",
+                __FUNCTION__, serviceRet.getExceptionCode());
+        }
         return ACAMERA_ERROR_UNKNOWN; // should not reach here
     }
 
-    *characteristics = new ACameraMetadata(
-            rawMetadata.release(), ACameraMetadata::ACM_CHARACTERISTICS);
+    camera_metadata_t* metadataBuffer = nullptr;
+    if (!::android::acam::utils::cloneFromAidl(rawMetadata, &metadataBuffer)) {
+        return ACAMERA_ERROR_UNKNOWN;
+    }
+
+    *characteristics = new ACameraMetadata(metadataBuffer,
+                                           ACameraMetadata::ACM_CHARACTERISTICS);
     return ACAMERA_OK;
 }
 
@@ -756,42 +804,41 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars));
 
-    sp<ICameraService> cs = CameraManagerGlobal::getInstance().getCameraService();
+    std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
 
-    sp<ICameraDeviceCallback> callbacks = device->getServiceCallback();
-    sp<ICameraDeviceUser_2_0> deviceRemote_2_0;
+    std::shared_ptr<BnCameraDeviceCallback> deviceCallback = device->getServiceCallback();
+    std::shared_ptr<ICameraDeviceUser> deviceRemote;
 
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
-    Status status = Status::NO_ERROR;
-    auto serviceRet = cs->connectDevice(
-            callbacks, cameraId, [&status, &deviceRemote_2_0](auto s, auto &device) {
-                                     status = s;
-                                     deviceRemote_2_0 = device;
-                                 });
-
-    if (!serviceRet.isOk() || status != Status::NO_ERROR) {
-        ALOGE("%s: connect camera device failed", __FUNCTION__);
-        delete device;
-        return utils::convertFromHidl(status);
+    ScopedAStatus serviceRet = cs->connectDevice(deviceCallback,
+                                                 std::string(cameraId), &deviceRemote);
+    if (!serviceRet.isOk()) {
+        if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
+            ALOGE("%s: connect camera device failed: %s",
+                  __FUNCTION__, toString(errStatus).c_str());
+            delete device;
+            return utils::convertFromAidl(errStatus);
+        } else {
+            ALOGE("%s: Transaction failed when connecting camera device: %d",
+                __FUNCTION__, serviceRet.getExceptionCode());
+            delete device;
+            return ACAMERA_ERROR_UNKNOWN;
+        }
     }
-    if (deviceRemote_2_0 == nullptr) {
+
+    if (deviceRemote == nullptr) {
         ALOGE("%s: connect camera device failed! remote device is null", __FUNCTION__);
         delete device;
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
-    auto castResult = ICameraDeviceUser::castFrom(deviceRemote_2_0);
-    if (!castResult.isOk()) {
-        ALOGE("%s: failed to cast remote device to version 2.1", __FUNCTION__);
-        delete device;
-        return ACAMERA_ERROR_CAMERA_DISCONNECTED;
-    }
-    sp<ICameraDeviceUser> deviceRemote = castResult;
+
     device->setRemoteDevice(deviceRemote);
     device->setDeviceMetadataQueues();
     *outDevice = device;
@@ -814,7 +861,7 @@
     sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
     sp<VendorTagDescriptor> vTags = nullptr;
     vtCache->getVendorTagDescriptor(vendorTagId, &vTags);
-    status_t status= metadata.getTagFromName(name, vTags.get(), tag);
+    status_t status = CameraMetadata::getTagFromName(name, vTags.get(), tag);
     return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
 }
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 4663529..85acee7 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -17,39 +17,35 @@
 #ifndef _ACAMERA_MANAGER_H
 #define _ACAMERA_MANAGER_H
 
-#include <camera/NdkCameraManager.h>
-
-#include <android-base/parseint.h>
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.2/ICameraService.h>
-#include <android/frameworks/cameraservice/service/2.1/ICameraServiceListener.h>
-
 #include <CameraMetadata.h>
-#include <utils/StrongPointer.h>
-#include <utils/Mutex.h>
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <set>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/service/BnCameraServiceListener.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraService.h>
+#include <android-base/parseint.h>
+#include <camera/NdkCameraManager.h>
 #include <map>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <set>
+#include <utility>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
 
 namespace android {
 namespace acam {
 
-using ICameraService = frameworks::cameraservice::service::V2_2::ICameraService;
-using CameraDeviceStatus = frameworks::cameraservice::service::V2_0::CameraDeviceStatus;
-using ICameraServiceListener = frameworks::cameraservice::service::V2_1::ICameraServiceListener;
-using PhysicalCameraStatusAndId = frameworks::cameraservice::service::V2_1::PhysicalCameraStatusAndId;
-using CameraStatusAndId = frameworks::cameraservice::service::V2_0::CameraStatusAndId;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using VendorTagSection = frameworks::cameraservice::common::V2_0::VendorTagSection;
-using VendorTag = frameworks::cameraservice::common::V2_0::VendorTag;
-using IBase = android::hidl::base::V1_0::IBase;
-using android::hardware::hidl_string;
-using hardware::Void;
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::common::VendorTag;
+using ::aidl::android::frameworks::cameraservice::common::VendorTagSection;
+using ::aidl::android::frameworks::cameraservice::service::BnCameraServiceListener;
+using ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using ::aidl::android::frameworks::cameraservice::service::ICameraService;
 
 /**
  * Per-process singleton instance of CameraManger. Shared by all ACameraManager
@@ -58,15 +54,18 @@
  *
  * TODO: maybe CameraManagerGlobal is better suited in libcameraclient?
  */
-class CameraManagerGlobal final : public RefBase {
+class CameraManagerGlobal final: public std::enable_shared_from_this<CameraManagerGlobal> {
   public:
-    static CameraManagerGlobal& getInstance();
-    sp<ICameraService> getCameraService();
+    static std::shared_ptr<CameraManagerGlobal> getInstance();
+    static void binderDeathCallback(void* cookie);
 
-    void registerAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
-    void unregisterAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
+    CameraManagerGlobal() {};
+    ~CameraManagerGlobal();
+
+    std::shared_ptr<ICameraService> getCameraService();
+
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks *callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks *callback);
 
     void registerExtendedAvailabilityCallback(
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
@@ -76,35 +75,28 @@
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<hidl_string> *cameraIds);
+    void getCameraIdList(std::vector<std::string> *cameraIds);
 
   private:
-    sp<ICameraService> mCameraService;
+    std::shared_ptr<ICameraService> mCameraService;
     const int          kCameraServicePollDelay = 500000; // 0.5s
     Mutex              mLock;
-    class DeathNotifier : public android::hardware::hidl_death_recipient {
-      public:
-        explicit DeathNotifier(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-      protected:
-        // IBinder::DeathRecipient implementation
-        virtual void serviceDied(uint64_t cookie, const wp<IBase> &who);
-      private:
-        const wp<CameraManagerGlobal> mCameraManager;
-    };
-    sp<DeathNotifier> mDeathNotifier;
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
 
-    class CameraServiceListener final : public ICameraServiceListener {
+    class CameraServiceListener final : public BnCameraServiceListener {
       public:
-        explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-        android::hardware::Return<void> onStatusChanged(
-            const CameraStatusAndId &statusAndId) override;
-        android::hardware::Return<void> onPhysicalCameraStatusChanged(
-            const PhysicalCameraStatusAndId &statusAndId) override;
+        explicit CameraServiceListener(std::weak_ptr<CameraManagerGlobal> cm) :
+              mCameraManager(std::move(cm)) {}
+        ndk::ScopedAStatus onPhysicalCameraStatusChanged(
+                CameraDeviceStatus in_status, const std::string& in_cameraId,
+                const std::string& in_physicalCameraId) override;
+        ndk::ScopedAStatus onStatusChanged(CameraDeviceStatus in_status,
+                                           const std::string& in_cameraId) override;
 
       private:
-        const wp<CameraManagerGlobal> mCameraManager;
+        const std::weak_ptr<CameraManagerGlobal> mCameraManager;
     };
-    sp<CameraServiceListener> mCameraServiceListener;
+    std::shared_ptr<CameraServiceListener> mCameraServiceListener;
 
     // Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
     struct Callback {
@@ -180,20 +172,22 @@
     static const nsecs_t kCallbackDrainTimeout;
     class CallbackHandler : public AHandler {
       public:
-        CallbackHandler(wp<CameraManagerGlobal> parent) : mParent(parent) {}
+        CallbackHandler(std::weak_ptr<CameraManagerGlobal> parent) : mParent(std::move(parent)) {}
         void onMessageReceived(const sp<AMessage> &msg) override;
       private:
-        wp<CameraManagerGlobal> mParent;
+        std::weak_ptr<CameraManagerGlobal> mParent;
         void notifyParent();
         void onMessageReceivedInternal(const sp<AMessage> &msg);
     };
     sp<CallbackHandler> mHandler;
     sp<ALooper>         mCbLooper; // Looper thread where callbacks actually happen on
 
-    void onStatusChanged(const CameraStatusAndId &statusAndId);
-    void onStatusChangedLocked(const CameraStatusAndId &statusAndId);
-    void onStatusChanged(const PhysicalCameraStatusAndId &statusAndId);
-    void onStatusChangedLocked(const PhysicalCameraStatusAndId &statusAndId);
+    void onStatusChanged(const CameraDeviceStatus &status, const std::string &cameraId);
+    void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId);
+    void onStatusChanged(const CameraDeviceStatus &status, const std::string &cameraId,
+                         const std::string &physicalCameraId);
+    void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId,
+                               const std::string &physicalCameraId);
     bool setupVendorTags();
 
     // Utils for status
@@ -203,7 +197,7 @@
     // The sort logic must match the logic in
     // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
     struct CameraIdComparator {
-        bool operator()(const hidl_string& a, const hidl_string& b) const {
+        bool operator()(const std::string& a, const std::string& b) const {
             uint32_t aUint = 0, bUint = 0;
             bool aIsUint = base::ParseUint(a.c_str(), &aUint);
             bool bIsUint = base::ParseUint(b.c_str(), &bUint);
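Reviewer note: per the comment above, this comparator keeps the NDK camera ID ordering in sync with CameraProviderManager: IDs that parse as unsigned integers sort numerically and come before non-numeric IDs, which sort lexicographically. A self-contained illustration of that ordering; the comparator body is a hedged reconstruction (overflow handling omitted), not code from this change:

    #include <cstdint>
    #include <cstdlib>
    #include <map>
    #include <string>

    struct IdCompare {
        static bool parseUint(const std::string& s, uint32_t* out) {
            if (s.empty() || s.find_first_not_of("0123456789") != std::string::npos) {
                return false;
            }
            *out = static_cast<uint32_t>(std::strtoul(s.c_str(), nullptr, 10));
            return true;
        }
        bool operator()(const std::string& a, const std::string& b) const {
            uint32_t aUint = 0, bUint = 0;
            bool aIsUint = parseUint(a, &aUint);
            bool bIsUint = parseUint(b, &bUint);
            if (aIsUint && bIsUint) return aUint < bUint;
            if (aIsUint != bIsUint) return aIsUint;  // numeric IDs sort first
            return a < b;
        }
    };

    // A map keyed this way iterates as "0", "1", "10", then e.g. "virtual-back".
    using DeviceStatusMap = std::map<std::string, int, IdCompare>;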
@@ -225,29 +219,29 @@
       private:
         CameraDeviceStatus status = CameraDeviceStatus::STATUS_NOT_PRESENT;
         mutable std::mutex mLock;
-        std::set<hidl_string> unavailablePhysicalIds;
+        std::set<std::string> unavailablePhysicalIds;
       public:
         CameraStatus(CameraDeviceStatus st): status(st) { };
         CameraStatus() = default;
 
-        bool addUnavailablePhysicalId(const hidl_string& physicalCameraId);
-        bool removeUnavailablePhysicalId(const hidl_string& physicalCameraId);
+        bool addUnavailablePhysicalId(const std::string& physicalCameraId);
+        bool removeUnavailablePhysicalId(const std::string& physicalCameraId);
         CameraDeviceStatus getStatus();
         void updateStatus(CameraDeviceStatus newStatus);
-        std::set<hidl_string> getUnavailablePhysicalIds();
+        std::set<std::string> getUnavailablePhysicalIds();
     };
 
     template <class T>
     void registerAvailCallback(const T *callback);
 
     // Map camera_id -> status
-    std::map<hidl_string, CameraStatus, CameraIdComparator> mDeviceStatusMap;
+    std::map<std::string, CameraStatus, CameraIdComparator> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
-    static CameraManagerGlobal* sInstance;
-    CameraManagerGlobal() {};
-    ~CameraManagerGlobal();
+    // Static instance is stored in a weak pointer, so it will only exist while there is at least one
+    // active consumer of CameraManagerGlobal
+    static std::weak_ptr<CameraManagerGlobal> sInstance;
 };
 
 } // namespace acam;
@@ -259,7 +253,7 @@
  */
 struct ACameraManager {
     ACameraManager() :
-            mGlobalManager(&(android::acam::CameraManagerGlobal::getInstance())) {}
+            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     ~ACameraManager();
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
@@ -277,7 +271,7 @@
         kCameraIdListNotInit = -1
     };
     android::Mutex         mLock;
-    android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+    std::shared_ptr<android::acam::CameraManagerGlobal> mGlobalManager;
 };
 
 #endif //_ACAMERA_MANAGER_H
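Reviewer note: the header above swaps the HIDL hidl_death_recipient for the libbinder_ndk death-recipient C API, but only declares the pieces. A hedged sketch of how they are typically wired together, mirroring CameraManagerGlobal::getCameraService(); the class name and callback body are illustrative:

    #include <android/binder_auto_utils.h>
    #include <android/binder_ibinder.h>

    class ServiceWatcher {
      public:
        void watch(const ndk::SpAIBinder& serviceBinder) {
            if (mDeathRecipient.get() == nullptr) {
                mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(
                        AIBinder_DeathRecipient_new(&ServiceWatcher::onBinderDied));
            }
            // The cookie passed here is handed back verbatim when the service dies.
            AIBinder_linkToDeath(serviceBinder.get(), mDeathRecipient.get(), /*cookie=*/this);
        }
      private:
        static void onBinderDied(void* cookie) {
            auto* self = static_cast<ServiceWatcher*>(cookie);
            (void) self;
            // Drop cached proxies / notify listeners here, as binderDeathCallback() does.
        }
        ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
    };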
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
index 5715d77..fcb7e34 100644
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
@@ -16,6 +16,7 @@
 
 #include "utils.h"
 
+using ::android::acam::utils::native_handle_ptr_wrapper;
 struct ACameraOutputTarget {
     explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
 
@@ -32,5 +33,5 @@
         return mWindow > other.mWindow;
     }
 
-    android::acam::utils::native_handle_ptr_wrapper mWindow;
+    native_handle_ptr_wrapper mWindow;
 };
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index e4fb204..73a527b 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -16,66 +16,75 @@
 
 #define LOG_TAG "ACameraVendorUtils"
 
-#include <utils/Log.h>
-
 #include "utils.h"
 
+#include <aidlcommonsupport/NativeHandle.h>
+#include <utils/Log.h>
+
 namespace android {
 namespace acam {
 namespace utils {
 
-// Convert CaptureRequest wrappable by sp<> to hidl CaptureRequest.
-frameworks::cameraservice::device::V2_0::CaptureRequest
-convertToHidl(const CaptureRequest *captureRequest) {
-    frameworks::cameraservice::device::V2_0::CaptureRequest hCaptureRequest;
-    hCaptureRequest.physicalCameraSettings = captureRequest->mCaptureRequest.physicalCameraSettings;
-    hCaptureRequest.streamAndWindowIds = captureRequest->mCaptureRequest.streamAndWindowIds;
-    return hCaptureRequest;
+// Convert CaptureRequest wrappable by sp<> to aidl CaptureRequest.
+AidlCaptureRequest convertToAidl(const CaptureRequest *captureRequest) {
+    AidlCaptureRequest aidlCaptureRequest;
+    aidlCaptureRequest.physicalCameraSettings =
+            captureRequest->mCaptureRequest.physicalCameraSettings;
+    aidlCaptureRequest.streamAndWindowIds = captureRequest->mCaptureRequest.streamAndWindowIds;
+    return aidlCaptureRequest;
 }
 
-HRotation convertToHidl(int rotation) {
-    HRotation hRotation = HRotation::R0;
+OutputConfiguration::Rotation convertToAidl(int rotation) {
+    using AidlRotation = OutputConfiguration::Rotation;
+
+    AidlRotation aRot = AidlRotation::R0;
     switch(rotation) {
         case CAMERA3_STREAM_ROTATION_90:
-            hRotation = HRotation::R90;
+            aRot = AidlRotation::R90;
             break;
         case CAMERA3_STREAM_ROTATION_180:
-            hRotation = HRotation::R180;
+            aRot = AidlRotation::R180;
             break;
         case CAMERA3_STREAM_ROTATION_270:
-            hRotation = HRotation::R270;
+            aRot = AidlRotation::R270;
             break;
         default:
             break;
     }
-    return hRotation;
+    return aRot;
 }
 
-bool convertFromHidlCloned(const HCameraMetadata &metadata, CameraMetadata *rawMetadata) {
-    const camera_metadata *buffer = (camera_metadata_t*)(metadata.data());
-    size_t expectedSize = metadata.size();
+bool cloneFromAidl(const AidlCameraMetadata& srcMetadata, camera_metadata_t** dst) {
+    const camera_metadata *buffer = (camera_metadata_t*)(srcMetadata.metadata.data());
+    size_t expectedSize = srcMetadata.metadata.size();
     int ret = validate_camera_metadata_structure(buffer, &expectedSize);
-    if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-        *rawMetadata = buffer;
-    } else {
-        ALOGE("%s: Malformed camera metadata received from caller", __FUNCTION__);
+    if (ret != OK && ret != CAMERA_METADATA_VALIDATION_SHIFTED) {
+        ALOGE("%s: Malformed camera srcMetadata received from caller", __FUNCTION__);
         return false;
     }
-    return true;
+
+    camera_metadata_t* clonedBuffer = clone_camera_metadata(buffer);
+    if (clonedBuffer != nullptr) {
+        *dst = clonedBuffer;
+        return true;
+    }
+
+    ALOGE("%s: Failed to clone srcMetadata buffer.", __FUNCTION__);
+    return false;
 }
 
-// Note: existing data in dst will be gone. dst owns memory if shouldOwn is set
-//       to true.
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn) {
+// Note: existing data in dst will be gone.
+void convertToAidl(const camera_metadata_t *src, AidlCameraMetadata* dst) {
     if (src == nullptr) {
         return;
     }
     size_t size = get_camera_metadata_size(src);
-    dst->setToExternal((uint8_t *) src, size, shouldOwn);
-    return;
+    uint8_t* metadataStart = (uint8_t*)src;
+    uint8_t* metadataEnd = metadataStart + size;
+    dst->metadata.assign(metadataStart, metadataEnd);
 }
 
-TemplateId convertToHidl(ACameraDevice_request_template templateId) {
+TemplateId convertToAidl(ACameraDevice_request_template templateId) {
     switch(templateId) {
         case TEMPLATE_STILL_CAPTURE:
             return TemplateId::STILL_CAPTURE;
@@ -92,7 +101,7 @@
     }
 }
 
-camera_status_t convertFromHidl(Status status) {
+camera_status_t convertFromAidl(Status status) {
     camera_status_t ret = ACAMERA_OK;
     switch(status) {
         case Status::NO_ERROR:
@@ -146,6 +155,14 @@
     return true;
 }
 
+bool isWindowNativeHandleEqual(const native_handle_t *nh1,
+                               const aidl::android::hardware::common::NativeHandle& nh2) {
+    native_handle_t* tempNh = makeFromAidl(nh2);
+    bool equal = isWindowNativeHandleEqual(nh1, tempNh);
+    native_handle_delete(tempNh);
+    return equal;
+}
+
 bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2) {
     if (isWindowNativeHandleEqual(nh1, nh2)) {
         return false;
@@ -166,32 +183,6 @@
     return !isWindowNativeHandleLessThan(nh1, nh2) && !isWindowNativeHandleEqual(nh1, nh2);
 }
 
-bool areWindowNativeHandlesEqual(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle> handles2) {
-    if (handles1.size() != handles2.size()) {
-        return false;
-    }
-    for (int i = 0; i < handles1.size(); i++) {
-        if (!isWindowNativeHandleEqual(handles1[i], handles2[i])) {
-            return false;
-        }
-    }
-    return true;
-}
-
-bool areWindowNativeHandlesLessThan(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2) {
-    if (handles1.size() != handles2.size()) {
-        return handles1.size() < handles2.size();
-    }
-    for (int i = 0; i < handles1.size(); i++) {
-        const native_handle_t *handle1 = handles1[i].getNativeHandle();
-        const native_handle_t *handle2 = handles2[i].getNativeHandle();
-        if (!isWindowNativeHandleEqual(handle1, handle2)) {
-            return isWindowNativeHandleLessThan(handle1, handle2);
-        }
-    }
-    return false;
-}
-
 } // namespace utils
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 62779a4..7ad74ad 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -14,46 +14,39 @@
  * limitations under the License.
  */
 
-#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
-#include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
-#include <android/frameworks/cameraservice/device/2.0/types.h>
-#include <camera/NdkCameraDevice.h>
+#ifndef CAMERA_NDK_VENDOR_UTILS_H
+#define CAMERA_NDK_VENDOR_UTILS_H
+
 #include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureRequest.h>
+#include <aidl/android/frameworks/cameraservice/device/ICameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraService.h>
+#include <camera/NdkCameraDevice.h>
 #include <hardware/camera3.h>
-
-#ifndef CAMERA_NDK_VENDOR_H
-#define CAMERA_NDK_VENDOR_H
-
-using android::hardware::hidl_vec;
-using android::hardware::hidl_handle;
+#include <utils/RefBase.h>
 
 namespace android {
 namespace acam {
 namespace utils {
 
-using CameraMetadata = hardware::camera::common::V1_0::helper::CameraMetadata;
-using HCameraMetadata  = frameworks::cameraservice::service::V2_0::CameraMetadata;
-using Status = frameworks::cameraservice::common::V2_0::Status;
-using TemplateId = frameworks::cameraservice::device::V2_0::TemplateId;
-using PhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
-using HRotation = frameworks::cameraservice::device::V2_0::OutputConfiguration::Rotation;
-using OutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
-
-// Utility class so that CaptureRequest can be stored by sp<>
-struct CaptureRequest : public RefBase {
-  frameworks::cameraservice::device::V2_0::CaptureRequest mCaptureRequest;
-  std::vector<const native_handle_t *> mSurfaceList;
-  //Physical camera settings metadata is stored here, since the capture request
-  //might not contain it. That's since, fmq might have consumed it.
-  hidl_vec<PhysicalCameraSettings> mPhysicalCameraSettings;
-};
-
-bool areWindowNativeHandlesEqual(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2);
-
-bool areWindowNativeHandlesLessThan(hidl_vec<hidl_handle> handles1, hidl_vec<hidl_handle>handles2);
+using ::aidl::android::frameworks::cameraservice::common::Status;
+using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using ::aidl::android::frameworks::cameraservice::device::TemplateId;
+using ::aidl::android::hardware::common::NativeHandle;
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using AidlCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
 
 bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2);
 
+bool isWindowNativeHandleEqual(const native_handle_t* nh1, const NativeHandle& nh2);
+
 bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2);
 
 // Convenience wrapper over isWindowNativeHandleLessThan and isWindowNativeHandleEqual
@@ -88,117 +81,30 @@
 
 };
 
-// Wrapper around OutputConfiguration. This is needed since HIDL
-// OutputConfiguration is auto-generated and marked final. Therefore, operator
-// overloads outside the class, will not get picked by clang while trying to
-// store OutputConfiguration in maps/sets.
-struct OutputConfigurationWrapper {
-    OutputConfiguration mOutputConfiguration;
-
-    operator const OutputConfiguration &() const {
-        return mOutputConfiguration;
-    }
-
-    OutputConfigurationWrapper() {
-        mOutputConfiguration.rotation = OutputConfiguration::Rotation::R0;
-        // The ndk currently doesn't support deferred surfaces
-        mOutputConfiguration.isDeferred = false;
-        mOutputConfiguration.width = 0;
-        mOutputConfiguration.height = 0;
-        // ndk doesn't support inter OutputConfiguration buffer sharing.
-        mOutputConfiguration.windowGroupId = -1;
-    };
-
-    OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
-        *this = other;
-    }
-
-    // Needed to make sure that OutputConfiguration in
-    // OutputConfigurationWrapper, when copied doesn't call hidl_handle's
-    // assignment operator / copy constructor, which will lead to native handle
-    // cloning, which is not what we want for app callbacks which have the native
-    // handle as parameter.
-    OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
-        const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
-        mOutputConfiguration.rotation = outputConfiguration.rotation;
-        mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
-        mOutputConfiguration.width = outputConfiguration.width;
-        mOutputConfiguration.height = outputConfiguration.height;
-        mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
-        mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
-        mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
-        size_t i = 0;
-        for (const auto &handle : outputConfiguration.windowHandles) {
-            mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
-        }
-        return *this;
-    }
-
-    bool operator ==(const OutputConfiguration &other) const {
-        const OutputConfiguration &self = mOutputConfiguration;
-        return self.rotation == other.rotation && self.windowGroupId == other.windowGroupId &&
-                self.physicalCameraId == other.physicalCameraId && self.width == other.width &&
-                self.height == other.height && self.isDeferred == other.isDeferred &&
-                areWindowNativeHandlesEqual(self.windowHandles, other.windowHandles);
-    }
-
-    bool operator < (const OutputConfiguration &other) const {
-        if (*this == other) {
-            return false;
-        }
-        const OutputConfiguration &self = mOutputConfiguration;
-        if (self.windowGroupId != other.windowGroupId) {
-            return self.windowGroupId < other.windowGroupId;
-        }
-
-        if (self.width != other.width) {
-            return self.width < other.width;
-        }
-
-        if (self.height != other.height) {
-            return self.height < other.height;
-        }
-
-        if (self.rotation != other.rotation) {
-            return static_cast<uint32_t>(self.rotation) < static_cast<uint32_t>(other.rotation);
-        }
-
-        if (self.isDeferred != other.isDeferred) {
-            return self.isDeferred < other.isDeferred;
-        }
-
-        if (self.physicalCameraId != other.physicalCameraId) {
-            return self.physicalCameraId < other.physicalCameraId;
-        }
-        return areWindowNativeHandlesLessThan(self.windowHandles, other.windowHandles);
-    }
-
-    bool operator != (const OutputConfiguration &other) const {
-        return !(*this == other);
-    }
-
-    bool operator > (const OutputConfiguration &other) const {
-        return (*this != other) && !(*this < other);
-    }
+// Utility class so that CaptureRequest can be stored by sp<>
+struct CaptureRequest: public RefBase {
+  AidlCaptureRequest mCaptureRequest;
+  std::vector<native_handle_ptr_wrapper> mSurfaceList;
+  // Physical camera settings metadata is stored here, as the capture request
+  // might not contain it; the FMQ might already have consumed it.
+  std::vector<PhysicalCameraSettings> mPhysicalCameraSettings;
 };
 
-// Convert CaptureRequest wrappable by sp<> to hidl CaptureRequest.
-frameworks::cameraservice::device::V2_0::CaptureRequest convertToHidl(
-    const CaptureRequest *captureRequest);
+AidlCaptureRequest convertToAidl(const CaptureRequest *captureRequest);
 
-HRotation convertToHidl(int rotation);
+OutputConfiguration::Rotation convertToAidl(int rotation);
 
-bool convertFromHidlCloned(const HCameraMetadata &metadata, CameraMetadata *rawMetadata);
+bool cloneFromAidl(const AidlCameraMetadata& srcMetadata, camera_metadata_t** dst);
 
 // Note: existing data in dst will be gone.
-void convertToHidl(const camera_metadata_t *src, HCameraMetadata* dst, bool shouldOwn = false);
+void convertToAidl(const camera_metadata_t *src, AidlCameraMetadata* dst);
 
-TemplateId convertToHidl(ACameraDevice_request_template templateId);
+TemplateId convertToAidl(ACameraDevice_request_template templateId);
 
-camera_status_t convertFromHidl(Status status);
+camera_status_t convertFromAidl(Status status);
 
 } // namespace utils
 } // namespace acam
 } // namespace android
 
-#endif // CAMERA_NDK_VENDOR_H
+#endif // CAMERA_NDK_VENDOR_UTILS_H
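
For orientation, a minimal caller-side sketch of how the conversion helpers declared above fit together follows; the function name and the error handling are illustrative assumptions, not part of this change.

```
#include "utils.h"  // the header diffed above; declares cloneFromAidl()/convertToAidl()

// Hedged usage sketch: round-tripping camera metadata through the AIDL helpers.
static void exampleMetadataRoundTrip(
        const android::acam::utils::AidlCameraMetadata& fromService) {
    using namespace android::acam::utils;

    camera_metadata_t* cloned = nullptr;
    if (!cloneFromAidl(fromService, &cloned)) {
        return;  // malformed metadata; nothing was allocated
    }

    // ... read or edit the cloned buffer here ...

    AidlCameraMetadata backToAidl;
    convertToAidl(cloned, &backToAidl);  // copies the raw bytes into backToAidl.metadata

    free_camera_metadata(cloned);  // cloneFromAidl() hands ownership of the clone to the caller
}
```

Because the AIDL parcelable only carries a byte vector, the clone step gives the caller an independently owned `camera_metadata_t` rather than a pointer into the parcelable's storage.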
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 63cdb76..7f6ea9d 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -66,7 +66,7 @@
     // Retaining the error code in case the caller needs to analyze it.
     std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
-            bool usePhysicalSettings) {
+            bool usePhysicalSettings, bool prepareWindows = false) {
         ConfiguredWindows configuredWindows;
         if (imgReaderAnw == nullptr) {
             ALOGE("Cannot initialize camera before image reader get initialized.");
@@ -142,7 +142,26 @@
             ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
             return ret;
         }
-
+        if (prepareWindows) {
+            // Set window prepared callback
+            ACameraCaptureSession_setWindowPreparedCallback(mSession, /*context*/this,
+                    mPreparedCb);
+            // Prepare windows
+            for (auto &window : configuredWindows) {
+                ret = ACameraCaptureSession_prepareWindow(mSession, window);
+                if (ret != ACAMERA_OK) {
+                    ALOGE("%s: ACameraCaptureSession_prepareWindow failed", __FUNCTION__);
+                    return ret;
+                }
+                incPendingPrepared(window);
+            }
+            // Allow some time for the prepared callbacks to arrive
+            usleep(configuredWindows.size() * 100000);
+            // Check that callbacks were received
+            if (!gotAllPreparedCallbacks()) {
+                return -1;
+            }
+        }
         // Create capture request
         if (usePhysicalSettings) {
             ret = ACameraDevice_createCaptureRequest_withPhysicalIds(mDevice,
@@ -254,20 +273,22 @@
                 &mLogicalCaptureCallbacksV2, 1, &mStillRequest, &seqId);
     }
 
-    bool checkCallbacks(int pictureCount) {
+    bool checkCallbacks(int pictureCount, bool printLog = false) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (mCompletedCaptureCallbackCount != pictureCount) {
-            ALOGE("Completed capture callback count not as expected. expected %d actual %d",
-                  pictureCount, mCompletedCaptureCallbackCount);
+            ALOGE_IF(printLog,
+                     "Completed capture callback count not as expected. expected %d actual %d",
+                     pictureCount, mCompletedCaptureCallbackCount);
             return false;
         }
         return true;
     }
-    bool checkCallbacksV2(int pictureCount) {
+    bool checkCallbacksV2(int pictureCount, bool printLog = false) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (mCaptureStartedCallbackCount != pictureCount) {
-            ALOGE("Capture started callback count not as expected. expected %d actual %d",
-                  pictureCount, mCaptureStartedCallbackCount);
+            ALOGE_IF(printLog,
+                     "Capture started callback count not as expected. expected %d actual %d",
+                     pictureCount, mCaptureStartedCallbackCount);
             return false;
         }
         return true;
@@ -275,9 +296,55 @@
 
 
    private:
+    static void onPreparedCb(void* obj, ACameraWindowType *anw, ACameraCaptureSession *session) {
+        CameraHelper* thiz = reinterpret_cast<CameraHelper*>(obj);
+        thiz->handlePrepared(anw, session);
+    }
+    bool gotAllPreparedCallbacks() {
+        std::lock_guard<std::mutex> lock(mMutex);
+        bool ret = (mPendingPreparedCbs.size() == 0);
+        if (!ret) {
+            ALOGE("%s: mPendingPreparedCbs has the following expected callbacks", __FUNCTION__);
+            for (auto pair : mPendingPreparedCbs) {
+                ALOGE("%s: ANW: %p : pending callbacks %d", __FUNCTION__, pair.first, pair.second);
+            }
+        }
+        return ret;
+    }
+
+    void handlePrepared(ACameraWindowType *anw, ACameraCaptureSession *session) {
+        // Reduce the pending prepared count of anw by 1. If the count reaches 0, remove the key.
+        std::lock_guard<std::mutex> lock(mMutex);
+        if (session != mSession) {
+            ALOGE("%s: Received callback for incorrect session ? mSession %p, session %p",
+                    __FUNCTION__, mSession, session);
+            return;
+        }
+        if (mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end()) {
+            ALOGE("%s: ANW %p was not being prepared at all?", __FUNCTION__, anw);
+            return;
+        }
+        mPendingPreparedCbs[anw]--;
+        if (mPendingPreparedCbs[anw] == 0) {
+            mPendingPreparedCbs.erase(anw);
+        }
+    }
+    void incPendingPrepared(ACameraWindowType *anw) {
+        std::lock_guard<std::mutex> lock(mMutex);
+        if ((mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end())) {
+            mPendingPreparedCbs[anw] = 1;
+            return;
+        }
+        mPendingPreparedCbs[anw]++;
+    }
+
+    // ANW -> pending prepared callbacks
+    std::unordered_map<ACameraWindowType *, int> mPendingPreparedCbs;
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
+    ACameraCaptureSession_prepareCallback mPreparedCb = &onPreparedCb;
+
     const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
@@ -626,7 +693,7 @@
     }
 
     bool takePictures(const char* id, uint64_t readerUsage, int readerMaxImages,
-            bool readerAsync, int pictureCount, bool v2 = false) {
+            bool readerAsync, int pictureCount, bool v2 = false, bool prepareSurfaces = false) {
         int ret = 0;
 
         ImageReaderTestCase testCase(
@@ -641,7 +708,7 @@
         CameraHelper cameraHelper(id, mCameraManager);
         std::variant<int, ConfiguredWindows> retInit =
                 cameraHelper.initCamera(testCase.getNativeWindow(), {}/*physicalImageReaders*/,
-                                        false/*usePhysicalSettings*/);
+                                        false/*usePhysicalSettings*/, prepareSurfaces);
         int *retp = std::get_if<int>(&retInit);
         if (retp) {
             ALOGE("Unable to initialize camera helper");
@@ -670,18 +737,25 @@
         // Sleep until all capture finished
         for (int i = 0; i < kCaptureWaitRetry * pictureCount; i++) {
             usleep(kCaptureWaitUs);
-            if (testCase.getAcquiredImageCount() == pictureCount) {
+            bool receivedAllCallbacks = v2 ? cameraHelper.checkCallbacksV2(pictureCount)
+                                           : cameraHelper.checkCallbacks(pictureCount);
+
+            bool acquiredAllImages = testCase.getAcquiredImageCount() == pictureCount;
+            if (acquiredAllImages) {
                 ALOGI("Session take ~%d ms to capture %d images", i * kCaptureWaitUs / 1000,
                       pictureCount);
+            }
+            // Wait for all images to be acquired and all callbacks to be processed
+            if (acquiredAllImages && receivedAllCallbacks) {
                 break;
             }
         }
         return testCase.getAcquiredImageCount() == pictureCount &&
-               v2 ? cameraHelper.checkCallbacksV2(pictureCount) :
-                    cameraHelper.checkCallbacks(pictureCount);
+               v2 ? cameraHelper.checkCallbacksV2(pictureCount, /* printLog= */true) :
+                    cameraHelper.checkCallbacks(pictureCount, /* printLog= */true);
     }
 
-    bool testTakePicturesNative(const char* id) {
+    bool testTakePicturesNative(const char* id, bool prepareSurfaces) {
         for (auto& readerUsage :
              {AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}) {
             for (auto& readerMaxImages : {1, 4, 8}) {
@@ -689,7 +763,7 @@
                     for (auto& pictureCount : {1, 4, 8}) {
                         for ( auto & v2 : {true, false}) {
                             if (!takePictures(id, readerUsage, readerMaxImages,
-                                    readerAsync, pictureCount, v2)) {
+                                    readerAsync, pictureCount, v2, prepareSurfaces)) {
                                 ALOGE("Test takePictures failed for test case usage=%" PRIu64
                                       ", maxImages=%d, async=%d, pictureCount=%d",
                                       readerUsage, readerMaxImages, readerAsync, pictureCount);
@@ -869,39 +943,46 @@
 
         ACameraMetadata_free(staticMetadata);
     }
+
+    void testBasicTakePictures(bool prepareSurfaces) {
+        // We always use the first camera.
+        const char* cameraId = mCameraIdList->cameraIds[0];
+        ASSERT_TRUE(cameraId != nullptr);
+
+        ACameraMetadata* staticMetadata = nullptr;
+        camera_status_t ret = ACameraManager_getCameraCharacteristics(
+                mCameraManager, cameraId, &staticMetadata);
+        ASSERT_EQ(ret, ACAMERA_OK);
+        ASSERT_NE(staticMetadata, nullptr);
+
+        bool isBC = isCapabilitySupported(staticMetadata,
+                ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+
+        uint32_t namedTag = 0;
+        // Test that ACameraMetadata_getTagFromName works as expected for public tag
+        // names
+        camera_status_t status = ACameraManager_getTagFromName(mCameraManager, cameraId,
+                "android.control.aeMode", &namedTag);
+
+        ASSERT_EQ(status, ACAMERA_OK);
+        ASSERT_EQ(namedTag, ACAMERA_CONTROL_AE_MODE);
+
+        ACameraMetadata_free(staticMetadata);
+
+        if (!isBC) {
+            ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
+            return;
+        }
+
+        EXPECT_TRUE(testTakePicturesNative(cameraId, prepareSurfaces));
+    }
 };
 
+
+
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
-    // We always use the first camera.
-    const char* cameraId = mCameraIdList->cameraIds[0];
-    ASSERT_TRUE(cameraId != nullptr);
-
-    ACameraMetadata* staticMetadata = nullptr;
-    camera_status_t ret = ACameraManager_getCameraCharacteristics(
-            mCameraManager, cameraId, &staticMetadata);
-    ASSERT_EQ(ret, ACAMERA_OK);
-    ASSERT_NE(staticMetadata, nullptr);
-
-    bool isBC = isCapabilitySupported(staticMetadata,
-            ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
-
-    uint32_t namedTag = 0;
-    // Test that ACameraMetadata_getTagFromName works as expected for public tag
-    // names
-    camera_status_t status = ACameraManager_getTagFromName(mCameraManager, cameraId,
-            "android.control.aeMode", &namedTag);
-
-    ASSERT_EQ(status, ACAMERA_OK);
-    ASSERT_EQ(namedTag, ACAMERA_CONTROL_AE_MODE);
-
-    ACameraMetadata_free(staticMetadata);
-
-    if (!isBC) {
-        ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
-        return;
-    }
-
-    EXPECT_TRUE(testTakePicturesNative(cameraId));
+    testBasicTakePictures(/*prepareSurfaces*/ false);
+    testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
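
The prepare-window path added to this test reduces to the pattern sketched below; the helper name, the vector of windows, and the header path are assumptions for illustration, while the two `ACameraCaptureSession_*` calls are the ones the test uses.

```
#include <vector>

#include <camera/NdkCameraCaptureSession.h>  // header path assumed from the camera NDK

// Hedged sketch of the prepare-window flow exercised above.
static void onWindowPrepared(void* /*context*/, ACameraWindowType* /*window*/,
                             ACameraCaptureSession* /*session*/) {
    // A real client would mark the window as prepared, as handlePrepared() does above.
}

static camera_status_t prepareAllWindows(ACameraCaptureSession* session,
                                         const std::vector<ACameraWindowType*>& windows,
                                         void* context) {
    ACameraCaptureSession_setWindowPreparedCallback(session, context, onWindowPrepared);
    for (ACameraWindowType* window : windows) {
        camera_status_t ret = ACameraCaptureSession_prepareWindow(session, window);
        if (ret != ACAMERA_OK) {
            return ret;  // the test treats any failure here as fatal
        }
    }
    return ACAMERA_OK;  // expect one onWindowPrepared callback per prepared window
}
```

The test then counts one pending callback per window and sleeps briefly before verifying that every expected callback arrived.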
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 17ea512..1af5637 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -370,7 +370,7 @@
         // Check metadata binder call
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -386,7 +386,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
                 {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -429,7 +430,8 @@
             SCOPED_TRACE("openNewDevice");
             binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
                     {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                    /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                    /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                    /*overrideToPortrait*/false, /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 76dc38c..f2fa48c 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -74,7 +74,8 @@
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index efd9dae..6423709 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -182,7 +182,8 @@
 
         CameraMetadata metadata;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,7 +210,8 @@
         rc = mCameraService->connect(this, cameraId,
                 String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
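
Taken together, the test changes above track the widened `ICameraService` signatures. The sketch below restates the two camera2 call shapes in one place; the placeholder names, client string, and header paths are assumptions drawn from the tests rather than new API.

```
#include <android/api-level.h>
#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <android/hardware/cam2/ICameraDeviceUser.h>
#include <camera/CameraMetadata.h>
#include <utils/String16.h>

using namespace android;

// Hedged restatement of the updated call shapes; `service`, `cameraId`, and
// `callbacks` stand in for objects each test already holds.
static void exampleUpdatedCalls(const sp<hardware::ICameraService>& service,
                                const String16& cameraId,
                                const sp<hardware::camera2::ICameraDeviceCallbacks>& callbacks) {
    CameraMetadata metadata;
    binder::Status res = service->getCameraCharacteristics(
            cameraId, /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
            /*overrideToPortrait*/ false, &metadata);

    sp<hardware::camera2::ICameraDeviceUser> device;
    res = service->connectDevice(callbacks, cameraId, String16("example-client"), {},
            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
            /*overrideToPortrait*/ false, /*out*/ &device);
    (void) res;
}
```

The camera1 `connect()` path in CameraZSLTests additionally gains a `forceSlowJpegMode` flag; the surrounding test logic is otherwise unchanged.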
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..bae8706
--- /dev/null
+++ b/camera/tests/fuzzer/Android.bp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_camera_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
+cc_defaults {
+    name: "camera_defaults",
+    static_libs: [
+        "libcamera_client",
+    ],
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libgui",
+        "libcamera_metadata",
+        "libnativewindow",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "camera_fuzzer",
+    srcs: [
+        "camera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2CaptureRequest_fuzzer",
+    srcs: [
+        "camera_c2CaptureRequest_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2ConcurrentCamera_fuzzer",
+    srcs: [
+        "camera_c2ConcurrentCamera_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SubmitInfo_fuzzer",
+    srcs: [
+        "camera_c2SubmitInfo_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2SessionConfiguration_fuzzer",
+    srcs: [
+        "camera_c2SessionConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_c2OutputConfiguration_fuzzer",
+    srcs: [
+        "camera_c2OutputConfiguration_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_vendorTagDescriptor_fuzzer",
+    srcs: [
+        "camera_vendorTagDescriptor_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+    include_dirs: [
+        "system/media/camera/tests",
+        "system/media/private/camera/include",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_Parameters_fuzzer",
+    srcs: [
+        "camera_Parameters_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_SessionStats_fuzzer",
+    srcs: [
+        "camera_SessionStats_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "camera_captureResult_fuzzer",
+    srcs: [
+        "camera_captureResult_fuzzer.cpp",
+    ],
+    defaults: [
+        "camera_defaults",
+    ],
+}
diff --git a/camera/tests/fuzzer/README.md b/camera/tests/fuzzer/README.md
new file mode 100644
index 0000000..c07ac04
--- /dev/null
+++ b/camera/tests/fuzzer/README.md
@@ -0,0 +1,74 @@
+# Fuzzers for libcamera_client
+
+## Plugin Design Considerations
+The fuzzer plugins for libcamera_client are designed based on an understanding of the
+source code and aim to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzers.
+
+libcamera_client supports the following parameters:
+1. Command (parameter name: `cmd`)
+2. Video Buffer Mode (parameter name: `videoBufferMode`)
+3. Preview Callback Flag (parameter name: `previewCallbackFlag`)
+4. Facing (parameter name: `facing`)
+5. Orientation (parameter name: `orientation`)
+6. Format (parameter name: `format`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `cmd` | 0.`CAMERA_CMD_START_SMOOTH_ZOOM` 1.`CAMERA_CMD_STOP_SMOOTH_ZOOM` 2.`CAMERA_CMD_SET_DISPLAY_ORIENTATION` 3.`CAMERA_CMD_ENABLE_SHUTTER_SOUND` 4.`CAMERA_CMD_PLAY_RECORDING_SOUND` 5.`CAMERA_CMD_START_FACE_DETECTION` 6.`CAMERA_CMD_STOP_FACE_DETECTION` 7.`CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG` 8.`CAMERA_CMD_PING` 9.`CAMERA_CMD_SET_VIDEO_BUFFER_COUNT` 10.`CAMERA_CMD_SET_VIDEO_FORMAT`| Value obtained from FuzzedDataProvider|
+| `videoBufferMode` |0. `ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV` 1.`ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA` 2.`ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE`| Value obtained from FuzzedDataProvider|
+| `previewCallbackFlag` | 0. `CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK` 1.`CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK` 2.`CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK` 3.`CAMERA_FRAME_CALLBACK_FLAG_NOOP` 4.`CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER` 5.`CAMERA_FRAME_CALLBACK_FLAG_CAMERA` 6.`CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER`| Value obtained from FuzzedDataProvider|
+| `facing` | 0.`android::hardware::CAMERA_FACING_BACK` 1.`android::hardware::CAMERA_FACING_FRONT`| Value obtained from FuzzedDataProvider|
+| `orientation` | 0.`0` 1.`90` 2.`180` 3.`270`| Value obtained from FuzzedDataProvider|
+| `format` | 0.`CameraParameters::PIXEL_FORMAT_YUV422SP` 1.`CameraParameters::PIXEL_FORMAT_YUV420SP` 2.`CameraParameters::PIXEL_FORMAT_YUV422I` 3.`CameraParameters::PIXEL_FORMAT_YUV420P` 4.`CameraParameters::PIXEL_FORMAT_RGB565` 5.`CameraParameters::PIXEL_FORMAT_RGBA8888` 6.`CameraParameters::PIXEL_FORMAT_JPEG` 7.`CameraParameters::PIXEL_FORMAT_BAYER_RGGB` 8.`CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE`| Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugins are always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugins feed the entire input data to the module.
+This ensures that the plugins tolerate any kind of input (empty, huge,
+malformed, etc.) and don't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the camera_fuzzer, camera_c2CaptureRequest_fuzzer, camera_c2ConcurrentCamera_fuzzer, camera_c2SubmitInfo_fuzzer, camera_c2SessionConfiguration_fuzzer, camera_c2OutputConfiguration_fuzzer, camera_vendorTagDescriptor_fuzzer, camera_Parameters_fuzzer, camera_SessionStats_fuzzer, and camera_captureResult_fuzzer binaries.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) camera_fuzzer
+  $ mm -j$(nproc) camera_c2CaptureRequest_fuzzer
+  $ mm -j$(nproc) camera_c2ConcurrentCamera_fuzzer
+  $ mm -j$(nproc) camera_c2SubmitInfo_fuzzer
+  $ mm -j$(nproc) camera_c2SessionConfiguration_fuzzer
+  $ mm -j$(nproc) camera_c2OutputConfiguration_fuzzer
+  $ mm -j$(nproc) camera_vendorTagDescriptor_fuzzer
+  $ mm -j$(nproc) camera_Parameters_fuzzer
+  $ mm -j$(nproc) camera_SessionStats_fuzzer
+  $ mm -j$(nproc) camera_captureResult_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_fuzzer/camera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2CaptureRequest_fuzzer/camera_c2CaptureRequest_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2ConcurrentCamera_fuzzer/camera_c2ConcurrentCamera_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SubmitInfo_fuzzer/camera_c2SubmitInfo_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2SessionConfiguration_fuzzer/camera_c2SessionConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_c2OutputConfiguration_fuzzer/camera_c2OutputConfiguration_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_vendorTagDescriptor_fuzzer/camera_vendorTagDescriptor_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_Parameters_fuzzer/camera_Parameters_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_SessionStats_fuzzer/camera_SessionStats_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/camera_captureResult_fuzzer/camera_captureResult_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
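
To make the table-driven selection described in the README above concrete, a minimal, hypothetical fuzz target might pick one of the configured values from the input like this; it is a sketch of the approach, not one of the targets added by this change.

```
#include <stddef.h>
#include <stdint.h>

#include <fuzzer/FuzzedDataProvider.h>

// Hypothetical sketch: derive a configuration value from the fuzz input
// instead of hardcoding it, as the README above describes.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);

    // `orientation` is one of the parameters listed in the table above.
    const int32_t kOrientations[] = {0, 90, 180, 270};
    int32_t orientation = fdp.PickValueInArray(kOrientations);
    (void)orientation;  // a real target would feed this to the module under test

    // The remaining bytes would drive the rest of the API surface.
    return 0;
}
```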
diff --git a/camera/tests/fuzzer/camera2common.h b/camera/tests/fuzzer/camera2common.h
new file mode 100644
index 0000000..14a1b1b
--- /dev/null
+++ b/camera/tests/fuzzer/camera2common.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CAMERA2COMMON_H
+#define CAMERA2COMMON_H
+
+#include <binder/Parcel.h>
+
+using namespace android;
+
+template <class type>
+void invokeReadWriteNullParcel(type* obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
+template <class type>
+void invokeReadWriteNullParcelsp(sp<type> obj) {
+    Parcel* parcelNull = nullptr;
+    obj->writeToParcel(parcelNull);
+    obj->readFromParcel(parcelNull);
+}
+
+template <class type>
+void invokeReadWriteParcel(type* obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
+
+template <class type>
+void invokeReadWriteParcelsp(sp<type> obj) {
+    Parcel* parcel = new Parcel();
+    obj->writeToParcel(parcel);
+    parcel->setDataPosition(0);
+    obj->readFromParcel(parcel);
+    delete parcel;
+}
+
+#endif  // CAMERA2COMMON_H
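
As a short usage note for the helpers above: any object that exposes `writeToParcel()`/`readFromParcel()` can be round-tripped. The sketch below is a hedged example using a default-constructed `CameraMetadata`, mirroring how the fuzzers that follow call these templates; the include path is the one those fuzzers use.

```
#include <CameraMetadata.h>

#include "camera2common.h"

// Hedged sketch: exercise both the valid-Parcel and the null-Parcel paths.
static void exampleParcelRoundTrip() {
    android::CameraMetadata metadata;
    invokeReadWriteParcel<android::CameraMetadata>(&metadata);      // write, rewind, read back
    invokeReadWriteNullParcel<android::CameraMetadata>(&metadata);  // nullptr Parcel error path
}
```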
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
new file mode 100644
index 0000000..45b3526
--- /dev/null
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraParameters.h>
+#include <CameraParameters2.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+
+using namespace std;
+using namespace android;
+
+string kValidFormats[] = {
+        CameraParameters::PIXEL_FORMAT_YUV422SP,      CameraParameters::PIXEL_FORMAT_YUV420SP,
+        CameraParameters::PIXEL_FORMAT_YUV422I,       CameraParameters::PIXEL_FORMAT_YUV420P,
+        CameraParameters::PIXEL_FORMAT_RGB565,        CameraParameters::PIXEL_FORMAT_RGBA8888,
+        CameraParameters::PIXEL_FORMAT_JPEG,          CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+        CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE};
+
+class CameraParametersFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraParametersFuzzer() {
+        delete mCameraParameters;
+        delete mCameraParameters2;
+    }
+
+  private:
+    void invokeCameraParameters();
+    template <class type>
+    void initCameraParameters(type** obj);
+    template <class type>
+    void cameraParametersCommon(type* obj);
+    CameraParameters* mCameraParameters = nullptr;
+    CameraParameters2* mCameraParameters2 = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
+template <class type>
+void CameraParametersFuzzer::initCameraParameters(type** obj) {
+    if (mFDP->ConsumeBool()) {
+        *obj = new type();
+    } else {
+        string params;
+        if (mFDP->ConsumeBool()) {
+            int32_t width = mFDP->ConsumeIntegral<int32_t>();
+            int32_t height = mFDP->ConsumeIntegral<int32_t>();
+            int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+            int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+            params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+            params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+                params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+                params += '=' + to_string(width) + 'x' + to_string(height) + ';';
+            }
+            if (mFDP->ConsumeBool()) {
+                params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+                params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+            }
+        } else {
+            params = mFDP->ConsumeRandomLengthString();
+        }
+        *obj = new type(String8(params.c_str()));
+    }
+}
+
+template <class type>
+void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
+    Vector<Size> supportedPreviewSizes;
+    obj->getSupportedPreviewSizes(supportedPreviewSizes);
+    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewSize(previewWidth, previewHeight);
+    obj->getPreviewSize(&previewWidth, &previewHeight);
+
+    Vector<Size> supportedVideoSizes;
+    obj->getSupportedVideoSizes(supportedVideoSizes);
+    if (supportedVideoSizes.size() != 0) {
+        int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
+        if (mFDP->ConsumeBool()) {
+            int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
+            obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
+        } else {
+            videoWidth = mFDP->ConsumeIntegral<int32_t>();
+            videoHeight = mFDP->ConsumeIntegral<int32_t>();
+            obj->setVideoSize(videoWidth, videoHeight);
+        }
+        obj->getVideoSize(&videoWidth, &videoHeight);
+        obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
+    }
+
+    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+    obj->setPreviewFrameRate(fps);
+    obj->getPreviewFrameRate();
+    string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPreviewFormat(previewFormat.c_str());
+
+    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+    Vector<Size> supportedPictureSizes;
+    obj->setPictureSize(pictureWidth, pictureHeight);
+    obj->getPictureSize(&pictureWidth, &pictureHeight);
+    obj->getSupportedPictureSizes(supportedPictureSizes);
+    string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                               : mFDP->ConsumeRandomLengthString();
+    obj->setPictureFormat(pictureFormat.c_str());
+    obj->getPictureFormat();
+
+    if (mFDP->ConsumeBool()) {
+        obj->dump();
+    } else {
+        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+        Vector<String16> args = {};
+        obj->dump(fd, args);
+        close(fd);
+    }
+}
+
+void CameraParametersFuzzer::invokeCameraParameters() {
+    initCameraParameters<CameraParameters>(&mCameraParameters);
+    cameraParametersCommon<CameraParameters>(mCameraParameters);
+    initCameraParameters<CameraParameters2>(&mCameraParameters2);
+    cameraParametersCommon<CameraParameters2>(mCameraParameters2);
+
+    int32_t minFPS, maxFPS;
+    mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
+    string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
+                                        : mFDP->ConsumeRandomLengthString();
+    mCameraParameters->previewFormatToEnum(format.c_str());
+    mCameraParameters->isEmpty();
+    Vector<int32_t> formats;
+    mCameraParameters->getSupportedPreviewFormats(formats);
+}
+
+void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCameraParameters();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    CameraParametersFuzzer cameraParametersFuzzer;
+    cameraParametersFuzzer.process(data, size);
+    return 0;
+}
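
For context on the strings `initCameraParameters()` assembles above, a hand-written, well-formed equivalent might look like the sketch below; the concrete sizes, fps range, and format are illustrative values rather than anything taken from the fuzzer.

```
#include <string>

#include <CameraParameters.h>
#include <utils/String8.h>

// Hedged sketch: a well-formed flattened "key=value;" parameter string of the
// kind the fuzzer builds from FuzzedDataProvider output.
static android::String8 exampleFlattenedParams() {
    using android::CameraParameters;

    std::string params;
    params += CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
    params += "=640x480;";
    params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
    params += "=15000,30000;";
    params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
    params += "=";
    params += CameraParameters::PIXEL_FORMAT_YUV420SP;
    params += ";";
    return android::String8(params.c_str());
}
```

Constructing `CameraParameters` (or `CameraParameters2`) from such a string follows the same `String8` constructor path the fuzzer exercises.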
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
new file mode 100644
index 0000000..2f2ad77
--- /dev/null
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraSessionStats.h>
+#include <binder/Parcel.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    CameraStreamStats* cameraStreamStats = nullptr;
+    Parcel parcelCamStreamStats;
+
+    if (fdp.ConsumeBool()) {
+        cameraStreamStats = new CameraStreamStats();
+    } else {
+        int32_t width = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(width);
+        }
+        int32_t height = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(height);
+        }
+        int32_t format = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(format);
+        }
+        float maxPreviewFps = fdp.ConsumeFloatingPoint<float>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeFloat(maxPreviewFps);
+        }
+        int32_t dataSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dataSpace);
+        }
+        int64_t usage = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(usage);
+        }
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(requestCount);
+        }
+        int64_t errorCount = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt64(errorCount);
+        }
+        int32_t maxHalBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxHalBuffers);
+        }
+        int32_t maxAppBuffers = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(maxAppBuffers);
+        }
+        int32_t dynamicRangeProfile = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(dynamicRangeProfile);
+        }
+        int32_t streamUseCase = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(streamUseCase);
+        }
+        int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamStreamStats.writeInt32(colorSpace);
+        }
+
+        cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
+                                                  usage, maxHalBuffers, maxAppBuffers,
+                                                  dynamicRangeProfile, streamUseCase, colorSpace);
+    }
+
+    parcelCamStreamStats.setDataPosition(0);
+    cameraStreamStats->readFromParcel(&parcelCamStreamStats);
+    invokeReadWriteNullParcel<CameraStreamStats>(cameraStreamStats);
+    invokeReadWriteParcel<CameraStreamStats>(cameraStreamStats);
+
+    CameraSessionStats* cameraSessionStats = nullptr;
+    Parcel parcelCamSessionStats;
+
+    if (fdp.ConsumeBool()) {
+        cameraSessionStats = new CameraSessionStats();
+    } else {
+        string camId = fdp.ConsumeRandomLengthString();
+        String16 cameraId(camId.c_str());
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(cameraId);
+        }
+        int32_t facing = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(facing);
+        }
+        int32_t newCameraState = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(newCameraState);
+        }
+        string name = fdp.ConsumeRandomLengthString();
+        String16 clientName(name.c_str());
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeString16(clientName);
+        }
+        int32_t apiLevel = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(apiLevel);
+        }
+        bool isNdk = fdp.ConsumeBool();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeBool(isNdk);
+        }
+        int32_t latencyMs = fdp.ConsumeIntegral<int32_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt32(latencyMs);
+        }
+
+        int64_t logId = fdp.ConsumeIntegral<int64_t>();
+        if (fdp.ConsumeBool()) {
+            parcelCamSessionStats.writeInt64(logId);
+        }
+
+        cameraSessionStats = new CameraSessionStats(cameraId, facing, newCameraState, clientName,
+                                                    apiLevel, isNdk, latencyMs, logId);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int32_t internalReconfigure = fdp.ConsumeIntegral<int32_t>();
+        parcelCamSessionStats.writeInt32(internalReconfigure);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t requestCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(requestCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        int64_t resultErrorCount = fdp.ConsumeIntegral<int64_t>();
+        parcelCamSessionStats.writeInt64(resultErrorCount);
+    }
+
+    if (fdp.ConsumeBool()) {
+        bool deviceError = fdp.ConsumeBool();
+        parcelCamSessionStats.writeBool(deviceError);
+    }
+
+    parcelCamSessionStats.setDataPosition(0);
+    cameraSessionStats->readFromParcel(&parcelCamSessionStats);
+    invokeReadWriteNullParcel<CameraSessionStats>(cameraSessionStats);
+    invokeReadWriteParcel<CameraSessionStats>(cameraSessionStats);
+
+    delete cameraStreamStats;
+    delete cameraSessionStats;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
new file mode 100644
index 0000000..06215a5
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CameraMetadata.h>
+#include <camera2/CaptureRequest.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/view/Surface.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kNonZeroRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 1;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    sp<CaptureRequest> captureRequest = new CaptureRequest();
+    Parcel parcelCamCaptureReq;
+
+    size_t physicalCameraSettingsSize =
+            fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(physicalCameraSettingsSize);
+    }
+
+    for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
+        string id = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeString16(String16(id.c_str()));
+        }
+        CameraMetadata cameraMetadata;
+        if (fdp.ConsumeBool()) {
+            cameraMetadata = CameraMetadata();
+        } else {
+            size_t entryCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+            size_t dataCapacity = fdp.ConsumeIntegralInRange<size_t>(kNonZeroRangeMin, kRangeMax);
+            cameraMetadata = CameraMetadata(entryCapacity, dataCapacity);
+        }
+        captureRequest->mPhysicalCameraSettings.push_back({id, cameraMetadata});
+        if (fdp.ConsumeBool()) {
+            cameraMetadata.writeToParcel(&parcelCamCaptureReq);
+        }
+    }
+
+    captureRequest->mIsReprocess = fdp.ConsumeBool();
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(captureRequest->mIsReprocess);
+    }
+
+    captureRequest->mSurfaceConverted = fdp.ConsumeBool();
+    if (fdp.ConsumeBool() && captureRequest->mSurfaceConverted) {
+        // 0-sized array
+        parcelCamCaptureReq.writeInt32(0);
+    }
+
+    if (!captureRequest->mSurfaceConverted) {
+        size_t surfaceListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceListSize);
+        }
+        for (size_t idx = 0; idx < surfaceListSize; ++idx) {
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                captureRequest->mSurfaceList.push_back(surface);
+                if (fdp.ConsumeBool()) {
+                    view::Surface surfaceShim;
+                    surfaceShim.name = String16((fdp.ConsumeRandomLengthString()).c_str());
+                    surfaceShim.graphicBufferProducer = surface->getIGraphicBufferProducer();
+                    surfaceShim.writeToParcel(&parcelCamCaptureReq);
+                }
+                surface.clear();
+            }
+            composerClient.clear();
+            surfaceControl.clear();
+        }
+    }
+
+    size_t indexListSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+    if (fdp.ConsumeBool()) {
+        parcelCamCaptureReq.writeInt32(indexListSize);
+    }
+
+    for (size_t idx = 0; idx < indexListSize; ++idx) {
+        int32_t streamIdx = fdp.ConsumeIntegral<int32_t>();
+        int32_t surfaceIdx = fdp.ConsumeIntegral<int32_t>();
+        captureRequest->mStreamIdxList.push_back(streamIdx);
+        captureRequest->mSurfaceIdxList.push_back(surfaceIdx);
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(streamIdx);
+        }
+        if (fdp.ConsumeBool()) {
+            parcelCamCaptureReq.writeInt32(surfaceIdx);
+        }
+    }
+
+    invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
+    parcelCamCaptureReq.setDataPosition(0);
+    captureRequest->readFromParcel(&parcelCamCaptureReq);
+    captureRequest.clear();
+    return 0;
+}
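All of the fuzzers added in this change draw their inputs through FuzzedDataProvider rather than reading the raw byte stream directly. The sketch below is purely illustrative (none of these exact bounds or variable names appear in the change) and shows the three calls the fuzzers lean on; once the input is exhausted the calls return benign defaults (false, the range minimum, an empty string), so they never read out of bounds.

#include <fuzzer/FuzzedDataProvider.h>
#include <string>

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    // One byte decides whether an optional field gets written to the Parcel.
    bool writeOptional = fdp.ConsumeBool();
    // Bounded integral keeps loop counts and capacities within a sane range.
    size_t count = fdp.ConsumeIntegralInRange<size_t>(0, 1000);
    // Random-length string; a backslash in the input acts as terminator/escape.
    std::string id = fdp.ConsumeRandomLengthString();
    (void)writeOptional; (void)count; (void)id;
    return 0;
}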
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
new file mode 100644
index 0000000..12b5bc3
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/ConcurrentCamera.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    ConcurrentCameraIdCombination camIdCombination;
+
+    if (fdp.ConsumeBool()) {
+        size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+        for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
+            string concurrentCameraId = fdp.ConsumeRandomLengthString();
+            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+        }
+    }
+
+    invokeReadWriteNullParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+    invokeReadWriteParcel<ConcurrentCameraIdCombination>(&camIdCombination);
+
+    CameraIdAndSessionConfiguration camIdAndSessionConfig;
+
+    if (fdp.ConsumeBool()) {
+        camIdAndSessionConfig.mCameraId = fdp.ConsumeRandomLengthString();
+        if (fdp.ConsumeBool()) {
+            camIdAndSessionConfig.mSessionConfiguration = SessionConfiguration();
+        } else {
+            int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+            int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+            int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+            camIdAndSessionConfig.mSessionConfiguration =
+                    SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+        }
+    }
+
+    invokeReadWriteNullParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    invokeReadWriteParcel<CameraIdAndSessionConfiguration>(&camIdAndSessionConfig);
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..51ac4e8
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+    } else {
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string phyCameraId = fdp.ConsumeRandomLengthString();
+        String16 physicalCameraId(phyCameraId.c_str());
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+
+        if (fdp.ConsumeBool()) {
+            sp<IGraphicBufferProducer> iGBP = nullptr;
+            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                    fdp.ConsumeIntegral<uint32_t>() /* width */,
+                    fdp.ConsumeIntegral<uint32_t>() /* height */,
+                    fdp.ConsumeIntegral<int32_t>() /* format */,
+                    fdp.ConsumeIntegral<int32_t>() /* flags */);
+            if (surfaceControl) {
+                sp<Surface> surface = surfaceControl->getSurface();
+                iGBP = surface->getIGraphicBufferProducer();
+            }
+            outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+            iGBP.clear();
+            composerClient.clear();
+            surfaceControl.clear();
+        } else {
+            size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+            vector<sp<IGraphicBufferProducer>> iGBPs;
+            for (size_t idx = 0; idx < iGBPSize; ++idx) {
+                sp<IGraphicBufferProducer> iGBP = nullptr;
+                sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+                sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                        static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        fdp.ConsumeIntegral<uint32_t>() /* width */,
+                        fdp.ConsumeIntegral<uint32_t>() /* height */,
+                        fdp.ConsumeIntegral<int32_t>() /* format */,
+                        fdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (surfaceControl) {
+                    sp<Surface> surface = surfaceControl->getSurface();
+                    iGBP = surface->getIGraphicBufferProducer();
+                    iGBPs.push_back(iGBP);
+                }
+                iGBP.clear();
+                composerClient.clear();
+                surfaceControl.clear();
+            }
+            outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
+                                                          surfaceSetID, isShared);
+        }
+    }
+
+    outputConfiguration->getRotation();
+    outputConfiguration->getSurfaceSetID();
+    outputConfiguration->getSurfaceType();
+    outputConfiguration->getWidth();
+    outputConfiguration->getHeight();
+    outputConfiguration->isDeferred();
+    outputConfiguration->isShared();
+    outputConfiguration->getPhysicalCameraId();
+
+    OutputConfiguration outputConfiguration2;
+    outputConfiguration->gbpsEqual(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
+    outputConfiguration->gbpsLessThan(outputConfiguration2);
+    outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
+    outputConfiguration->getGraphicBufferProducers();
+    sp<IGraphicBufferProducer> gbp;
+    outputConfiguration->addGraphicProducer(gbp);
+    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
+    invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
new file mode 100644
index 0000000..b2de95d
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/OutputConfiguration.h>
+#include <camera2/SessionConfiguration.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::params;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+
+    SessionConfiguration* sessionConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        sessionConfiguration = new SessionConfiguration();
+    } else {
+        int32_t inputWidth = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputHeight = fdp.ConsumeIntegral<int32_t>();
+        int32_t inputFormat = fdp.ConsumeIntegral<int32_t>();
+        int32_t operatingMode = fdp.ConsumeIntegral<int32_t>();
+        sessionConfiguration =
+                new SessionConfiguration(inputWidth, inputHeight, inputFormat, operatingMode);
+    }
+
+    sessionConfiguration->getInputWidth();
+    sessionConfiguration->getInputHeight();
+    sessionConfiguration->getInputFormat();
+    sessionConfiguration->getOperatingMode();
+
+    OutputConfiguration* outputConfiguration = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        outputConfiguration = new OutputConfiguration();
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    } else {
+        sp<IGraphicBufferProducer> iGBP = nullptr;
+        sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+        sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+                static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()),
+                fdp.ConsumeIntegral<uint32_t>(), fdp.ConsumeIntegral<uint32_t>(),
+                fdp.ConsumeIntegral<int32_t>(), fdp.ConsumeIntegral<int32_t>());
+        if (surfaceControl) {
+            sp<Surface> surface = surfaceControl->getSurface();
+            iGBP = surface->getIGraphicBufferProducer();
+            surface.clear();
+        }
+        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
+        string phyCameraId = fdp.ConsumeRandomLengthString();
+        String16 physicalCameraId(phyCameraId.c_str());
+        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
+        bool isShared = fdp.ConsumeBool();
+        outputConfiguration =
+                new OutputConfiguration(iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+        sessionConfiguration->addOutputConfiguration(*outputConfiguration);
+    }
+
+    sessionConfiguration->getOutputConfigurations();
+    SessionConfiguration sessionConfiguration2;
+    sessionConfiguration->outputsEqual(sessionConfiguration2);
+    sessionConfiguration->outputsLessThan(sessionConfiguration2);
+    sessionConfiguration->inputIsMultiResolution();
+
+    invokeReadWriteNullParcel<SessionConfiguration>(sessionConfiguration);
+    invokeReadWriteParcel<SessionConfiguration>(sessionConfiguration);
+
+    delete sessionConfiguration;
+    delete outputConfiguration;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
new file mode 100644
index 0000000..dc40b0f
--- /dev/null
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <camera2/SubmitInfo.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::utils;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    SubmitInfo submitInfo;
+    submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
+    submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
+    invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    return 0;
+}
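The invokeReadWriteParcel / invokeReadWriteNullParcel / invokeReadWriteParcelsp helpers used throughout these fuzzers come from camera2common.h (added with these fuzzers) and are not repeated here. As a rough sketch of the pattern they exercise (assuming they simply round-trip a Parcelable; the real header may differ), a write/rewind/read cycle looks like this:

#include <binder/Parcel.h>

// Hypothetical helper in the spirit of invokeReadWriteParcel<T>(); the actual
// implementation lives in camera2common.h and may differ.
template <class ParcelableType>
void roundTripParcel(ParcelableType* obj) {
    android::Parcel parcel;
    obj->writeToParcel(&parcel);   // serialize the object
    parcel.setDataPosition(0);     // rewind before reading back
    obj->readFromParcel(&parcel);  // deserialize into the same object
}

The capture-request fuzzer above additionally assembles a Parcel by hand before calling readFromParcel(), so the read path is also exercised against layouts a simple round trip would never produce.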
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
new file mode 100644
index 0000000..1396431
--- /dev/null
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <CaptureResult.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+using namespace android::hardware::camera2::impl;
+
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    PhysicalCaptureResultInfo* physicalCaptureResultInfo = nullptr;
+
+    if (fdp.ConsumeBool()) {
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo();
+    } else {
+        string camId = fdp.ConsumeRandomLengthString();
+        String16 cameraId(camId.c_str());
+        CameraMetadata cameraMetadata = CameraMetadata();
+        physicalCaptureResultInfo = new PhysicalCaptureResultInfo(cameraId, cameraMetadata);
+    }
+
+    invokeReadWriteParcel<PhysicalCaptureResultInfo>(physicalCaptureResultInfo);
+
+    CaptureResult* captureResult = new CaptureResult();
+
+    if (fdp.ConsumeBool()) {
+        captureResult->mMetadata = CameraMetadata();
+    }
+    if (fdp.ConsumeBool()) {
+        captureResult->mResultExtras = CaptureResultExtras();
+        string errCamId = fdp.ConsumeRandomLengthString();
+        String16 errCameraId(errCamId.c_str());
+        captureResult->mResultExtras.errorPhysicalCameraId = errCameraId;
+        captureResult->mResultExtras.isValid();
+        invokeReadWriteNullParcel<CaptureResultExtras>(&(captureResult->mResultExtras));
+    }
+    if (fdp.ConsumeBool()) {
+        size_t physicalMetadatasSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        for (size_t idx = 0; idx < physicalMetadatasSize; ++idx) {
+            captureResult->mPhysicalMetadatas.push_back(PhysicalCaptureResultInfo());
+        }
+    }
+
+    invokeReadWriteNullParcel<CaptureResult>(captureResult);
+    invokeReadWriteParcel<CaptureResult>(captureResult);
+    CaptureResult captureResult2(*captureResult);
+    CaptureResult captureResult3(std::move(captureResult2));
+
+    delete captureResult;
+    delete physicalCaptureResultInfo;
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
new file mode 100644
index 0000000..f9ef98e
--- /dev/null
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -0,0 +1,405 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <Camera.h>
+#include <CameraBase.h>
+#include <CameraMetadata.h>
+#include <CameraParameters.h>
+#include <CameraUtils.h>
+#include <VendorTagDescriptor.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryDealer.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <utils/Log.h>
+#include "camera2common.h"
+#include <android/hardware/ICameraService.h>
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr int32_t kFrameRateMin = 1;
+constexpr int32_t kFrameRateMax = 120;
+constexpr int32_t kCamIdMin = 0;
+constexpr int32_t kCamIdMax = 1;
+constexpr int32_t kNumMin = 0;
+constexpr int32_t kNumMax = 1024;
+constexpr int32_t kMemoryDealerSize = 1000;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kSizeMin = 0;
+constexpr int32_t kSizeMax = 1000;
+
+constexpr int32_t kValidCMD[] = {CAMERA_CMD_START_SMOOTH_ZOOM,
+                                 CAMERA_CMD_STOP_SMOOTH_ZOOM,
+                                 CAMERA_CMD_SET_DISPLAY_ORIENTATION,
+                                 CAMERA_CMD_ENABLE_SHUTTER_SOUND,
+                                 CAMERA_CMD_PLAY_RECORDING_SOUND,
+                                 CAMERA_CMD_START_FACE_DETECTION,
+                                 CAMERA_CMD_STOP_FACE_DETECTION,
+                                 CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG,
+                                 CAMERA_CMD_PING,
+                                 CAMERA_CMD_SET_VIDEO_BUFFER_COUNT,
+                                 CAMERA_CMD_SET_VIDEO_FORMAT};
+
+constexpr int32_t kValidVideoBufferMode[] = {ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV,
+                                             ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA,
+                                             ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE};
+
+constexpr int32_t kValidPreviewCallbackFlag[] = {
+        CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK,    CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK,
+        CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK,  CAMERA_FRAME_CALLBACK_FLAG_NOOP,
+        CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER,      CAMERA_FRAME_CALLBACK_FLAG_CAMERA,
+        CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER};
+
+constexpr int32_t kValidFacing[] = {android::hardware::CAMERA_FACING_BACK,
+                                    android::hardware::CAMERA_FACING_FRONT};
+
+constexpr int32_t kValidOrientation[] = {0, 90, 180, 270};
+
+class TestCameraListener : public CameraListener {
+  public:
+    virtual ~TestCameraListener() = default;
+
+    void notify(int32_t /*msgType*/, int32_t /*ext1*/, int32_t /*ext2*/) override { return; };
+    void postData(int32_t /*msgType*/, const sp<IMemory>& /*dataPtr*/,
+                  camera_frame_metadata_t* /*metadata*/) override {
+        return;
+    };
+    void postDataTimestamp(nsecs_t /*timestamp*/, int32_t /*msgType*/,
+                           const sp<IMemory>& /*dataPtr*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestamp(nsecs_t /*timestamp*/,
+                                           native_handle_t* /*handle*/) override {
+        return;
+    };
+    void postRecordingFrameHandleTimestampBatch(
+            const std::vector<nsecs_t>& /*timestamps*/,
+            const std::vector<native_handle_t*>& /*handles*/) override {
+        return;
+    };
+};
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~CameraFuzzer() {
+        delete mCameraMetadata;
+        mComposerClient.clear();
+        mSurfaceControl.clear();
+        mSurface.clear();
+        mCamera.clear();
+        mMemoryDealer.clear();
+        mIMem.clear();
+        mCameraListener.clear();
+        mCameraService.clear();
+    }
+
+  private:
+    bool initCamera();
+    void initCameraMetadata();
+    void invokeCamera();
+    void invokeCameraUtils();
+    void invokeCameraBase();
+    void invokeCameraMetadata();
+    void invokeSetParameters();
+    sp<Camera> mCamera = nullptr;
+    CameraMetadata* mCameraMetadata = nullptr;
+    sp<SurfaceComposerClient> mComposerClient = nullptr;
+    sp<SurfaceControl> mSurfaceControl = nullptr;
+    sp<Surface> mSurface = nullptr;
+    sp<MemoryDealer> mMemoryDealer = nullptr;
+    sp<IMemory> mIMem = nullptr;
+    sp<TestCameraListener> mCameraListener = nullptr;
+    sp<ICameraService> mCameraService = nullptr;
+    sp<ICamera> cameraDevice = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+
+    // CameraClient interface
+    void notifyCallback(int32_t, int32_t, int32_t) override { return; };
+    void dataCallback(int32_t, const sp<IMemory>&, camera_frame_metadata_t*) override { return; };
+    void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory>&) override { return; };
+    void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t*) override { return; };
+    void recordingFrameHandleCallbackTimestampBatch(const std::vector<nsecs_t>&,
+                                                    const std::vector<native_handle_t*>&) override {
+        return;
+    };
+};
+
+bool CameraFuzzer::initCamera() {
+    ProcessState::self()->startThreadPool();
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.camera"));
+    mCameraService = interface_cast<ICameraService>(binder);
+    mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
+                            String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
+                            hardware::ICameraService::USE_CALLING_PID,
+                            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                            /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+    mCamera = Camera::create(cameraDevice);
+    if (!mCamera) {
+        return false;
+    }
+    return true;
+}
+
+void CameraFuzzer::invokeSetParameters() {
+    String8 s = mCamera->getParameters();
+    CameraParameters params(s);
+    int32_t width = mFDP->ConsumeIntegral<int32_t>();
+    int32_t height = mFDP->ConsumeIntegral<int32_t>();
+    params.setVideoSize(width, height);
+    int32_t frameRate = mFDP->ConsumeIntegralInRange<int32_t>(kFrameRateMin, kFrameRateMax);
+    params.setPreviewFrameRate(frameRate);
+    mCamera->setParameters(params.flatten());
+}
+
+void CameraFuzzer::invokeCamera() {
+    if (!initCamera()) {
+        return;
+    }
+
+    int32_t cameraId = mFDP->ConsumeIntegralInRange<int32_t>(kCamIdMin, kCamIdMax);
+    Camera::getNumberOfCameras();
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    mCamera->reconnect();
+
+    mComposerClient = new SurfaceComposerClient;
+    mSurfaceControl = mComposerClient->createSurface(
+            static_cast<String8>(mFDP->ConsumeRandomLengthString().c_str()) /* name */,
+            mFDP->ConsumeIntegral<uint32_t>() /* width */,
+            mFDP->ConsumeIntegral<uint32_t>() /* height */,
+            mFDP->ConsumeIntegral<int32_t>() /* format */,
+            mFDP->ConsumeIntegral<int32_t>() /* flags */);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewTarget(mSurface->getIGraphicBufferProducer());
+        mCamera->startPreview();
+        mCamera->stopPreview();
+        mCamera->previewEnabled();
+        mCamera->startRecording();
+        mCamera->stopRecording();
+    }
+
+    mCamera->lock();
+    mCamera->unlock();
+    mCamera->autoFocus();
+    mCamera->cancelAutoFocus();
+
+    int32_t msgType = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->takePicture(msgType);
+    invokeSetParameters();
+    int32_t cmd;
+    if (mFDP->ConsumeBool()) {
+        cmd = mFDP->PickValueInArray(kValidCMD);
+    } else {
+        cmd = mFDP->ConsumeIntegral<int32_t>();
+    }
+    int32_t arg1 = mFDP->ConsumeIntegral<int32_t>();
+    int32_t arg2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->sendCommand(cmd, arg1, arg2);
+
+    int32_t videoBufferMode = mFDP->PickValueInArray(kValidVideoBufferMode);
+    mCamera->setVideoBufferMode(videoBufferMode);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setVideoTarget(mSurface->getIGraphicBufferProducer());
+    }
+    mCameraListener = sp<TestCameraListener>::make();
+    mCamera->setListener(mCameraListener);
+    int32_t previewCallbackFlag;
+    if (mFDP->ConsumeBool()) {
+        previewCallbackFlag = mFDP->PickValueInArray(kValidPreviewCallbackFlag);
+    } else {
+        previewCallbackFlag = mFDP->ConsumeIntegral<int32_t>();
+    }
+    mCamera->setPreviewCallbackFlags(previewCallbackFlag);
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mCamera->setPreviewCallbackTarget(mSurface->getIGraphicBufferProducer());
+    }
+
+    mCamera->getRecordingProxy();
+    int32_t mode = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->setAudioRestriction(mode);
+    mCamera->getGlobalAudioRestriction();
+    mCamera->recordingEnabled();
+
+    mMemoryDealer = new MemoryDealer(kMemoryDealerSize);
+    mIMem = mMemoryDealer->allocate(kMemoryDealerSize);
+    mCamera->releaseRecordingFrame(mIMem);
+
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    mCamera->releaseRecordingFrameHandle(handle);
+
+    int32_t msgTypeNC = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext = mFDP->ConsumeIntegral<int32_t>();
+    int32_t ext2 = mFDP->ConsumeIntegral<int32_t>();
+    mCamera->notifyCallback(msgTypeNC, ext, ext2);
+
+    int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
+    mCamera->dataCallbackTimestamp(timestamp, msgTypeNC, mIMem);
+    mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+}
+
+void CameraFuzzer::invokeCameraUtils() {
+    CameraMetadata staticMetadata;
+    int32_t orientVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                            : mFDP->ConsumeIntegral<int32_t>();
+    uint8_t facingVal = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<uint8_t>();
+    staticMetadata.update(ANDROID_SENSOR_ORIENTATION, &orientVal, 1);
+    staticMetadata.update(ANDROID_LENS_FACING, &facingVal, 1);
+    int32_t transform = 0;
+    CameraUtils::getRotationTransform(
+            staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */, &transform /*out*/);
+    CameraUtils::isCameraServiceDisabled();
+}
+
+void CameraFuzzer::invokeCameraBase() {
+    CameraInfo cameraInfo;
+    cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
+                                            : mFDP->ConsumeIntegral<int>();
+    cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
+                                                 : mFDP->ConsumeIntegral<int>();
+    invokeReadWriteParcel<CameraInfo>(&cameraInfo);
+
+    CameraStatus* cameraStatus = nullptr;
+
+    if (mFDP->ConsumeBool()) {
+        cameraStatus = new CameraStatus();
+    } else {
+        string cid = mFDP->ConsumeRandomLengthString();
+        String8 id(cid.c_str());
+        int32_t status = mFDP->ConsumeIntegral<int32_t>();
+        size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        vector<String8> unavailSubIds;
+        for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
+            string subId = mFDP->ConsumeRandomLengthString();
+            String8 unavailSubId(subId.c_str());
+            unavailSubIds.push_back(unavailSubId);
+        }
+        string clientPkg = mFDP->ConsumeRandomLengthString();
+        String8 clientPackage(clientPkg.c_str());
+        cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+    }
+
+    invokeReadWriteParcel<CameraStatus>(cameraStatus);
+    delete cameraStatus;
+}
+
+void CameraFuzzer::initCameraMetadata() {
+    if (mFDP->ConsumeBool()) {
+        mCameraMetadata = new CameraMetadata();
+    } else {
+        size_t entryCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        size_t dataCapacity = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+        mCameraMetadata = new CameraMetadata(entryCapacity, dataCapacity);
+    }
+}
+
+void CameraFuzzer::invokeCameraMetadata() {
+    initCameraMetadata();
+
+    const camera_metadata_t* metadataBuffer = nullptr;
+    if (mFDP->ConsumeBool()) {
+        metadataBuffer = mCameraMetadata->getAndLock();
+    }
+
+    mCameraMetadata->entryCount();
+    mCameraMetadata->isEmpty();
+    mCameraMetadata->bufferSize();
+    mCameraMetadata->sort();
+
+    uint32_t tag = mFDP->ConsumeIntegral<uint32_t>();
+    uint8_t dataUint8 = mFDP->ConsumeIntegral<uint8_t>();
+    int32_t dataInt32 = mFDP->ConsumeIntegral<int32_t>();
+    int64_t dataInt64 = mFDP->ConsumeIntegral<int64_t>();
+    float dataFloat = mFDP->ConsumeFloatingPoint<float>();
+    double dataDouble = mFDP->ConsumeFloatingPoint<double>();
+    camera_metadata_rational dataRational;
+    dataRational.numerator = mFDP->ConsumeIntegral<int32_t>();
+    dataRational.denominator = mFDP->ConsumeIntegral<int32_t>();
+    string dataStr = mFDP->ConsumeRandomLengthString();
+    String8 dataString(dataStr.c_str());
+    size_t data_count = 1;
+    mCameraMetadata->update(tag, &dataUint8, data_count);
+    mCameraMetadata->update(tag, &dataInt32, data_count);
+    mCameraMetadata->update(tag, &dataFloat, data_count);
+    mCameraMetadata->update(tag, &dataInt64, data_count);
+    mCameraMetadata->update(tag, &dataRational, data_count);
+    mCameraMetadata->update(tag, &dataDouble, data_count);
+    mCameraMetadata->update(tag, dataString);
+
+    uint32_t tagExists = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->exists(tagExists);
+
+    uint32_t tagFind = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->find(tagFind);
+
+    uint32_t tagErase = mFDP->ConsumeBool() ? tag : mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->erase(tagErase);
+
+    mCameraMetadata->unlock(metadataBuffer);
+    std::vector<int32_t> tagsRemoved;
+    uint64_t vendorId = mFDP->ConsumeIntegral<uint64_t>();
+    mCameraMetadata->removePermissionEntries(vendorId, &tagsRemoved);
+
+    string name = mFDP->ConsumeRandomLengthString();
+    VendorTagDescriptor vTags;
+    uint32_t tagName = mFDP->ConsumeIntegral<uint32_t>();
+    mCameraMetadata->getTagFromName(name.c_str(), &vTags, &tagName);
+
+    invokeReadWriteNullParcel<CameraMetadata>(mCameraMetadata);
+    invokeReadWriteParcel<CameraMetadata>(mCameraMetadata);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mCameraMetadata->dump(fd, verbosity, indentation);
+
+    CameraMetadata metadataCopy(mCameraMetadata->release());
+    CameraMetadata otherCameraMetadata;
+    mCameraMetadata->swap(otherCameraMetadata);
+    close(fd);
+}
+
+void CameraFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeCamera();
+    invokeCameraUtils();
+    invokeCameraBase();
+    invokeCameraMetadata();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    sp<CameraFuzzer> cameraFuzzer = new CameraFuzzer();
+    cameraFuzzer->process(data, size);
+    cameraFuzzer.clear();
+    return 0;
+}
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
new file mode 100644
index 0000000..e14d9ce
--- /dev/null
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VendorTagDescriptor.h>
+#include <binder/Parcel.h>
+#include <camera_metadata_tests_fake_vendor.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <system/camera_vendor_tags.h>
+
+#include <camera_metadata_hidden.h>
+#include "camera2common.h"
+
+using namespace std;
+using namespace android;
+
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+constexpr int32_t kVendorTagDescriptorId = -1;
+
+extern "C" {
+
+static int zero_get_tag_count(const vendor_tag_ops_t*) {
+    return 0;
+}
+
+static int default_get_tag_count(const vendor_tag_ops_t*) {
+    return VENDOR_TAG_COUNT_ERR;
+}
+
+static void default_get_all_tags(const vendor_tag_ops_t*, uint32_t*) {}
+
+static const char* default_get_section_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_SECTION_NAME_ERR;
+}
+
+static const char* default_get_tag_name(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_NAME_ERR;
+}
+
+static int default_get_tag_type(const vendor_tag_ops_t*, uint32_t) {
+    return VENDOR_TAG_TYPE_ERR;
+}
+
+} /*extern "C"*/
+
+static void FillWithDefaults(vendor_tag_ops_t* vOps) {
+    vOps->get_tag_count = default_get_tag_count;
+    vOps->get_all_tags = default_get_all_tags;
+    vOps->get_section_name = default_get_section_name;
+    vOps->get_tag_name = default_get_tag_name;
+    vOps->get_tag_type = default_get_tag_type;
+}
+
+class VendorTagDescriptorFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
+    ~VendorTagDescriptorFuzzer() {
+        mVendorTagDescriptor.clear();
+        mVendorTagDescriptorCache.clear();
+    }
+
+  private:
+    void initVendorTagDescriptor();
+    void invokeVendorTagDescriptor();
+    void invokeVendorTagDescriptorCache();
+    void invokeVendorTagErrorConditions();
+    sp<VendorTagDescriptor> mVendorTagDescriptor = nullptr;
+    sp<VendorTagDescriptorCache> mVendorTagDescriptorCache = nullptr;
+    FuzzedDataProvider* mFDP = nullptr;
+};
+
+void VendorTagDescriptorFuzzer::initVendorTagDescriptor() {
+    if (mFDP->ConsumeBool()) {
+        mVendorTagDescriptor = new VendorTagDescriptor();
+    } else {
+        const vendor_tag_ops_t* vOps = &fakevendor_ops;
+        VendorTagDescriptor::createDescriptorFromOps(vOps, mVendorTagDescriptor);
+    }
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptor() {
+    initVendorTagDescriptor();
+
+    sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
+    vdesc->copyFrom(*mVendorTagDescriptor);
+    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+    VendorTagDescriptor::getGlobalVendorTagDescriptor();
+
+    int32_t tagCount = mVendorTagDescriptor->getTagCount();
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptor->getTagArray(tagArray);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
+            get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
+            mVendorTagDescriptor->getSectionIndex(tag);
+        }
+        mVendorTagDescriptor->getAllSectionNames();
+    }
+
+    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+    uint32_t lookupTag;
+    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+    mVendorTagDescriptor->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+    vdesc.clear();
+    close(fd);
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
+    mVendorTagDescriptorCache = new VendorTagDescriptorCache();
+    uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
+    initVendorTagDescriptor();
+
+    mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
+    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::getGlobalVendorTagCache();
+    sp<VendorTagDescriptor> tagDesc;
+    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+
+    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+    if (tagCount > 0) {
+        uint32_t tagArray[tagCount];
+        mVendorTagDescriptorCache->getTagArray(tagArray, id);
+        uint32_t tag;
+        for (int32_t i = 0; i < tagCount; ++i) {
+            tag = tagArray[i];
+            get_local_camera_metadata_section_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_name_vendor_id(tag, id);
+            get_local_camera_metadata_tag_type_vendor_id(tag, id);
+        }
+    }
+
+    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+
+    invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+    VendorTagDescriptorCache::isVendorCachePresent(id);
+    mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+    mVendorTagDescriptorCache->clearGlobalVendorTagCache();
+    tagDesc.clear();
+    close(fd);
+}
+
+void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
+    sp<VendorTagDescriptor> vDesc;
+    vendor_tag_ops_t vOps;
+    FillWithDefaults(&vOps);
+    vOps.get_tag_count = zero_get_tag_count;
+
+    if (mFDP->ConsumeBool()) {
+        VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
+    } else {
+        VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
+        int32_t tagCount = vDesc->getTagCount();
+        uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+        uint32_t badTagArray[tagCount + 1];
+        vDesc->getTagArray(badTagArray);
+        vDesc->getSectionName(badTag);
+        vDesc->getTagName(badTag);
+        vDesc->getTagType(badTag);
+        VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+        VendorTagDescriptor::getGlobalVendorTagDescriptor();
+        VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+        invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
+        vDesc.clear();
+    }
+}
+
+void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeVendorTagDescriptor();
+    invokeVendorTagDescriptorCache();
+    invokeVendorTagErrorConditions();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    VendorTagDescriptorFuzzer vendorTagDescriptorFuzzer;
+    vendorTagDescriptorFuzzer.process(data, size);
+    return 0;
+}
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 2e0b678..f53fc0a 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <algorithm>
+#include <string_view>
+#include <type_traits>
 
 #include <assert.h>
 #include <ctype.h>
@@ -85,6 +88,7 @@
 using android::Vector;
 using android::sp;
 using android::status_t;
+using android::SurfaceControl;
 
 using android::INVALID_OPERATION;
 using android::NAME_NOT_FOUND;
@@ -100,7 +104,6 @@
 static const uint32_t kFallbackHeight = 720;
 static const char* kMimeTypeAvc = "video/avc";
 static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
-static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
 
 // Command-line parameters.
 static bool gVerbose = false;           // chatty on stdout
@@ -119,7 +122,7 @@
 static uint32_t gBitRate = 20000000;     // 20Mbps
 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
 static uint32_t gBframes = 0;
-static PhysicalDisplayId gPhysicalDisplayId;
+static std::optional<PhysicalDisplayId> gPhysicalDisplayId;
 // Set by signal handler to stop recording.
 static volatile bool gStopRequested = false;
 
@@ -333,19 +336,49 @@
 }
 
 /*
+ * Gets the physical id of the display to record. If the user specified a physical
+ * display id, then that id will be set. Otherwise, the default display will be set.
+ */
+static status_t getPhysicalDisplayId(PhysicalDisplayId& outDisplayId) {
+    if (gPhysicalDisplayId) {
+        outDisplayId = *gPhysicalDisplayId;
+        return NO_ERROR;
+    }
+
+    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+    if (ids.empty()) {
+        return INVALID_OPERATION;
+    }
+    outDisplayId = ids.front();
+    return NO_ERROR;
+}
+
+/*
  * Configures the virtual display.  When this completes, virtual display
  * frames will start arriving from the buffer producer.
  */
 static status_t prepareVirtualDisplay(
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
-        sp<IBinder>* pDisplayHandle) {
+        sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
     sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
             String8("ScreenRecorder"), false /*secure*/);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
-    t.setDisplayLayerStack(dpy, displayState.layerStack);
+    ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
+    t.setDisplayLayerStack(dpy, layerStack);
+    PhysicalDisplayId displayId;
+    status_t err = getPhysicalDisplayId(displayId);
+    if (err != NO_ERROR) {
+        return err;
+    }
+    *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(displayId);
+    if (*mirrorRoot == nullptr) {
+        ALOGE("Failed to create a mirror for screenrecord");
+        return UNKNOWN_ERROR;
+    }
+    t.setLayerStack(*mirrorRoot, layerStack);
     t.apply();
 
     *pDisplayHandle = dpy;
@@ -354,14 +387,15 @@
 }
 
 /*
- * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * Writes a signed or unsigned integer byte-by-byte in little endian order regardless
  * of the platform endianness.
  */
-template <typename UINT>
-static void writeValueLE(UINT value, uint8_t* buffer) {
-    for (int i = 0; i < sizeof(UINT); ++i) {
-        buffer[i] = static_cast<uint8_t>(value);
-        value >>= 8;
+template <typename T>
+static void writeValueLE(T value, uint8_t* buffer) {
+    std::remove_const_t<T> temp = value;
+    for (int i = 0; i < sizeof(T); ++i) {
+        buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
+        temp >>= 8;
     }
 }
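Because writeValueLE() serializes one byte at a time with an explicit mask, the output layout is the same on big- and little-endian hosts. A standalone illustration of that claim (hypothetical demo code, not part of this change), restating the same technique:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <type_traits>

// Same masking-and-shifting technique as writeValueLE() above, restated so the
// snippet compiles on its own.
template <typename T>
static void writeLE(T value, uint8_t* buffer) {
    std::remove_const_t<T> temp = value;
    for (size_t i = 0; i < sizeof(T); ++i) {
        buffer[i] = static_cast<uint8_t>(temp & 0xff);
        temp >>= 8;
    }
}

int main() {
    uint8_t buf[4] = {};
    writeLE<uint32_t>(0x11223344u, buf);
    // The least significant byte always lands first, whatever the host byte order.
    assert(buf[0] == 0x44 && buf[1] == 0x33 && buf[2] == 0x22 && buf[3] == 0x11);
    return 0;
}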
 
@@ -377,16 +411,18 @@
  * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
  *   (as little endian uint64).
  */
-static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
         const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
-    ALOGV("Writing metadata");
+    static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";
+
+    ALOGV("Writing winscope metadata legacy");
     int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
         - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
     sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
-        + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+        + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
     uint8_t* pos = buffer->data();
-    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
-    pos += strlen(kWinscopeMagicString);
+    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
+    pos += strlen(kWinscopeMagicStringLegacy);
     writeValueLE<uint32_t>(timestamps.size(), pos);
     pos += sizeof(uint32_t);
     for (size_t idx = 0; idx < timestamps.size(); ++idx) {
@@ -395,15 +431,121 @@
         pos += sizeof(uint64_t);
     }
     AMediaCodecBufferInfo bufferInfo = {
-        0,
+        0 /* offset */,
         static_cast<int32_t>(buffer->size()),
-        timestamps[0],
-        0
+        timestamps[0] /* presentationTimeUs */,
+        0 /* flags */
     };
     return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
 }
 
 /*
+ * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
+ *
+ * The metadata (version 2) is written as a binary array with the following format:
+ * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
+ * - for each recorded frame:
+ *     - System time in elapsed clock timebase in nanoseconds (8B little endian).
+ *
+ *
+ * Metadata version 2 changes
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
+ */
+static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
+        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+    ALOGV("Writing winscope metadata");
+
+    static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
+    static constexpr std::uint32_t metadataVersion = 2;
+
+    const auto elapsedTimeNs = android::elapsedRealtimeNano();
+    const std::int64_t elapsedToMonotonicTimeOffsetNs =
+            elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+    const std::int64_t realToElapsedTimeOffsetNs =
+            systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
+    const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
+
+    sp<ABuffer> buffer = new ABuffer(
+        kWinscopeMagicString.size() +
+        sizeof(decltype(metadataVersion)) +
+        sizeof(decltype(realToElapsedTimeOffsetNs)) +
+        sizeof(decltype(framesCount)) +
+        framesCount * sizeof(std::uint64_t)
+    );
+    std::uint8_t* pos = buffer->data();
+
+    std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
+    pos += kWinscopeMagicString.size();
+
+    writeValueLE(metadataVersion, pos);
+    pos += sizeof(decltype(metadataVersion));
+
+    writeValueLE(realToElapsedTimeOffsetNs, pos);
+    pos += sizeof(decltype(realToElapsedTimeOffsetNs));
+
+    writeValueLE(framesCount, pos);
+    pos += sizeof(decltype(framesCount));
+
+    for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
+        const auto timestampElapsedNs =
+                elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+        writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
+        pos += sizeof(std::uint64_t);
+    }
+
+    AMediaCodecBufferInfo bufferInfo = {
+        0 /* offset */,
+        static_cast<std::int32_t>(buffer->size()),
+        timestampsMonotonicUs[0] /* presentationTimeUs */,
+        0 /* flags */
+    };
+    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
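For reference, the v2 trailer documented above can be decoded by walking the same fields in order. The sketch below is illustrative only (the struct and function names are hypothetical, a little-endian host is assumed so plain memcpy suffices, and Winscope's real parser lives elsewhere and may differ):

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

struct WinscopeMetadataV2 {
    uint32_t version = 0;
    int64_t realToElapsedTimeOffsetNs = 0;
    std::vector<uint64_t> frameTimestampsElapsedNs;
};

// Mirrors the layout produced by writeWinscopeMetadata() above: 16B magic,
// 4B version, 8B realtime-to-elapsed offset, 4B frame count, then one 8B
// elapsed-time timestamp per frame (all little endian).
static bool parseWinscopeMetadataV2(const uint8_t* data, size_t size, WinscopeMetadataV2* out) {
    static constexpr char kMagic[] = "#VV1NSC0PET1ME2#";
    static constexpr size_t kMagicLen = 16;  // written without a trailing NUL
    static constexpr size_t kHeaderLen =
            kMagicLen + sizeof(uint32_t) + sizeof(int64_t) + sizeof(uint32_t);
    if (size < kHeaderLen || memcmp(data, kMagic, kMagicLen) != 0) {
        return false;
    }
    const uint8_t* pos = data + kMagicLen;
    memcpy(&out->version, pos, sizeof(uint32_t));
    pos += sizeof(uint32_t);
    memcpy(&out->realToElapsedTimeOffsetNs, pos, sizeof(int64_t));
    pos += sizeof(int64_t);
    uint32_t framesCount = 0;
    memcpy(&framesCount, pos, sizeof(uint32_t));
    pos += sizeof(uint32_t);
    if (size - kHeaderLen < static_cast<size_t>(framesCount) * sizeof(uint64_t)) {
        return false;
    }
    out->frameTimestampsElapsedNs.resize(framesCount);
    memcpy(out->frameTimestampsElapsedNs.data(), pos, framesCount * sizeof(uint64_t));
    return true;
}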
+/*
+ * Update the display projection if size or orientation have changed.
+ */
+void updateDisplayProjection(const sp<IBinder>& virtualDpy, ui::DisplayState& displayState) {
+    ATRACE_NAME("updateDisplayProjection");
+
+    PhysicalDisplayId displayId;
+    if (getPhysicalDisplayId(displayId) != NO_ERROR) {
+        fprintf(stderr, "ERROR: Failed to get display id\n");
+        return;
+    }
+
+    sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+    if (!displayToken) {
+        fprintf(stderr, "ERROR: failed to get display token\n");
+        return;
+    }
+
+    ui::DisplayState currentDisplayState;
+    if (SurfaceComposerClient::getDisplayState(displayToken, &currentDisplayState) != NO_ERROR) {
+        ALOGW("ERROR: failed to get display state\n");
+        return;
+    }
+
+    if (currentDisplayState.orientation != displayState.orientation ||
+        currentDisplayState.layerStackSpaceRect != displayState.layerStackSpaceRect) {
+        displayState = currentDisplayState;
+        ALOGD("display state changed, now has orientation %s, size (%d, %d)",
+              toCString(displayState.orientation), displayState.layerStackSpaceRect.getWidth(),
+              displayState.layerStackSpaceRect.getHeight());
+
+        SurfaceComposerClient::Transaction t;
+        setDisplayProjection(t, virtualDpy, currentDisplayState);
+        t.apply();
+    }
+}
+
+/*
  * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
  * input frames are coming from the virtual display as fast as SurfaceFlinger
  * wants to send them.
@@ -412,17 +554,17 @@
  *
  * The muxer must *not* have been started before calling.
  */
-static status_t runEncoder(const sp<MediaCodec>& encoder,
-        AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
-        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
+static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer* muxer, FILE* rawFp,
+                           const sp<IBinder>& virtualDpy, ui::DisplayState displayState) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
+    ssize_t metaLegacyTrackIdx = -1;
     ssize_t metaTrackIdx = -1;
     uint32_t debugNumFrames = 0;
     int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
     int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
-    Vector<int64_t> timestamps;
+    Vector<int64_t> timestampsMonotonicUs;
     bool firstFrame = true;
 
     assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -472,24 +614,7 @@
                 ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                         bufIndex, size, ptsUsec);
 
-                { // scope
-                    ATRACE_NAME("orientation");
-                    // Check orientation, update if it has changed.
-                    //
-                    // Polling for changes is inefficient and wrong, but the
-                    // useful stuff is hard to get at without a Dalvik VM.
-                    ui::DisplayState displayState;
-                    err = SurfaceComposerClient::getDisplayState(display, &displayState);
-                    if (err != NO_ERROR) {
-                        ALOGW("getDisplayState() failed: %d", err);
-                    } else if (orientation != displayState.orientation) {
-                        ALOGD("orientation changed, now %s", toCString(displayState.orientation));
-                        SurfaceComposerClient::Transaction t;
-                        setDisplayProjection(t, virtualDpy, displayState);
-                        t.apply();
-                        orientation = displayState.orientation;
-                    }
-                }
+                updateDisplayProjection(virtualDpy, displayState);
 
                 // If the virtual display isn't providing us with timestamps,
                 // use the current time.  This isn't great -- we could get
@@ -520,9 +645,9 @@
                     sp<ABuffer> buffer = new ABuffer(
                             buffers[bufIndex]->data(), buffers[bufIndex]->size());
                     AMediaCodecBufferInfo bufferInfo = {
-                        0,
+                        0 /* offset */,
                         static_cast<int32_t>(buffer->size()),
-                        ptsUsec,
+                        ptsUsec /* presentationTimeUs */,
                         flags
                     };
                     err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
@@ -532,7 +657,7 @@
                         return err;
                     }
                     if (gOutputFormat == FORMAT_MP4) {
-                        timestamps.add(ptsUsec);
+                        timestampsMonotonicUs.add(ptsUsec);
                     }
                 }
                 debugNumFrames++;
@@ -565,6 +690,7 @@
                     if (gOutputFormat == FORMAT_MP4) {
                         AMediaFormat *metaFormat = AMediaFormat_new();
                         AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+                        metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         AMediaFormat_delete(metaFormat);
                     }
@@ -604,10 +730,16 @@
                         systemTime(CLOCK_MONOTONIC) - startWhenNsec));
         fflush(stdout);
     }
-    if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
-        err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+    if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
+        err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
         if (err != NO_ERROR) {
-            fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+            fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
+            return err;
+        }
+
+        err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
+        if (err != NO_ERROR) {
+            fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
             return err;
         }
     }
@@ -656,6 +788,55 @@
     return num & ~1;
 }
 
+struct RecordingData {
+    sp<MediaCodec> encoder;
+    // Virtual display token; destroyed in the destructor.
+    sp<IBinder> dpy;
+
+    sp<Overlay> overlay;
+
+    ~RecordingData() {
+        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (overlay != nullptr) overlay->stop();
+        if (encoder != nullptr) {
+            encoder->stop();
+            encoder->release();
+        }
+    }
+};
+
+/*
+ * Computes the maximum width and height across all physical displays.
+ */
+static ui::Size getMaxDisplaySize() {
+    const std::vector<PhysicalDisplayId> physicalDisplayIds =
+            SurfaceComposerClient::getPhysicalDisplayIds();
+    if (physicalDisplayIds.empty()) {
+        fprintf(stderr, "ERROR: Failed to get physical display ids\n");
+        return {};
+    }
+
+    ui::Size result;
+    for (auto& displayId : physicalDisplayIds) {
+        sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
+        if (!displayToken) {
+            fprintf(stderr, "ERROR: failed to get display token\n");
+            continue;
+        }
+
+        ui::DisplayState displayState;
+        status_t err = SurfaceComposerClient::getDisplayState(displayToken, &displayState);
+        if (err != NO_ERROR) {
+            fprintf(stderr, "ERROR: failed to get display state\n");
+            continue;
+        }
+
+        result.height = std::max(result.height, displayState.layerStackSpaceRect.getHeight());
+        result.width = std::max(result.width, displayState.layerStackSpaceRect.getWidth());
+    }
+    return result;
+}
+
 /*
  * Main "do work" start point.
  *
@@ -674,21 +855,20 @@
     sp<ProcessState> self = ProcessState::self();
     self->startThreadPool();
 
+    PhysicalDisplayId displayId;
+    err = getPhysicalDisplayId(displayId);
+    if (err != NO_ERROR) {
+        fprintf(stderr, "ERROR: Failed to get display id\n");
+        return err;
+    }
+
     // Get main display parameters.
-    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
-            gPhysicalDisplayId);
+    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
     if (display == nullptr) {
         fprintf(stderr, "ERROR: no display\n");
         return NAME_NOT_FOUND;
     }
 
-    ui::DisplayState displayState;
-    err = SurfaceComposerClient::getDisplayState(display, &displayState);
-    if (err != NO_ERROR) {
-        fprintf(stderr, "ERROR: unable to get display state\n");
-        return err;
-    }
-
     DisplayMode displayMode;
     err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
     if (err != NO_ERROR) {
@@ -696,7 +876,20 @@
         return err;
     }
 
-    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
+    ui::DisplayState displayState;
+    err = SurfaceComposerClient::getDisplayState(display, &displayState);
+    if (err != NO_ERROR) {
+        fprintf(stderr, "ERROR: unable to get display state\n");
+        return err;
+    }
+
+    if (displayState.layerStack == ui::INVALID_LAYER_STACK) {
+        fprintf(stderr, "ERROR: INVALID_LAYER_STACK, please check your display state.\n");
+        return INVALID_OPERATION;
+    }
+
+    const ui::Size layerStackSpaceRect =
+        gPhysicalDisplayId ? displayState.layerStackSpaceRect : getMaxDisplaySize();
     if (gVerbose) {
         printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
@@ -713,12 +906,12 @@
         gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
     }
 
+    RecordingData recordingData = RecordingData();
     // Configure and start the encoder.
-    sp<MediaCodec> encoder;
     sp<FrameOutput> frameOutput;
     sp<IGraphicBufferProducer> encoderInputSurface;
     if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
-        err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
+        err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder, &encoderInputSurface);
 
         if (err != NO_ERROR && !gSizeSpecified) {
             // fallback is defined for landscape; swap if we're in portrait
@@ -731,7 +924,8 @@
                         gVideoWidth, gVideoHeight, newWidth, newHeight);
                 gVideoWidth = newWidth;
                 gVideoHeight = newHeight;
-                err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
+                err = prepareEncoder(displayMode.refreshRate, &recordingData.encoder,
+                                      &encoderInputSurface);
             }
         }
         if (err != NO_ERROR) return err;
@@ -758,13 +952,11 @@
 
     // Configure optional overlay.
     sp<IGraphicBufferProducer> bufferProducer;
-    sp<Overlay> overlay;
     if (gWantFrameTime) {
         // Send virtual display frames to an external texture.
-        overlay = new Overlay(gMonotonicTime);
-        err = overlay->start(encoderInputSurface, &bufferProducer);
+        recordingData.overlay = new Overlay(gMonotonicTime);
+        err = recordingData.overlay->start(encoderInputSurface, &bufferProducer);
         if (err != NO_ERROR) {
-            if (encoder != NULL) encoder->release();
             return err;
         }
         if (gVerbose) {
@@ -776,11 +968,13 @@
         bufferProducer = encoderInputSurface;
     }
 
+    // We need to hold a reference to mirrorRoot during the entire recording to ensure it's not
+    // cleaned up by SurfaceFlinger. When the reference is dropped, SurfaceFlinger will delete
+    // the resource.
+    sp<SurfaceControl> mirrorRoot;
     // Configure virtual display.
-    sp<IBinder> dpy;
-    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
+    err = prepareVirtualDisplay(displayState, bufferProducer, &recordingData.dpy, &mirrorRoot);
     if (err != NO_ERROR) {
-        if (encoder != NULL) encoder->release();
         return err;
     }
 
@@ -820,7 +1014,6 @@
         case FORMAT_RAW_FRAMES: {
             rawFp = prepareRawOutput(fileName);
             if (rawFp == NULL) {
-                if (encoder != NULL) encoder->release();
                 return -1;
             }
             break;
@@ -861,7 +1054,7 @@
         }
     } else {
         // Main encoder loop.
-        err = runEncoder(encoder, muxer, rawFp, display, dpy, displayState.orientation);
+        err = runEncoder(recordingData.encoder, muxer, rawFp, recordingData.dpy, displayState);
         if (err != NO_ERROR) {
             fprintf(stderr, "Encoder failed (err=%d)\n", err);
             // fall through to cleanup
@@ -875,9 +1068,6 @@
 
     // Shut everything down, starting with the producer side.
     encoderInputSurface = NULL;
-    SurfaceComposerClient::destroyDisplay(dpy);
-    if (overlay != NULL) overlay->stop();
-    if (encoder != NULL) encoder->stop();
     if (muxer != NULL) {
         // If we don't stop muxer explicitly, i.e. let the destructor run,
         // it may hang (b/11050628).
@@ -885,7 +1075,6 @@
     } else if (rawFp != stdout) {
         fclose(rawFp);
     }
-    if (encoder != NULL) encoder->release();
 
     return err;
 }
@@ -1026,7 +1215,8 @@
         "    Add additional information, such as a timestamp overlay, that is helpful\n"
         "    in videos captured to illustrate bugs.\n"
         "--time-limit TIME\n"
-        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
+        "    Set the maximum recording time, in seconds.  Default is %d. Set to 0\n"
+        "    to remove the time limit.\n"
         "--display-id ID\n"
         "    specify the physical display ID to record. Default is the primary display.\n"
         "    see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
@@ -1065,14 +1255,6 @@
         { NULL,                 0,                  NULL, 0 }
     };
 
-    std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
-    if (!displayId) {
-        fprintf(stderr, "Failed to get ID for internal display\n");
-        return 1;
-    }
-
-    gPhysicalDisplayId = *displayId;
-
     while (true) {
         int optionIndex = 0;
         int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
@@ -1113,14 +1295,27 @@
             }
             break;
         case 't':
-            gTimeLimitSec = atoi(optarg);
-            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
-                fprintf(stderr,
-                        "Time limit %ds outside acceptable range [1,%d]\n",
-                        gTimeLimitSec, kMaxTimeLimitSec);
+        {
+            char *next;
+            const int64_t timeLimitSec = strtol(optarg, &next, 10);
+            if (next == optarg || (*next != '\0' && *next != ' ')) {
+                fprintf(stderr, "Error parsing time limit argument\n");
                 return 2;
             }
+            if (timeLimitSec > std::numeric_limits<uint32_t>::max() || timeLimitSec < 0) {
+                fprintf(stderr,
+                        "Time limit %" PRIi64 "s outside acceptable range [0,%u] seconds\n",
+                        timeLimitSec, std::numeric_limits<uint32_t>::max());
+                return 2;
+            }
+            gTimeLimitSec = (timeLimitSec == 0) ?
+                    std::numeric_limits<uint32_t>::max() : timeLimitSec;
+            if (gVerbose) {
+                printf("Time limit set to %u seconds\n", gTimeLimitSec);
+                fflush(stdout);
+            }
             break;
+        }
         case 'u':
             gWantInfoScreen = true;
             gWantFrameTime = true;
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
index e1fe07e..445541e 100644
--- a/cmds/stagefright/Android.bp
+++ b/cmds/stagefright/Android.bp
@@ -211,46 +211,6 @@
 }
 
 cc_binary {
-    name: "mediafilter",
-
-    srcs: [
-        "filters/argbtorgba.rscript",
-        "filters/nightvision.rscript",
-        "filters/saturation.rscript",
-        "mediafilter.cpp",
-    ],
-
-    header_libs: [
-        "libmediadrm_headers",
-        "libmediametrics_headers",
-        "libstagefright_headers",
-        "rs-headers",
-    ],
-
-    shared_libs: [
-        "libstagefright",
-        "liblog",
-        "libutils",
-        "libbinder",
-        "libstagefright_foundation",
-        "libmedia_omx",
-        "libui",
-        "libgui",
-        "libRScpp",
-    ],
-
-    static_libs: ["libstagefright_mediafilter"],
-
-    cflags: [
-        "-Wno-multichar",
-    ],
-
-    sanitize: {
-        cfi: true,
-    },
-}
-
-cc_binary {
     name: "muxer",
 
     srcs: ["muxer.cpp"],
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
index 0ecc16c..0656030 100644
--- a/cmds/stagefright/SineSource.cpp
+++ b/cmds/stagefright/SineSource.cpp
@@ -63,12 +63,15 @@
         MediaBufferBase **out, const ReadOptions * /* options */) {
     *out = NULL;
 
-    MediaBufferBase *buffer;
+    MediaBufferBase *buffer = nullptr;
     status_t err = mGroup->acquire_buffer(&buffer);
 
     if (err != OK) {
         return err;
     }
+    if (buffer == nullptr) {
+        return AMEDIA_ERROR_UNKNOWN;
+    }
 
     size_t frameSize = mNumChannels * sizeof(int16_t);
     size_t numFramesPerBuffer = buffer->size() / frameSize;
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index beeab54..c43f8ce 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -411,7 +411,10 @@
         composerClient = new SurfaceComposerClient;
         CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-        const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+        const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+        CHECK(!ids.empty());
+
+        const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
         CHECK(display != nullptr);
 
         ui::DisplayMode mode;
diff --git a/cmds/stagefright/filters/argbtorgba.rscript b/cmds/stagefright/filters/argbtorgba.rscript
deleted file mode 100644
index 229ff8c..0000000
--- a/cmds/stagefright/filters/argbtorgba.rscript
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
-    v_out->x = v_in->y;
-    v_out->y = v_in->z;
-    v_out->z = v_in->w;
-    v_out->w = v_in->x;
-}
\ No newline at end of file
diff --git a/cmds/stagefright/filters/nightvision.rscript b/cmds/stagefright/filters/nightvision.rscript
deleted file mode 100644
index f61413c..0000000
--- a/cmds/stagefright/filters/nightvision.rscript
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-const static float3 gNightVisionMult = {0.5f, 1.f, 0.5f};
-
-// calculates luminance of pixel, then biases color balance toward green
-void root(const uchar4 *v_in, uchar4 *v_out) {
-    v_out->x = v_in->x; // don't modify A
-
-    // get RGB, scale 0-255 uchar to 0-1.0 float
-    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
-            v_in->w * 0.003921569f};
-
-    // apply filter
-    float3 result = dot(rgb, gMonoMult) * gNightVisionMult;
-
-    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
-    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
-    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/cmds/stagefright/filters/saturation.rscript b/cmds/stagefright/filters/saturation.rscript
deleted file mode 100644
index 1de9dd8..0000000
--- a/cmds/stagefright/filters/saturation.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
-    v_out->x = v_in->x; // don't modify A
-
-    // get RGB, scale 0-255 uchar to 0-1.0 float
-    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
-            v_in->w * 0.003921569f};
-
-    // apply saturation filter
-    float3 result = dot(rgb, gMonoMult);
-    result = mix(result, rgb, gSaturation);
-
-    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
-    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
-    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
deleted file mode 100644
index 67c68e6..0000000
--- a/cmds/stagefright/mediafilter.cpp
+++ /dev/null
@@ -1,789 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "mediafilterTest"
-
-#include <inttypes.h>
-
-#include <binder/ProcessState.h>
-#include <filters/ColorConvert.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-#include <media/IMediaHTTPService.h>
-#include <media/MediaCodecBuffer.h>
-#include <mediadrm/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/RenderScriptWrapper.h>
-#include <OMX_IVCommon.h>
-#include <ui/DisplayMode.h>
-
-#include "RenderScript.h"
-#include "ScriptC_argbtorgba.h"
-#include "ScriptC_nightvision.h"
-#include "ScriptC_saturation.h"
-
-// test parameters
-static const bool kTestFlush = true;        // Note: true will drop 1 out of
-static const int kFlushAfterFrames = 25;    // kFlushAfterFrames output frames
-static const int64_t kTimeout = 500ll;
-
-// built-in filter parameters
-static const int32_t kInvert = false;   // ZeroFilter param
-static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param
-static const float kSaturation = 0.0f;  // SaturationFilter param
-
-static void usage(const char *me) {
-    fprintf(stderr, "usage: [flags] %s\n"
-                    "\t[-b] use IntrinsicBlurFilter\n"
-                    "\t[-c] use argb to rgba conversion RSFilter\n"
-                    "\t[-n] use night vision RSFilter\n"
-                    "\t[-r] use saturation RSFilter\n"
-                    "\t[-s] use SaturationFilter\n"
-                    "\t[-z] use ZeroFilter (copy filter)\n"
-                    "\t[-R] render output to surface (enables -S)\n"
-                    "\t[-S] allocate buffers from a surface\n"
-                    "\t[-T] use render timestamps (enables -R)\n",
-                    me);
-    exit(1);
-}
-
-namespace android {
-
-struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
-    void init(const RSC::sp<RSC::RS> &context) {
-        mScript = new ScriptC_saturation(context);
-        mScript->set_gSaturation(3.f);
-    }
-
-    virtual status_t processBuffers(
-            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
-        mScript->forEach_root(inBuffer, outBuffer);
-
-        return OK;
-    }
-
-    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
-        return OK;
-    }
-
-private:
-    RSC::sp<ScriptC_saturation> mScript;
-};
-
-struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
-    void init(const RSC::sp<RSC::RS> &context) {
-        mScript = new ScriptC_nightvision(context);
-    }
-
-    virtual status_t processBuffers(
-            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
-        mScript->forEach_root(inBuffer, outBuffer);
-
-        return OK;
-    }
-
-    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
-        return OK;
-    }
-
-private:
-    RSC::sp<ScriptC_nightvision> mScript;
-};
-
-struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
-    void init(const RSC::sp<RSC::RS> &context) {
-        mScript = new ScriptC_argbtorgba(context);
-    }
-
-    virtual status_t processBuffers(
-            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
-        mScript->forEach_root(inBuffer, outBuffer);
-
-        return OK;
-    }
-
-    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
-        return OK;
-    }
-
-private:
-    RSC::sp<ScriptC_argbtorgba> mScript;
-};
-
-struct CodecState {
-    sp<MediaCodec> mCodec;
-    Vector<sp<MediaCodecBuffer> > mInBuffers;
-    Vector<sp<MediaCodecBuffer> > mOutBuffers;
-    bool mSignalledInputEOS;
-    bool mSawOutputEOS;
-    int64_t mNumBuffersDecoded;
-};
-
-struct DecodedFrame {
-    size_t index;
-    size_t offset;
-    size_t size;
-    int64_t presentationTimeUs;
-    uint32_t flags;
-};
-
-enum FilterType {
-    FILTERTYPE_ZERO,
-    FILTERTYPE_INTRINSIC_BLUR,
-    FILTERTYPE_SATURATION,
-    FILTERTYPE_RS_SATURATION,
-    FILTERTYPE_RS_NIGHT_VISION,
-    FILTERTYPE_RS_ARGB_TO_RGBA,
-};
-
-size_t inputFramesSinceFlush = 0;
-void tryCopyDecodedBuffer(
-        List<DecodedFrame> *decodedFrameIndices,
-        CodecState *filterState,
-        CodecState *vidState) {
-    if (decodedFrameIndices->empty()) {
-        return;
-    }
-
-    size_t filterIndex;
-    status_t err = filterState->mCodec->dequeueInputBuffer(
-            &filterIndex, kTimeout);
-    if (err != OK) {
-        return;
-    }
-
-    ++inputFramesSinceFlush;
-
-    DecodedFrame frame = *decodedFrameIndices->begin();
-
-    // only consume a buffer if we are not going to flush, since we expect
-    // the dequeue -> flush -> queue operation to cause an error and
-    // not produce an output frame
-    if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
-        decodedFrameIndices->erase(decodedFrameIndices->begin());
-    }
-    size_t outIndex = frame.index;
-
-    const sp<MediaCodecBuffer> &srcBuffer =
-        vidState->mOutBuffers.itemAt(outIndex);
-    const sp<MediaCodecBuffer> &destBuffer =
-        filterState->mInBuffers.itemAt(filterIndex);
-
-    sp<AMessage> srcFormat, destFormat;
-    vidState->mCodec->getOutputFormat(&srcFormat);
-    filterState->mCodec->getInputFormat(&destFormat);
-
-    int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
-    int32_t srcColorFormat, destColorFormat;
-    int32_t destWidth, destHeight, destStride, destSliceHeight;
-    CHECK(srcFormat->findInt32("stride", &srcStride)
-            && srcFormat->findInt32("slice-height", &srcSliceHeight)
-            && srcFormat->findInt32("width", &srcWidth)
-            && srcFormat->findInt32("height", & srcHeight)
-            && srcFormat->findInt32("color-format", &srcColorFormat));
-    CHECK(destFormat->findInt32("stride", &destStride)
-            && destFormat->findInt32("slice-height", &destSliceHeight)
-            && destFormat->findInt32("width", &destWidth)
-            && destFormat->findInt32("height", & destHeight)
-            && destFormat->findInt32("color-format", &destColorFormat));
-
-    CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);
-
-    convertYUV420spToARGB(
-            srcBuffer->data(),
-            srcBuffer->data() + srcStride * srcSliceHeight,
-            srcWidth,
-            srcHeight,
-            destBuffer->data());
-
-    // copy timestamp
-    int64_t timeUs;
-    CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
-    destBuffer->meta()->setInt64("timeUs", timeUs);
-
-    if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
-        inputFramesSinceFlush = 0;
-
-        // check that queueing a buffer that was dequeued before flush
-        // fails with expected error EACCES
-        filterState->mCodec->flush();
-
-        err = filterState->mCodec->queueInputBuffer(
-                filterIndex, 0 /* offset */, destBuffer->size(),
-                timeUs, frame.flags);
-
-        if (err == OK) {
-            ALOGE("FAIL: queue after flush returned OK");
-        } else if (err != -EACCES) {
-            ALOGE("queueInputBuffer after flush returned %d, "
-                    "expected -EACCES (-13)", err);
-        }
-    } else {
-        err = filterState->mCodec->queueInputBuffer(
-                filterIndex, 0 /* offset */, destBuffer->size(),
-                timeUs, frame.flags);
-        CHECK(err == OK);
-
-        err = vidState->mCodec->releaseOutputBuffer(outIndex);
-        CHECK(err == OK);
-    }
-}
-
-size_t outputFramesSinceFlush = 0;
-void tryDrainOutputBuffer(
-        CodecState *filterState,
-        const sp<Surface> &surface, bool renderSurface,
-        bool useTimestamp, int64_t *startTimeRender) {
-    size_t index;
-    size_t offset;
-    size_t size;
-    int64_t presentationTimeUs;
-    uint32_t flags;
-    status_t err = filterState->mCodec->dequeueOutputBuffer(
-            &index, &offset, &size, &presentationTimeUs, &flags,
-            kTimeout);
-
-    if (err != OK) {
-        return;
-    }
-
-    ++outputFramesSinceFlush;
-
-    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
-        filterState->mCodec->flush();
-    }
-
-    if (surface == NULL || !renderSurface) {
-        err = filterState->mCodec->releaseOutputBuffer(index);
-    } else if (useTimestamp) {
-        if (*startTimeRender == -1) {
-            // begin rendering 2 vsyncs after first decode
-            *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
-                    + 33000000 - (presentationTimeUs * 1000);
-        }
-        presentationTimeUs =
-                (presentationTimeUs * 1000) + *startTimeRender;
-        err = filterState->mCodec->renderOutputBufferAndRelease(
-                index, presentationTimeUs);
-    } else {
-        err = filterState->mCodec->renderOutputBufferAndRelease(index);
-    }
-
-    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
-        outputFramesSinceFlush = 0;
-
-        // releasing the buffer dequeued before flush should cause an error
-        // if so, the frame will also be skipped in output stream
-        if (err == OK) {
-            ALOGE("FAIL: release after flush returned OK");
-        } else if (err != -EACCES) {
-            ALOGE("releaseOutputBuffer after flush returned %d, "
-                    "expected -EACCES (-13)", err);
-        }
-    } else {
-        CHECK(err == OK);
-    }
-
-    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
-        ALOGV("reached EOS on output.");
-        filterState->mSawOutputEOS = true;
-    }
-}
-
-static int decode(
-        const sp<android::ALooper> &looper,
-        const char *path,
-        const sp<Surface> &surface,
-        bool renderSurface,
-        bool useTimestamp,
-        FilterType filterType) {
-
-    static int64_t kTimeout = 500ll;
-
-    sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
-
-    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
-        fprintf(stderr, "unable to instantiate extractor.\n");
-        return 1;
-    }
-
-    KeyedVector<size_t, CodecState> stateByTrack;
-
-    CodecState *vidState = NULL;
-    for (size_t i = 0; i < extractor->countTracks(); ++i) {
-        sp<AMessage> format;
-        status_t err = extractor->getTrackFormat(i, &format);
-        CHECK(err == OK);
-
-        AString mime;
-        CHECK(format->findString("mime", &mime));
-        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
-        if (!isVideo) {
-            continue;
-        }
-
-        ALOGV("selecting track %zu", i);
-
-        err = extractor->selectTrack(i);
-        CHECK(err == OK);
-
-        CodecState *state =
-            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
-
-        vidState = state;
-
-        state->mNumBuffersDecoded = 0;
-
-        state->mCodec = MediaCodec::CreateByType(
-                looper, mime.c_str(), false /* encoder */);
-
-        CHECK(state->mCodec != NULL);
-
-        err = state->mCodec->configure(
-                format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
-
-        CHECK(err == OK);
-
-        state->mSignalledInputEOS = false;
-        state->mSawOutputEOS = false;
-
-        break;
-    }
-    CHECK(!stateByTrack.isEmpty());
-    CHECK(vidState != NULL);
-    sp<AMessage> vidFormat;
-    vidState->mCodec->getOutputFormat(&vidFormat);
-
-    // set filter to use ARGB8888
-    vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
-    // set app cache directory path
-    vidFormat->setString("cacheDir", "/system/bin");
-
-    // create RenderScript context for RSFilters
-    RSC::sp<RSC::RS> context = new RSC::RS();
-    context->init("/system/bin");
-
-    sp<RenderScriptWrapper::RSFilterCallback> rsFilter;
-
-    // create renderscript wrapper for RSFilters
-    sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
-    rsWrapper->mContext = context.get();
-
-    CodecState *filterState = new CodecState();
-    filterState->mNumBuffersDecoded = 0;
-
-    sp<AMessage> params = new AMessage();
-
-    switch (filterType) {
-        case FILTERTYPE_ZERO:
-        {
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.zerofilter");
-            params->setInt32("invert", kInvert);
-            break;
-        }
-        case FILTERTYPE_INTRINSIC_BLUR:
-        {
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.intrinsicblur");
-            params->setFloat("blur-radius", kBlurRadius);
-            break;
-        }
-        case FILTERTYPE_SATURATION:
-        {
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.saturation");
-            params->setFloat("saturation", kSaturation);
-            break;
-        }
-        case FILTERTYPE_RS_SATURATION:
-        {
-            SaturationRSFilter *satFilter = new SaturationRSFilter;
-            satFilter->init(context);
-            rsFilter = satFilter;
-            rsWrapper->mCallback = rsFilter;
-            vidFormat->setObject("rs-wrapper", rsWrapper);
-
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.RenderScript");
-            break;
-        }
-        case FILTERTYPE_RS_NIGHT_VISION:
-        {
-            NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
-            nightVisionFilter->init(context);
-            rsFilter = nightVisionFilter;
-            rsWrapper->mCallback = rsFilter;
-            vidFormat->setObject("rs-wrapper", rsWrapper);
-
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.RenderScript");
-            break;
-        }
-        case FILTERTYPE_RS_ARGB_TO_RGBA:
-        {
-            ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
-            argbToRgbaFilter->init(context);
-            rsFilter = argbToRgbaFilter;
-            rsWrapper->mCallback = rsFilter;
-            vidFormat->setObject("rs-wrapper", rsWrapper);
-
-            filterState->mCodec = MediaCodec::CreateByComponentName(
-                    looper, "android.filter.RenderScript");
-            break;
-        }
-        default:
-        {
-            LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType");
-            break;
-        }
-    }
-    CHECK(filterState->mCodec != NULL);
-
-    status_t err = filterState->mCodec->configure(
-            vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
-    CHECK(err == OK);
-
-    filterState->mSignalledInputEOS = false;
-    filterState->mSawOutputEOS = false;
-
-    int64_t startTimeUs = android::ALooper::GetNowUs();
-    int64_t startTimeRender = -1;
-
-    for (size_t i = 0; i < stateByTrack.size(); ++i) {
-        CodecState *state = &stateByTrack.editValueAt(i);
-
-        sp<MediaCodec> codec = state->mCodec;
-
-        CHECK_EQ((status_t)OK, codec->start());
-
-        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
-        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
-
-        ALOGV("got %zu input and %zu output buffers",
-                state->mInBuffers.size(), state->mOutBuffers.size());
-    }
-
-    CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));
-
-    if (kTestFlush) {
-        status_t flushErr = filterState->mCodec->flush();
-        if (flushErr == OK) {
-            ALOGE("FAIL: Flush before start returned OK");
-        } else {
-            ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
-                    flushErr);
-        }
-    }
-
-    CHECK_EQ((status_t)OK, filterState->mCodec->start());
-    CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
-            &filterState->mInBuffers));
-    CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
-            &filterState->mOutBuffers));
-
-    if (kTestFlush) {
-        status_t flushErr = filterState->mCodec->flush();
-        if (flushErr != OK) {
-            ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
-                    flushErr);
-        } else {
-            ALOGV("Flush immediately after start OK");
-        }
-    }
-
-    List<DecodedFrame> decodedFrameIndices;
-
-    // loop until decoder reaches EOS
-    bool sawInputEOS = false;
-    bool sawOutputEOSOnAllTracks = false;
-    while (!sawOutputEOSOnAllTracks) {
-        if (!sawInputEOS) {
-            size_t trackIndex;
-            status_t err = extractor->getSampleTrackIndex(&trackIndex);
-
-            if (err != OK) {
-                ALOGV("saw input eos");
-                sawInputEOS = true;
-            } else {
-                CodecState *state = &stateByTrack.editValueFor(trackIndex);
-
-                size_t index;
-                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
-
-                if (err == OK) {
-                    ALOGV("filling input buffer %zu", index);
-
-                    const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
-                    sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());
-
-                    err = extractor->readSampleData(abuffer);
-                    CHECK(err == OK);
-                    buffer->setRange(abuffer->offset(), abuffer->size());
-
-                    int64_t timeUs;
-                    err = extractor->getSampleTime(&timeUs);
-                    CHECK(err == OK);
-
-                    uint32_t bufferFlags = 0;
-
-                    err = state->mCodec->queueInputBuffer(
-                            index, 0 /* offset */, buffer->size(),
-                            timeUs, bufferFlags);
-
-                    CHECK(err == OK);
-
-                    extractor->advance();
-                } else {
-                    CHECK_EQ(err, -EAGAIN);
-                }
-            }
-        } else {
-            for (size_t i = 0; i < stateByTrack.size(); ++i) {
-                CodecState *state = &stateByTrack.editValueAt(i);
-
-                if (!state->mSignalledInputEOS) {
-                    size_t index;
-                    status_t err =
-                        state->mCodec->dequeueInputBuffer(&index, kTimeout);
-
-                    if (err == OK) {
-                        ALOGV("signalling input EOS on track %zu", i);
-
-                        err = state->mCodec->queueInputBuffer(
-                                index, 0 /* offset */, 0 /* size */,
-                                0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);
-
-                        CHECK(err == OK);
-
-                        state->mSignalledInputEOS = true;
-                    } else {
-                        CHECK_EQ(err, -EAGAIN);
-                    }
-                }
-            }
-        }
-
-        sawOutputEOSOnAllTracks = true;
-        for (size_t i = 0; i < stateByTrack.size(); ++i) {
-            CodecState *state = &stateByTrack.editValueAt(i);
-
-            if (state->mSawOutputEOS) {
-                continue;
-            } else {
-                sawOutputEOSOnAllTracks = false;
-            }
-
-            DecodedFrame frame;
-            status_t err = state->mCodec->dequeueOutputBuffer(
-                    &frame.index, &frame.offset, &frame.size,
-                    &frame.presentationTimeUs, &frame.flags, kTimeout);
-
-            if (err == OK) {
-                ALOGV("draining decoded buffer %zu, time = %lld us",
-                        frame.index, (long long)frame.presentationTimeUs);
-
-                ++(state->mNumBuffersDecoded);
-
-                decodedFrameIndices.push_back(frame);
-
-                if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
-                    ALOGV("reached EOS on decoder output.");
-                    state->mSawOutputEOS = true;
-                }
-
-            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
-                CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
-                        &state->mOutBuffers));
-
-                ALOGV("got %zu output buffers", state->mOutBuffers.size());
-            } else if (err == INFO_FORMAT_CHANGED) {
-                sp<AMessage> format;
-                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
-
-                ALOGV("INFO_FORMAT_CHANGED: %s",
-                        format->debugString().c_str());
-            } else {
-                CHECK_EQ(err, -EAGAIN);
-            }
-
-            tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
-
-            tryDrainOutputBuffer(
-                    filterState, surface, renderSurface,
-                    useTimestamp, &startTimeRender);
-        }
-    }
-
-    // after EOS on decoder, let filter reach EOS
-    while (!filterState->mSawOutputEOS) {
-        tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
-
-        tryDrainOutputBuffer(
-                filterState, surface, renderSurface,
-                useTimestamp, &startTimeRender);
-    }
-
-    int64_t elapsedTimeUs = android::ALooper::GetNowUs() - startTimeUs;
-
-    for (size_t i = 0; i < stateByTrack.size(); ++i) {
-        CodecState *state = &stateByTrack.editValueAt(i);
-
-        CHECK_EQ((status_t)OK, state->mCodec->release());
-
-        printf("track %zu: %" PRId64 " frames decoded and filtered, "
-                "%.2f fps.\n", i, state->mNumBuffersDecoded,
-                state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
-    }
-
-    return 0;
-}
-
-}  // namespace android
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    const char *me = argv[0];
-
-    bool useSurface = false;
-    bool renderSurface = false;
-    bool useTimestamp = false;
-    FilterType filterType = FILTERTYPE_ZERO;
-
-    int res;
-    while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
-        switch (res) {
-            case 'b':
-            {
-                filterType = FILTERTYPE_INTRINSIC_BLUR;
-                break;
-            }
-            case 'c':
-            {
-                filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
-                break;
-            }
-            case 'n':
-            {
-                filterType = FILTERTYPE_RS_NIGHT_VISION;
-                break;
-            }
-            case 'r':
-            {
-                filterType = FILTERTYPE_RS_SATURATION;
-                break;
-            }
-            case 's':
-            {
-                filterType = FILTERTYPE_SATURATION;
-                break;
-            }
-            case 'z':
-            {
-                filterType = FILTERTYPE_ZERO;
-                break;
-            }
-            case 'T':
-            {
-                useTimestamp = true;
-                FALLTHROUGH_INTENDED;
-            }
-            case 'R':
-            {
-                renderSurface = true;
-                FALLTHROUGH_INTENDED;
-            }
-            case 'S':
-            {
-                useSurface = true;
-                break;
-            }
-            case '?':
-            case 'h':
-            default:
-            {
-                usage(me);
-                break;
-            }
-        }
-    }
-
-    argc -= optind;
-    argv += optind;
-
-    if (argc != 1) {
-        usage(me);
-    }
-
-    ProcessState::self()->startThreadPool();
-
-    android::sp<android::ALooper> looper = new android::ALooper;
-    looper->start();
-
-    android::sp<SurfaceComposerClient> composerClient;
-    android::sp<SurfaceControl> control;
-    android::sp<Surface> surface;
-
-    if (useSurface) {
-        composerClient = new SurfaceComposerClient;
-        CHECK_EQ((status_t)OK, composerClient->initCheck());
-
-        const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
-        CHECK(display != nullptr);
-
-        ui::DisplayMode mode;
-        CHECK_EQ(SurfaceComposerClient::getActiveDisplayMode(display, &mode), NO_ERROR);
-
-        const ui::Size& resolution = mode.resolution;
-        const ssize_t displayWidth = resolution.getWidth();
-        const ssize_t displayHeight = resolution.getHeight();
-
-        ALOGV("display is %zd x %zd", displayWidth, displayHeight);
-
-        control = composerClient->createSurface(
-                String8("A Surface"), displayWidth, displayHeight,
-                PIXEL_FORMAT_RGBA_8888, 0);
-
-        CHECK(control != NULL);
-        CHECK(control->isValid());
-
-        SurfaceComposerClient::Transaction{}
-                .setLayer(control, INT_MAX)
-                .show(control)
-                .apply();
-
-        surface = control->getSurface();
-        CHECK(surface != NULL);
-    }
-
-    decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);
-
-    if (useSurface) {
-        composerClient->dispose();
-    }
-
-    looper->stop();
-
-    return 0;
-}
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index bc7e41e..185491f 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -78,10 +78,14 @@
     int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
 
     if (fd < 0) {
-        ALOGE("couldn't open file");
-        return fd;
+        ALOGE("couldn't open output file %s", outputFileName);
+        return 1;
     }
-    sp<MediaMuxer> muxer = new MediaMuxer(fd, container);
+    sp<MediaMuxer> muxer = MediaMuxer::create(fd, container);
+    if (muxer == nullptr) {
+        fprintf(stderr, "unable to instantiate muxer for format %d\n", container);
+        return 1;
+    }
     close(fd);
 
     size_t trackCount = extractor->countTracks();
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 5743ad6..87e8832 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -89,6 +89,9 @@
         if (err != OK) {
             return err;
         }
+        if (buffer == nullptr) {
+            return AMEDIA_ERROR_UNKNOWN;
+        }
 
         char x = (char)((double)rand() / RAND_MAX * 255);
         memset((*buffer)->data(), x, mSize);
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 40b2392..1ffe801 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -318,7 +318,13 @@
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
     CHECK_EQ(composerClient->initCheck(), (status_t)OK);
 
-    const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+    const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
+    if (ids.empty()) {
+        SLOGE("Failed to get ID for any displays\n");
+        return 1;
+    }
+
+    const sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(ids.front());
     CHECK(display != nullptr);
 
     ui::DisplayMode mode;
diff --git a/drm/README.md b/drm/README.md
new file mode 100644
index 0000000..2681aac
--- /dev/null
+++ b/drm/README.md
@@ -0,0 +1,47 @@
+## AIDL error handling
+
+Starting in **Android U (14)**, `libmediadrm` (app-side) understands extra error
+details from **AIDL** DRM HALs, passed through the binder exception message
+as a JSON string. The supported fields are:
+* `cdmError` (*int*)
+* `oemError` (*int*)
+* `context` (*int*)
+* `errorMessage` (*str*)
+
+The error details are reported to apps through the Java interface
+`android.media.MediaDrmThrowable`. See the javadoc of `MediaDrmThrowable` for
+the detailed definition of each field above.
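+
+For illustration only, here is a sketch of how such a payload could be decoded
+on the framework side with jsoncpp (which `libmediadrm` links against). The
+struct and helper below are hypothetical, not part of any API; they only show
+the expected shape of the data:
+
+```cpp
+#include <json/json.h>
+#include <string>
+
+// Hypothetical container for the fields listed above.
+struct DrmErrorDetails {
+    int cdmError = 0;
+    int oemError = 0;
+    int context = 0;
+    std::string errorMessage;
+};
+
+// Hypothetical helper: pull the supported fields out of a binder exception
+// message carrying a JSON payload. Returns false when the message is not
+// JSON, in which case no extra details are available.
+bool parseDrmErrorDetails(const std::string& exceptionMessage, DrmErrorDetails* out) {
+    Json::Value root;
+    Json::Reader reader;
+    if (!reader.parse(exceptionMessage, root)) {
+        return false;
+    }
+    out->cdmError = root.get("cdmError", 0).asInt();
+    out->oemError = root.get("oemError", 0).asInt();
+    out->context = root.get("context", 0).asInt();
+    out->errorMessage = root.get("errorMessage", "").asString();
+    return true;
+}
+```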
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 3642898..a9b4b2a 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -1,17 +1,17 @@
 {
-  "presubmit-large": [
+  "presubmit": [
     // The following tests validate codec and drm path.
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index df3a6a2..d064123 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -31,7 +31,33 @@
     ],
 }
 
-cc_binary {
+prebuilt_etc {
+    name: "drmserver.zygote64_32.rc",
+    src: "drmserver.zygote64_32.rc",
+    sub_dir: "init/hw",
+}
+
+prebuilt_etc {
+    name: "drmserver.zygote64.rc",
+    src: "drmserver.zygote64.rc",
+    sub_dir: "init/hw",
+}
+
+soong_config_module_type {
+    name: "drmserver_cc_binary",
+    module_type: "cc_binary",
+    config_namespace: "ANDROID",
+    bool_variables: ["TARGET_DYNAMIC_64_32_DRMSERVER"],
+    properties: [
+        "compile_multilib",
+        "init_rc",
+        "multilib.lib32.suffix",
+        "multilib.lib64.suffix",
+        "required",
+    ],
+}
+
+drmserver_cc_binary {
     name: "drmserver",
 
     srcs: [
@@ -61,5 +87,61 @@
 
     compile_multilib: "prefer32",
 
-    init_rc: ["drmserver.rc"],
+    soong_config_variables: {
+        TARGET_DYNAMIC_64_32_DRMSERVER: {
+            compile_multilib: "both",
+            multilib: {
+                lib32: {
+                    suffix: "32",
+                },
+                lib64: {
+                    suffix: "64",
+                },
+            },
+            required: [
+                "drmserver.zygote64_32.rc",
+                "drmserver.zygote64.rc",
+            ],
+            init_rc: ["drmserver_dynamic.rc"],
+            conditions_default: {
+                init_rc: ["drmserver.rc"],
+            },
+        },
+    },
 }
+
+cc_fuzz {
+    name: "drmserver_fuzzer",
+
+    defaults: [
+        "service_fuzzer_defaults",
+    ],
+
+    srcs: [
+        "fuzzer/DrmFuzzer.cpp",
+        "DrmManagerService.cpp",
+        "DrmManager.cpp",
+    ],
+
+    static_libs: [
+        "libmediautils",
+        "liblog",
+        "libdl",
+        "libdrmframeworkcommon",
+        "libselinux",
+        "libstagefright_foundation",
+    ],
+
+    shared_libs: [
+        "libmediametrics",
+    ],
+
+    fuzz_config: {
+        libfuzzer_options: [
+            "max_len=50000",
+        ],
+        cc: [
+            "android-drm-team@google.com",
+        ],
+    },
+}
\ No newline at end of file
diff --git a/drm/drmserver/DrmManagerService.h b/drm/drmserver/DrmManagerService.h
index f9b8bef..56201d9 100644
--- a/drm/drmserver/DrmManagerService.h
+++ b/drm/drmserver/DrmManagerService.h
@@ -141,6 +141,8 @@
 
     virtual status_t dump(int fd, const Vector<String16>& args);
 
+    friend class DrmManagerServiceFuzzer;
+
 private:
     sp<DrmManager> mDrmManager;
 };
diff --git a/drm/drmserver/drmserver.zygote64.rc b/drm/drmserver/drmserver.zygote64.rc
new file mode 100644
index 0000000..60cd906
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver64
+    disabled
+    class main
+    user drm
+    group drm system inet drmrpc readproc
+    task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver.zygote64_32.rc b/drm/drmserver/drmserver.zygote64_32.rc
new file mode 100644
index 0000000..c881acf
--- /dev/null
+++ b/drm/drmserver/drmserver.zygote64_32.rc
@@ -0,0 +1,6 @@
+service drm /system/bin/drmserver32
+    disabled
+    class main
+    user drm
+    group drm system inet drmrpc readproc
+    task_profiles ProcessCapacityHigh
diff --git a/drm/drmserver/drmserver_dynamic.rc b/drm/drmserver/drmserver_dynamic.rc
new file mode 100644
index 0000000..bfaada1
--- /dev/null
+++ b/drm/drmserver/drmserver_dynamic.rc
@@ -0,0 +1,7 @@
+import /system/etc/init/hw/drmserver.${ro.zygote}.rc
+
+on property:drm.service.enabled=true
+    start drm
+
+on property:drm.service.enabled=1
+    start drm
diff --git a/drm/drmserver/fuzzer/DrmFuzzer.cpp b/drm/drmserver/fuzzer/DrmFuzzer.cpp
new file mode 100644
index 0000000..4b23679
--- /dev/null
+++ b/drm/drmserver/fuzzer/DrmFuzzer.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "DrmManagerService.h"
+
+namespace android {
+class DrmManagerServiceFuzzer {
+public:
+    DrmManagerServiceFuzzer() {}
+
+    void fuzz(const uint8_t* data, size_t size) {
+        auto drmService = new DrmManagerService();
+        fuzzService(drmService, FuzzedDataProvider(data, size));
+    }
+};
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    android::DrmManagerServiceFuzzer serviceFuzzer;
+    serviceFuzzer.fuzz(data, size);
+    return 0;
+}
\ No newline at end of file
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index be2b546..6e55a16 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -48,7 +48,6 @@
     srcs: ["src/FwdLockEngine.cpp"],
 
     shared_libs: [
-        "libandroidicu",
         "libutils",
         "liblog",
         "libdl",
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 408d216..7d68c5b 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -35,6 +35,8 @@
         "CryptoHalAidl.cpp",
         "DrmUtils.cpp",
         "DrmHalListener.cpp",
+        "DrmStatus.cpp",
+        "DrmMetricsLogger.cpp",
     ],
 
     local_include_dirs: [
@@ -74,6 +76,7 @@
     static_libs: [
         "resourcemanager_aidl_interface-ndk",
         "libaidlcommonsupport",
+        "libjsoncpp",
     ],
 
     export_shared_lib_headers: [
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index f95d527..fc1780d 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -71,7 +71,7 @@
     mCryptoHalHidl->notifyResolution(width, height);
 }
 
-status_t CryptoHal::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+DrmStatus CryptoHal::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
     // This requires plugin to be created.
     if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->setMediaDrmSession(sessionId);
     return mCryptoHalHidl->setMediaDrmSession(sessionId);
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index 8b9d1de..c1fd797 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -41,7 +41,7 @@
 using ::aidl::android::hardware::drm::DecryptArgs;
 
 using ::android::sp;
-using ::android::DrmUtils::statusAidlToStatusT;
+using ::android::DrmUtils::statusAidlToDrmStatus;
 using ::android::hardware::hidl_array;
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_memory;
@@ -260,7 +260,7 @@
     }
 }
 
-status_t CryptoHalAidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+DrmStatus CryptoHalAidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
@@ -268,7 +268,7 @@
     }
 
     auto err = mPlugin->setMediaDrmSession(toStdVec(sessionId));
-    return statusAidlToStatusT(err);
+    return statusAidlToDrmStatus(err);
 }
 
 ssize_t CryptoHalAidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
@@ -352,7 +352,7 @@
     int32_t result = 0;
     ::ndk::ScopedAStatus statusAidl = mPlugin->decrypt(args, &result);
 
-    err = statusAidlToStatusT(statusAidl);
+    err = statusAidlToDrmStatus(statusAidl);
     std::string msgStr(statusAidl.getMessage());
     if (errorDetailMsg != nullptr) {
         *errorDetailMsg = toString8(msgStr);
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index 55364b5..458a1ae 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -386,7 +386,7 @@
     ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
 }
 
-status_t CryptoHalHidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+DrmStatus CryptoHalHidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index c394d5a..754f066 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -20,6 +20,7 @@
 #include <mediadrm/DrmHal.h>
 #include <mediadrm/DrmHalAidl.h>
 #include <mediadrm/DrmHalHidl.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/DrmUtils.h>
 
 namespace android {
@@ -31,49 +32,49 @@
 
 DrmHal::~DrmHal() {}
 
-status_t DrmHal::initCheck() const {
-    if (mDrmHalAidl->initCheck() == OK || mDrmHalHidl->initCheck() == OK) return OK;
-    if (mDrmHalAidl->initCheck() == NO_INIT || mDrmHalHidl->initCheck() == NO_INIT) return NO_INIT;
+DrmStatus DrmHal::initCheck() const {
+    if (mDrmHalAidl->initCheck() == OK || mDrmHalHidl->initCheck() == OK) return DrmStatus(OK);
+    if (mDrmHalAidl->initCheck() == NO_INIT || mDrmHalHidl->initCheck() == NO_INIT)
+        return DrmStatus(NO_INIT);
     return mDrmHalHidl->initCheck();
 }
 
-status_t DrmHal::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
-                                         DrmPlugin::SecurityLevel securityLevel, bool* result) {
-    status_t statusResult;
-    statusResult = mDrmHalAidl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
+DrmStatus DrmHal::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                          DrmPlugin::SecurityLevel securityLevel, bool* result) {
+    DrmStatus statusResult =
+            mDrmHalAidl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
     if (*result) return statusResult;
     return mDrmHalHidl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
 }
 
-status_t DrmHal::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
-    status_t statusResult;
-    statusResult = mDrmHalAidl->createPlugin(uuid, appPackageName);
-    if (statusResult != OK) return mDrmHalHidl->createPlugin(uuid, appPackageName);
-    return statusResult;
+DrmStatus DrmHal::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+    return mDrmHalAidl->createPlugin(uuid, appPackageName) == OK
+                   ? DrmStatus(OK)
+                   : mDrmHalHidl->createPlugin(uuid, appPackageName);
 }
 
-status_t DrmHal::destroyPlugin() {
-    status_t statusResult = mDrmHalAidl->destroyPlugin();
-    status_t statusResultHidl = mDrmHalHidl->destroyPlugin();
+DrmStatus DrmHal::destroyPlugin() {
+    DrmStatus statusResult = mDrmHalAidl->destroyPlugin();
+    DrmStatus statusResultHidl = mDrmHalHidl->destroyPlugin();
     if (statusResult != OK) return statusResult;
     return statusResultHidl;
 }
 
-status_t DrmHal::openSession(DrmPlugin::SecurityLevel securityLevel, Vector<uint8_t>& sessionId) {
+DrmStatus DrmHal::openSession(DrmPlugin::SecurityLevel securityLevel, Vector<uint8_t>& sessionId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->openSession(securityLevel, sessionId);
     return mDrmHalHidl->openSession(securityLevel, sessionId);
 }
 
-status_t DrmHal::closeSession(Vector<uint8_t> const& sessionId) {
+DrmStatus DrmHal::closeSession(Vector<uint8_t> const& sessionId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->closeSession(sessionId);
     return mDrmHalHidl->closeSession(sessionId);
 }
 
-status_t DrmHal::getKeyRequest(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& initData,
-                               String8 const& mimeType, DrmPlugin::KeyType keyType,
-                               KeyedVector<String8, String8> const& optionalParameters,
-                               Vector<uint8_t>& request, String8& defaultUrl,
-                               DrmPlugin::KeyRequestType* keyRequestType) {
+DrmStatus DrmHal::getKeyRequest(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& initData,
+                                String8 const& mimeType, DrmPlugin::KeyType keyType,
+                                KeyedVector<String8, String8> const& optionalParameters,
+                                Vector<uint8_t>& request, String8& defaultUrl,
+                                DrmPlugin::KeyRequestType* keyRequestType) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->getKeyRequest(sessionId, initData, mimeType, keyType,
                                           optionalParameters, request, defaultUrl, keyRequestType);
@@ -81,212 +82,212 @@
                                       request, defaultUrl, keyRequestType);
 }
 
-status_t DrmHal::provideKeyResponse(Vector<uint8_t> const& sessionId,
-                                    Vector<uint8_t> const& response, Vector<uint8_t>& keySetId) {
+DrmStatus DrmHal::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                     Vector<uint8_t> const& response, Vector<uint8_t>& keySetId) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->provideKeyResponse(sessionId, response, keySetId);
     return mDrmHalHidl->provideKeyResponse(sessionId, response, keySetId);
 }
 
-status_t DrmHal::removeKeys(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHal::removeKeys(Vector<uint8_t> const& keySetId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeKeys(keySetId);
     return mDrmHalHidl->removeKeys(keySetId);
 }
 
-status_t DrmHal::restoreKeys(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHal::restoreKeys(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keySetId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->restoreKeys(sessionId, keySetId);
     return mDrmHalHidl->restoreKeys(sessionId, keySetId);
 }
 
-status_t DrmHal::queryKeyStatus(Vector<uint8_t> const& sessionId,
-                                KeyedVector<String8, String8>& infoMap) const {
+DrmStatus DrmHal::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                 KeyedVector<String8, String8>& infoMap) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->queryKeyStatus(sessionId, infoMap);
     return mDrmHalHidl->queryKeyStatus(sessionId, infoMap);
 }
 
-status_t DrmHal::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
-                                     Vector<uint8_t>& request, String8& defaultUrl) {
+DrmStatus DrmHal::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                      Vector<uint8_t>& request, String8& defaultUrl) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->getProvisionRequest(certType, certAuthority, request, defaultUrl);
     return mDrmHalHidl->getProvisionRequest(certType, certAuthority, request, defaultUrl);
 }
 
-status_t DrmHal::provideProvisionResponse(Vector<uint8_t> const& response,
-                                          Vector<uint8_t>& certificate,
-                                          Vector<uint8_t>& wrappedKey) {
+DrmStatus DrmHal::provideProvisionResponse(Vector<uint8_t> const& response,
+                                           Vector<uint8_t>& certificate,
+                                           Vector<uint8_t>& wrappedKey) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->provideProvisionResponse(response, certificate, wrappedKey);
     return mDrmHalHidl->provideProvisionResponse(response, certificate, wrappedKey);
 }
 
-status_t DrmHal::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+DrmStatus DrmHal::getSecureStops(List<Vector<uint8_t>>& secureStops) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStops(secureStops);
     return mDrmHalHidl->getSecureStops(secureStops);
 }
 
-status_t DrmHal::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+DrmStatus DrmHal::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStopIds(secureStopIds);
     return mDrmHalHidl->getSecureStopIds(secureStopIds);
 }
 
-status_t DrmHal::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+DrmStatus DrmHal::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStop(ssid, secureStop);
     return mDrmHalHidl->getSecureStop(ssid, secureStop);
 }
 
-status_t DrmHal::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+DrmStatus DrmHal::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->releaseSecureStops(ssRelease);
     return mDrmHalHidl->releaseSecureStops(ssRelease);
 }
 
-status_t DrmHal::removeSecureStop(Vector<uint8_t> const& ssid) {
+DrmStatus DrmHal::removeSecureStop(Vector<uint8_t> const& ssid) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeSecureStop(ssid);
     return mDrmHalHidl->removeSecureStop(ssid);
 }
 
-status_t DrmHal::removeAllSecureStops() {
+DrmStatus DrmHal::removeAllSecureStops() {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeAllSecureStops();
     return mDrmHalHidl->removeAllSecureStops();
 }
 
-status_t DrmHal::getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
-                               DrmPlugin::HdcpLevel* maxLevel) const {
+DrmStatus DrmHal::getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                                DrmPlugin::HdcpLevel* maxLevel) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getHdcpLevels(connectedLevel, maxLevel);
     return mDrmHalHidl->getHdcpLevels(connectedLevel, maxLevel);
 }
 
-status_t DrmHal::getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const {
+DrmStatus DrmHal::getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->getNumberOfSessions(currentSessions, maxSessions);
     return mDrmHalHidl->getNumberOfSessions(currentSessions, maxSessions);
 }
 
-status_t DrmHal::getSecurityLevel(Vector<uint8_t> const& sessionId,
-                                  DrmPlugin::SecurityLevel* level) const {
+DrmStatus DrmHal::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                   DrmPlugin::SecurityLevel* level) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecurityLevel(sessionId, level);
     return mDrmHalHidl->getSecurityLevel(sessionId, level);
 }
 
-status_t DrmHal::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+DrmStatus DrmHal::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getOfflineLicenseKeySetIds(keySetIds);
     return mDrmHalHidl->getOfflineLicenseKeySetIds(keySetIds);
 }
 
-status_t DrmHal::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHal::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeOfflineLicense(keySetId);
     return mDrmHalHidl->removeOfflineLicense(keySetId);
 }
 
-status_t DrmHal::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
-                                        DrmPlugin::OfflineLicenseState* licenseState) const {
+DrmStatus DrmHal::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                         DrmPlugin::OfflineLicenseState* licenseState) const {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->getOfflineLicenseState(keySetId, licenseState);
     return mDrmHalHidl->getOfflineLicenseState(keySetId, licenseState);
 }
 
-status_t DrmHal::getPropertyString(String8 const& name, String8& value) const {
+DrmStatus DrmHal::getPropertyString(String8 const& name, String8& value) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getPropertyString(name, value);
     return mDrmHalHidl->getPropertyString(name, value);
 }
 
-status_t DrmHal::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+DrmStatus DrmHal::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getPropertyByteArray(name, value);
     return mDrmHalHidl->getPropertyByteArray(name, value);
 }
 
-status_t DrmHal::setPropertyString(String8 const& name, String8 const& value) const {
+DrmStatus DrmHal::setPropertyString(String8 const& name, String8 const& value) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPropertyString(name, value);
     return mDrmHalHidl->setPropertyString(name, value);
 }
 
-status_t DrmHal::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+DrmStatus DrmHal::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPropertyByteArray(name, value);
     return mDrmHalHidl->setPropertyByteArray(name, value);
 }
 
-status_t DrmHal::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+DrmStatus DrmHal::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getMetrics(consumer);
     return mDrmHalHidl->getMetrics(consumer);
 }
 
-status_t DrmHal::setCipherAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+DrmStatus DrmHal::setCipherAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->setCipherAlgorithm(sessionId, algorithm);
     return mDrmHalHidl->setCipherAlgorithm(sessionId, algorithm);
 }
 
-status_t DrmHal::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+DrmStatus DrmHal::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setMacAlgorithm(sessionId, algorithm);
     return mDrmHalHidl->setMacAlgorithm(sessionId, algorithm);
 }
 
-status_t DrmHal::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                         Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                         Vector<uint8_t>& output) {
+DrmStatus DrmHal::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                          Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                          Vector<uint8_t>& output) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->encrypt(sessionId, keyId, input, iv, output);
     return mDrmHalHidl->encrypt(sessionId, keyId, input, iv, output);
 }
 
-status_t DrmHal::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                         Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                         Vector<uint8_t>& output) {
+DrmStatus DrmHal::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                          Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                          Vector<uint8_t>& output) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->decrypt(sessionId, keyId, input, iv, output);
     return mDrmHalHidl->decrypt(sessionId, keyId, input, iv, output);
 }
 
-status_t DrmHal::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                      Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+DrmStatus DrmHal::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                       Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->sign(sessionId, keyId, message, signature);
     return mDrmHalHidl->sign(sessionId, keyId, message, signature);
 }
 
-status_t DrmHal::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                        Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
-                        bool& match) {
+DrmStatus DrmHal::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                         Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                         bool& match) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->verify(sessionId, keyId, message, signature, match);
     return mDrmHalHidl->verify(sessionId, keyId, message, signature, match);
 }
 
-status_t DrmHal::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
-                         Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
-                         Vector<uint8_t>& signature) {
+DrmStatus DrmHal::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                          Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                          Vector<uint8_t>& signature) {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->signRSA(sessionId, algorithm, message, wrappedKey, signature);
     return mDrmHalHidl->signRSA(sessionId, algorithm, message, wrappedKey, signature);
 }
 
-status_t DrmHal::setListener(const sp<IDrmClient>& listener) {
+DrmStatus DrmHal::setListener(const sp<IDrmClient>& listener) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setListener(listener);
     return mDrmHalHidl->setListener(listener);
 }
 
-status_t DrmHal::requiresSecureDecoder(const char* mime, bool* required) const {
+DrmStatus DrmHal::requiresSecureDecoder(const char* mime, bool* required) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->requiresSecureDecoder(mime, required);
     return mDrmHalHidl->requiresSecureDecoder(mime, required);
 }
 
-status_t DrmHal::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
-                                       bool* required) const {
+DrmStatus DrmHal::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
+                                        bool* required) const {
     if (mDrmHalAidl->initCheck() == OK)
         return mDrmHalAidl->requiresSecureDecoder(mime, securityLevel, required);
     return mDrmHalHidl->requiresSecureDecoder(mime, securityLevel, required);
 }
 
-status_t DrmHal::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+DrmStatus DrmHal::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPlaybackId(sessionId, playbackId);
     return mDrmHalHidl->setPlaybackId(sessionId, playbackId);
 }
 
-status_t DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+DrmStatus DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
     if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getLogMessages(logs);
     return mDrmHalHidl->getLogMessages(logs);
 }
 
-status_t DrmHal::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+DrmStatus DrmHal::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
     status_t statusResult;
     statusResult = mDrmHalAidl->getSupportedSchemes(schemes);
     if (statusResult == OK) return statusResult;
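
The DrmHalAidl.cpp hunks below swap DrmUtils::statusAidlToStatusT for DrmUtils::statusAidlToDrmStatus wherever a ::ndk::ScopedAStatus comes back from the AIDL plugin. A hypothetical, simplified sketch of that kind of translation follows; the real helper lives in DrmUtils, maps the DRM HAL's Status enum onto framework error codes case by case, and returns a DrmStatus rather than a bare status_t.

    // Sketch only: shows the shape of the conversion, not the real mapping.
    #include <android/binder_auto_utils.h>  // ::ndk::ScopedAStatus
    #include <android/binder_status.h>      // EX_SERVICE_SPECIFIC
    #include <utils/Errors.h>               // status_t, OK, DEAD_OBJECT

    namespace android {

    static status_t statusAidlToStatusSketch(const ::ndk::ScopedAStatus& status) {
        if (status.isOk()) return OK;
        if (status.getExceptionCode() == EX_SERVICE_SPECIFIC) {
            // Service-specific errors carry the HAL's Status value; the real
            // helper translates it, this sketch just passes the code through.
            return static_cast<status_t>(status.getServiceSpecificError());
        }
        // Transport-level failures surface as DEAD_OBJECT, matching how the
        // hunks below treat !isOk() transactions.
        return DEAD_OBJECT;
    }

    }  // namespace android
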
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index bdd83e9..5ec7337 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -29,9 +29,9 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <mediadrm/DrmHalAidl.h>
 #include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/DrmUtils.h>
 
-using ::android::DrmUtils::statusAidlToStatusT;
 using ::aidl::android::hardware::drm::CryptoSchemes;
 using ::aidl::android::hardware::drm::DrmMetricNamedValue;
 using ::aidl::android::hardware::drm::DrmMetricValue;
@@ -55,6 +55,7 @@
 using ::aidl::android::hardware::drm::Status;
 using ::aidl::android::hardware::drm::SupportedContentType;
 using ::aidl::android::hardware::drm::Uuid;
+using ::android::DrmUtils::statusAidlToDrmStatus;
 using DrmMetricGroupAidl = ::aidl::android::hardware::drm::DrmMetricGroup;
 using DrmMetricGroupHidl = ::android::hardware::drm::V1_1::DrmMetricGroup;
 using DrmMetricAidl = ::aidl::android::hardware::drm::DrmMetric;
@@ -392,23 +393,24 @@
 
 // DrmHalAidl methods
 DrmHalAidl::DrmHalAidl()
-    : mListener(::ndk::SharedRefBase::make<DrmHalListener>(&mMetrics)),
+    : mMetrics(std::make_shared<MediaDrmMetrics>()),
+      mListener(::ndk::SharedRefBase::make<DrmHalListener>(mMetrics)),
       mFactories(DrmUtils::makeDrmFactoriesAidl()),
       mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {}
 
-status_t DrmHalAidl::initCheck() const {
-    return mInitCheck;
+DrmStatus DrmHalAidl::initCheck() const {
+    return DrmStatus(mInitCheck);
 }
 
 DrmHalAidl::~DrmHalAidl() {}
 
-status_t DrmHalAidl::setListener(const sp<IDrmClient>& listener) {
+DrmStatus DrmHalAidl::setListener(const sp<IDrmClient>& listener) {
     mListener->setListener(listener);
-    return NO_ERROR;
+    return DrmStatus(NO_ERROR);
 }
 
-status_t DrmHalAidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
-                                             DrmPlugin::SecurityLevel level, bool* isSupported) {
+DrmStatus DrmHalAidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel level, bool* isSupported) {
     Mutex::Autolock autoLock(mLock);
     *isSupported = false;
     Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
@@ -438,9 +440,9 @@
                 // isCryptoSchemeSupported(uuid, mimeType)
                 *isSupported = contentTypes.count(mimeTypeStr);
             }
-            return OK;
+            return DrmStatus(OK);
         } else if (mimeType == "") {
-            return BAD_VALUE;
+            return DrmStatus(BAD_VALUE);
         }
 
         auto ct = contentTypes[mimeTypeStr];
@@ -452,17 +454,17 @@
         break;
     }
 
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalAidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+DrmStatus DrmHalAidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
     Mutex::Autolock autoLock(mLock);
-
+    if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
     Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
     std::string appPackageNameAidl = toStdString(appPackageName);
     std::shared_ptr<IDrmPluginAidl> pluginAidl;
-    mMetrics.SetAppPackageName(appPackageName);
-    mMetrics.SetAppUid(AIBinder_getCallingUid());
+    mMetrics->SetAppPackageName(appPackageName);
+    mMetrics->SetAppUid(AIBinder_getCallingUid());
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
         ::ndk::ScopedAStatus status =
                 mFactories[i]->createDrmPlugin(uuidAidl, appPackageNameAidl, &pluginAidl);
@@ -497,10 +499,10 @@
         }
     }
 
-    return mInitCheck;
+    return DrmStatus(mInitCheck);
 }
 
-status_t DrmHalAidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
+DrmStatus DrmHalAidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -510,14 +512,14 @@
         return ERROR_DRM_CANNOT_HANDLE;
     }
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
     bool retry = true;
     do {
         std::vector<uint8_t> aSessionId;
 
         ::ndk::ScopedAStatus status = mPlugin->openSession(aSecurityLevel, &aSessionId);
         if (status.isOk()) sessionId = toVector(aSessionId);
-        err = statusAidlToStatusT(status);
+        err = statusAidlToDrmStatus(status);
 
         if (err == ERROR_DRM_RESOURCE_BUSY && retry) {
             mLock.unlock();
@@ -538,20 +540,20 @@
                 AIBinder_getCallingPid(), std::static_pointer_cast<IResourceManagerClient>(client),
                 sessionId);
         mOpenSessions.push_back(client);
-        mMetrics.SetSessionStart(sessionId);
+        mMetrics->SetSessionStart(sessionId);
     }
 
-    mMetrics.mOpenSessionCounter.Increment(err);
+    mMetrics->mOpenSessionCounter.Increment(err);
     return err;
 }
 
-status_t DrmHalAidl::closeSession(Vector<uint8_t> const& sessionId) {
+DrmStatus DrmHalAidl::closeSession(Vector<uint8_t> const& sessionId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
     ::ndk::ScopedAStatus status = mPlugin->closeSession(sessionIdAidl);
-    status_t response = statusAidlToStatusT(status);
+    DrmStatus response = statusAidlToDrmStatus(status);
     if (status.isOk()) {
         DrmSessionManager::Instance()->removeSession(sessionId);
         for (auto i = mOpenSessions.begin(); i != mOpenSessions.end(); i++) {
@@ -561,22 +563,22 @@
             }
         }
 
-        mMetrics.SetSessionEnd(sessionId);
+        mMetrics->SetSessionEnd(sessionId);
     }
 
-    mMetrics.mCloseSessionCounter.Increment(response);
+    mMetrics->mCloseSessionCounter.Increment(response);
     return response;
 }
 
-status_t DrmHalAidl::getKeyRequest(Vector<uint8_t> const& sessionId,
-                                   Vector<uint8_t> const& initData, String8 const& mimeType,
-                                   DrmPlugin::KeyType keyType,
-                                   KeyedVector<String8, String8> const& optionalParameters,
-                                   Vector<uint8_t>& request, String8& defaultUrl,
-                                   DrmPlugin::KeyRequestType* keyRequestType) {
+DrmStatus DrmHalAidl::getKeyRequest(Vector<uint8_t> const& sessionId,
+                                    Vector<uint8_t> const& initData, String8 const& mimeType,
+                                    DrmPlugin::KeyType keyType,
+                                    KeyedVector<String8, String8> const& optionalParameters,
+                                    Vector<uint8_t>& request, String8& defaultUrl,
+                                    DrmPlugin::KeyRequestType* keyRequestType) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
-    EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTimeUs);
+    EventTimer<status_t> keyRequestTimer(&mMetrics->mGetKeyRequestTimeUs);
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
@@ -592,7 +594,7 @@
         return BAD_VALUE;
     }
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
     std::vector<uint8_t> initDataAidl = toStdVec(initData);
@@ -607,21 +609,21 @@
         *keyRequestType = toKeyRequestType(keyRequest.requestType);
     }
 
-    err = statusAidlToStatusT(status);
+    err = statusAidlToDrmStatus(status);
     keyRequestTimer.SetAttribute(err);
     return err;
 }
 
-status_t DrmHalAidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
-                                        Vector<uint8_t> const& response,
-                                        Vector<uint8_t>& keySetId) {
+DrmStatus DrmHalAidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                         Vector<uint8_t> const& response,
+                                         Vector<uint8_t>& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
-    EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTimeUs);
+    EventTimer<status_t> keyResponseTimer(&mMetrics->mProvideKeyResponseTimeUs);
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
     std::vector<uint8_t> responseAidl = toStdVec(response);
@@ -630,21 +632,21 @@
             mPlugin->provideKeyResponse(sessionIdAidl, responseAidl, &keySetIdsAidl);
 
     if (status.isOk()) keySetId = toVector(keySetIdsAidl.keySetId);
-    err = statusAidlToStatusT(status);
+    err = statusAidlToDrmStatus(status);
     keyResponseTimer.SetAttribute(err);
     return err;
 }
 
-status_t DrmHalAidl::removeKeys(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalAidl::removeKeys(Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     ::ndk::ScopedAStatus status = mPlugin->removeKeys(toStdVec(keySetId));
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::restoreKeys(Vector<uint8_t> const& sessionId,
-                                 Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalAidl::restoreKeys(Vector<uint8_t> const& sessionId,
+                                  Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -653,11 +655,11 @@
     KeySetId keySetIdsAidl;
     keySetIdsAidl.keySetId = toStdVec(keySetId);
     ::ndk::ScopedAStatus status = mPlugin->restoreKeys(toStdVec(sessionId), keySetIdsAidl);
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
-                                    KeyedVector<String8, String8>& infoMap) const {
+DrmStatus DrmHalAidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                     KeyedVector<String8, String8>& infoMap) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -668,15 +670,15 @@
 
     infoMap = toKeyedVector(infoMapAidl);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
-                                         Vector<uint8_t>& request, String8& defaultUrl) {
+DrmStatus DrmHalAidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                          Vector<uint8_t>& request, String8& defaultUrl) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     ProvisionRequest requestAidl;
     ::ndk::ScopedAStatus status = mPlugin->getProvisionRequest(
@@ -685,29 +687,29 @@
     request = toVector(requestAidl.request);
     defaultUrl = toString8(requestAidl.defaultUrl);
 
-    err = statusAidlToStatusT(status);
-    mMetrics.mGetProvisionRequestCounter.Increment(err);
+    err = statusAidlToDrmStatus(status);
+    mMetrics->mGetProvisionRequestCounter.Increment(err);
     return err;
 }
 
-status_t DrmHalAidl::provideProvisionResponse(Vector<uint8_t> const& response,
-                                              Vector<uint8_t>& certificate,
-                                              Vector<uint8_t>& wrappedKey) {
+DrmStatus DrmHalAidl::provideProvisionResponse(Vector<uint8_t> const& response,
+                                               Vector<uint8_t>& certificate,
+                                               Vector<uint8_t>& wrappedKey) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
     ProvideProvisionResponseResult result;
     ::ndk::ScopedAStatus status = mPlugin->provideProvisionResponse(toStdVec(response), &result);
 
     certificate = toVector(result.certificate);
     wrappedKey = toVector(result.wrappedKey);
-    err = statusAidlToStatusT(status);
-    mMetrics.mProvideProvisionResponseCounter.Increment(err);
+    err = statusAidlToDrmStatus(status);
+    mMetrics->mProvideProvisionResponseCounter.Increment(err);
     return err;
 }
 
-status_t DrmHalAidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+DrmStatus DrmHalAidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -716,10 +718,10 @@
 
     secureStops = toSecureStops(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+DrmStatus DrmHalAidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -728,10 +730,10 @@
 
     secureStopIds = toSecureStopIds(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+DrmStatus DrmHalAidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -743,10 +745,10 @@
 
     secureStop = toVector(result.opaqueData);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+DrmStatus DrmHalAidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -754,10 +756,10 @@
     ssId.opaqueData = toStdVec(ssRelease);
     ::ndk::ScopedAStatus status = mPlugin->releaseSecureStops(ssId);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::removeSecureStop(Vector<uint8_t> const& ssid) {
+DrmStatus DrmHalAidl::removeSecureStop(Vector<uint8_t> const& ssid) {
     Mutex::Autolock autoLock(mLock);
 
     INIT_CHECK();
@@ -765,19 +767,19 @@
     SecureStopId ssidAidl;
     ssidAidl.secureStopId = toStdVec(ssid);
     ::ndk::ScopedAStatus status = mPlugin->removeSecureStop(ssidAidl);
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::removeAllSecureStops() {
+DrmStatus DrmHalAidl::removeAllSecureStops() {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     ::ndk::ScopedAStatus status = mPlugin->releaseAllSecureStops();
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
-                                   DrmPlugin::HdcpLevel* max) const {
+DrmStatus DrmHalAidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
+                                    DrmPlugin::HdcpLevel* max) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -794,10 +796,10 @@
     *connected = toHdcpLevel(lvlsAidl.connectedLevel);
     *max = toHdcpLevel(lvlsAidl.maxLevel);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
+DrmStatus DrmHalAidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -814,11 +816,11 @@
     *open = result.currentSessions;
     *max = result.maxSessions;
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
-                                      DrmPlugin::SecurityLevel* level) const {
+DrmStatus DrmHalAidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                       DrmPlugin::SecurityLevel* level) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -833,10 +835,10 @@
 
     *level = toSecurityLevel(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+DrmStatus DrmHalAidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -845,21 +847,21 @@
 
     keySetIds = toKeySetIds(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalAidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     KeySetId keySetIdAidl;
     keySetIdAidl.keySetId = toStdVec(keySetId);
     ::ndk::ScopedAStatus status = mPlugin->removeOfflineLicense(keySetIdAidl);
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
-                                            DrmPlugin::OfflineLicenseState* licenseState) const {
+DrmStatus DrmHalAidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                             DrmPlugin::OfflineLicenseState* licenseState) const {
     Mutex::Autolock autoLock(mLock);
 
     INIT_CHECK();
@@ -873,15 +875,15 @@
 
     *licenseState = toOfflineLicenseState(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getPropertyString(String8 const& name, String8& value) const {
+DrmStatus DrmHalAidl::getPropertyString(String8 const& name, String8& value) const {
     Mutex::Autolock autoLock(mLock);
-    return getPropertyStringInternal(name, value);
+    return DrmStatus(getPropertyStringInternal(name, value));
 }
 
-status_t DrmHalAidl::getPropertyStringInternal(String8 const& name, String8& value) const {
+DrmStatus DrmHalAidl::getPropertyStringInternal(String8 const& name, String8& value) const {
     // This function is internal to the class and should only be called while
     // mLock is already held.
     INIT_CHECK();
@@ -891,54 +893,55 @@
 
     value = toString8(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+DrmStatus DrmHalAidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
     Mutex::Autolock autoLock(mLock);
-    return getPropertyByteArrayInternal(name, value);
+    return DrmStatus(getPropertyByteArrayInternal(name, value));
 }
 
-status_t DrmHalAidl::getPropertyByteArrayInternal(String8 const& name,
-                                                  Vector<uint8_t>& value) const {
+DrmStatus DrmHalAidl::getPropertyByteArrayInternal(String8 const& name,
+                                                   Vector<uint8_t>& value) const {
     // This function is internal to the class and should only be called while
     // mLock is already held.
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     std::vector<uint8_t> result;
     ::ndk::ScopedAStatus status = mPlugin->getPropertyByteArray(toStdString(name), &result);
 
     value = toVector(result);
-    err = statusAidlToStatusT(status);
+    err = statusAidlToDrmStatus(status);
     if (name == kPropertyDeviceUniqueId) {
-        mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
+        mMetrics->mGetDeviceUniqueIdCounter.Increment(err);
     }
     return err;
 }
 
-status_t DrmHalAidl::setPropertyString(String8 const& name, String8 const& value) const {
+DrmStatus DrmHalAidl::setPropertyString(String8 const& name, String8 const& value) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     ::ndk::ScopedAStatus status = mPlugin->setPropertyString(toStdString(name), toStdString(value));
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+DrmStatus DrmHalAidl::setPropertyByteArray(String8 const& name,
+                                           Vector<uint8_t> const& value) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     ::ndk::ScopedAStatus status = mPlugin->setPropertyByteArray(toStdString(name), toStdVec(value));
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+DrmStatus DrmHalAidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
     if (consumer == nullptr) {
-        return UNEXPECTED_NULL;
+        return DrmStatus(UNEXPECTED_NULL);
     }
-    consumer->consumeFrameworkMetrics(mMetrics);
+    consumer->consumeFrameworkMetrics(*mMetrics.get());
 
     // Append vendor metrics if they are supported.
 
@@ -957,7 +960,7 @@
     vendor += description;
 
     hidl_vec<DrmMetricGroupHidl> pluginMetrics;
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     std::vector<DrmMetricGroupAidl> result;
     ::ndk::ScopedAStatus status = mPlugin->getMetrics(&result);
@@ -967,13 +970,13 @@
         consumer->consumeHidlMetrics(vendor, pluginMetrics);
     }
 
-    err = statusAidlToStatusT(status);
+    err = statusAidlToDrmStatus(status);
 
     return err;
 }
 
-status_t DrmHalAidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
-                                        String8 const& algorithm) {
+DrmStatus DrmHalAidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                         String8 const& algorithm) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -981,10 +984,10 @@
 
     ::ndk::ScopedAStatus status =
             mPlugin->setCipherAlgorithm(toStdVec(sessionId), toStdString(algorithm));
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+DrmStatus DrmHalAidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -992,12 +995,12 @@
 
     ::ndk::ScopedAStatus status =
             mPlugin->setMacAlgorithm(toStdVec(sessionId), toStdString(algorithm));
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output) {
+DrmStatus DrmHalAidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1009,12 +1012,12 @@
 
     output = toVector(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output) {
+DrmStatus DrmHalAidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1026,11 +1029,11 @@
 
     output = toVector(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                          Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+DrmStatus DrmHalAidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                           Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1042,12 +1045,12 @@
 
     signature = toVector(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
-                            bool& match) {
+DrmStatus DrmHalAidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                             bool& match) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1056,12 +1059,12 @@
     ::ndk::ScopedAStatus status = mPlugin->verify(toStdVec(sessionId), toStdVec(keyId),
                                                   toStdVec(message), toStdVec(signature), &match);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
-                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
-                             Vector<uint8_t>& signature) {
+DrmStatus DrmHalAidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                              Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                              Vector<uint8_t>& signature) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1074,10 +1077,10 @@
 
     signature = toVector(result);
 
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::requiresSecureDecoder(const char* mime, bool* required) const {
+DrmStatus DrmHalAidl::requiresSecureDecoder(const char* mime, bool* required) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1086,14 +1089,15 @@
         mPlugin->requiresSecureDecoder(mimeAidl, SecurityLevel::DEFAULT, required);
     if (!status.isOk()) {
         DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %d", status.getServiceSpecificError());
-        return DEAD_OBJECT;
+        return DrmStatus(DEAD_OBJECT);
     }
 
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalAidl::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool* required) const {
+DrmStatus DrmHalAidl::requiresSecureDecoder(const char* mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool* required) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1101,7 +1105,7 @@
     std::string mimeAidl(mime);
     ::ndk::ScopedAStatus status = mPlugin->requiresSecureDecoder(mimeAidl, aLevel, required);
 
-    status_t err = statusAidlToStatusT(status);
+    DrmStatus err = statusAidlToDrmStatus(status);
     if (!status.isOk()) {
         DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %d", status.getServiceSpecificError());
     }
@@ -1109,17 +1113,17 @@
     return err;
 }
 
-status_t DrmHalAidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+DrmStatus DrmHalAidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
     std::string playbackIdAidl(playbackId);
     ::ndk::ScopedAStatus status = mPlugin->setPlaybackId(toStdVec(sessionId), playbackIdAidl);
-    return statusAidlToStatusT(status);
+    return statusAidlToDrmStatus(status);
 }
 
-status_t DrmHalAidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+DrmStatus DrmHalAidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
     Mutex::Autolock autoLock(mLock);
-    return DrmUtils::GetLogMessagesAidl<IDrmPluginAidl>(mPlugin, logs);
+    return DrmStatus(DrmUtils::GetLogMessagesAidl<IDrmPluginAidl>(mPlugin, logs));
 }
 
 void DrmHalAidl::closeOpenSessions() {
@@ -1143,7 +1147,7 @@
         getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
         metricsString = toBase64StringNoPad(metricsVector.array(), metricsVector.size());
         status_t res = android::reportDrmPluginMetrics(metricsString, vendor, description,
-                                                       mMetrics.GetAppUid());
+                                                       mMetrics->GetAppUid());
         if (res != OK) {
             ALOGE("Metrics were retrieved but could not be reported: %d", res);
         }
@@ -1153,7 +1157,7 @@
 
 std::string DrmHalAidl::reportFrameworkMetrics(const std::string& pluginMetrics) const {
     mediametrics_handle_t item(mediametrics_create("mediadrm"));
-    mediametrics_setUid(item, mMetrics.GetAppUid());
+    mediametrics_setUid(item, mMetrics->GetAppUid());
     String8 vendor;
     String8 description;
     status_t result = getPropertyStringInternal(String8("vendor"), vendor);
@@ -1170,7 +1174,7 @@
     }
 
     std::string serializedMetrics;
-    result = mMetrics.GetSerializedMetrics(&serializedMetrics);
+    result = mMetrics->GetSerializedMetrics(&serializedMetrics);
     if (result != OK) {
         ALOGE("Failed to serialize framework metrics: %d", result);
     }
@@ -1189,7 +1193,7 @@
     return serializedMetrics;
 }
 
-status_t DrmHalAidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+DrmStatus DrmHalAidl::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
     Mutex::Autolock autoLock(mLock);
 
     if (mFactories.empty()) return UNKNOWN_ERROR;
@@ -1205,14 +1209,14 @@
         }
     }
 
-    return OK;
+    return DrmStatus(OK);
 }
 
 void DrmHalAidl::cleanup() {
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
+    if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
@@ -1225,9 +1229,9 @@
     mPlugin.reset();
 }
 
-status_t DrmHalAidl::destroyPlugin() {
+DrmStatus DrmHalAidl::destroyPlugin() {
     cleanup();
-    return OK;
+    return DrmStatus(OK);
 }
 
 ::ndk::ScopedAStatus DrmHalAidl::onEvent(EventTypeAidl eventTypeAidl,
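
The DrmHalAidl.cpp changes above also move MediaDrmMetrics behind a std::shared_ptr (mMetrics) that is handed to the DrmHalListener at construction, so the listener shares ownership of the metrics rather than holding a raw pointer into the HAL object. A minimal sketch of that ownership shape, with hypothetical names standing in for the real classes:

    #include <memory>

    // Stand-ins for MediaDrmMetrics / DrmHalListener / DrmHalAidl; only the
    // ownership relationship is meant to match the constructor change above.
    struct MetricsSketch {
        int openSessionCount = 0;
    };

    struct ListenerSketch {
        explicit ListenerSketch(std::shared_ptr<MetricsSketch> m) : metrics(std::move(m)) {}
        std::shared_ptr<MetricsSketch> metrics;  // keeps the metrics alive on its own
    };

    struct HalSketch {
        HalSketch()
            : metrics(std::make_shared<MetricsSketch>()),
              listener(std::make_shared<ListenerSketch>(metrics)) {}
        std::shared_ptr<MetricsSketch> metrics;
        std::shared_ptr<ListenerSketch> listener;
    };
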
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c38dbef..00ea004 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -35,6 +35,7 @@
 #include <mediadrm/DrmHalHidl.h>
 #include <mediadrm/DrmSessionClientInterface.h>
 #include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/DrmUtils.h>
 #include <mediadrm/IDrmMetricsConsumer.h>
 #include <utils/Log.h>
@@ -309,7 +310,7 @@
     closeOpenSessions();
 
     Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
+    if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
 
     setListener(NULL);
     mInitCheck = NO_INIT;
@@ -368,14 +369,14 @@
     return plugin;
 }
 
-status_t DrmHalHidl::initCheck() const {
-    return mInitCheck;
+DrmStatus DrmHalHidl::initCheck() const {
+    return DrmStatus(mInitCheck);
 }
 
-status_t DrmHalHidl::setListener(const sp<IDrmClient>& listener) {
+DrmStatus DrmHalHidl::setListener(const sp<IDrmClient>& listener) {
     Mutex::Autolock lock(mEventLock);
     mListener = listener;
-    return NO_ERROR;
+    return DrmStatus(NO_ERROR);
 }
 
 Return<void> DrmHalHidl::sendEvent(EventType hEventType, const hidl_vec<uint8_t>& sessionId,
@@ -502,10 +503,10 @@
     return Void();
 }
 
-status_t DrmHalHidl::matchMimeTypeAndSecurityLevel(const sp<IDrmFactory>& factory,
-                                                   const uint8_t uuid[16], const String8& mimeType,
-                                                   DrmPlugin::SecurityLevel level,
-                                                   bool* isSupported) {
+DrmStatus DrmHalHidl::matchMimeTypeAndSecurityLevel(const sp<IDrmFactory>& factory,
+                                                    const uint8_t uuid[16], const String8& mimeType,
+                                                    DrmPlugin::SecurityLevel level,
+                                                    bool* isSupported) {
     *isSupported = false;
 
     // handle default value cases
@@ -513,40 +514,50 @@
         if (mimeType == "") {
             // isCryptoSchemeSupported(uuid)
             *isSupported = true;
-        } else {
-            // isCryptoSchemeSupported(uuid, mimeType)
-            *isSupported = factory->isContentTypeSupported(mimeType.string());
+            return DrmStatus(OK);
         }
-        return OK;
+        // isCryptoSchemeSupported(uuid, mimeType)
+        auto hResult = factory->isContentTypeSupported(mimeType.string());
+        if (!hResult.isOk()) {
+            return DrmStatus(DEAD_OBJECT);
+        }
+        *isSupported = hResult;
+        return DrmStatus(OK);
     } else if (mimeType == "") {
-        return BAD_VALUE;
+        return DrmStatus(BAD_VALUE);
     }
 
     sp<drm::V1_2::IDrmFactory> factoryV1_2 = drm::V1_2::IDrmFactory::castFrom(factory);
     if (factoryV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
+        return DrmStatus(ERROR_UNSUPPORTED);
     } else {
-        *isSupported = factoryV1_2->isCryptoSchemeSupported_1_2(uuid, mimeType.string(),
+        auto hResult = factoryV1_2->isCryptoSchemeSupported_1_2(uuid, mimeType.string(),
                                                                 toHidlSecurityLevel(level));
-        return OK;
+        if (!hResult.isOk()) {
+            return DrmStatus(DEAD_OBJECT);
+        }
+        *isSupported = hResult;
+        return DrmStatus(OK);
     }
 }
 
-status_t DrmHalHidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
-                                             DrmPlugin::SecurityLevel level, bool* isSupported) {
+DrmStatus DrmHalHidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel level, bool* isSupported) {
     Mutex::Autolock autoLock(mLock);
     *isSupported = false;
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+        auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
+        if (hResult.isOk() && hResult) {
             return matchMimeTypeAndSecurityLevel(mFactories[i], uuid, mimeType, level, isSupported);
         }
     }
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalHidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+DrmStatus DrmHalHidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
     Mutex::Autolock autoLock(mLock);
 
+    if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
         auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
         if (hResult.isOk() && hResult) {
@@ -581,15 +592,15 @@
         }
     }
 
-    return mInitCheck;
+    return DrmStatus(mInitCheck);
 }
 
-status_t DrmHalHidl::destroyPlugin() {
+DrmStatus DrmHalHidl::destroyPlugin() {
     cleanup();
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalHidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
+DrmStatus DrmHalHidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -604,7 +615,7 @@
         }
     }
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
     bool retry = true;
     do {
         hidl_vec<uint8_t> hSessionId;
@@ -657,7 +668,7 @@
     return err;
 }
 
-status_t DrmHalHidl::closeSession(Vector<uint8_t> const& sessionId) {
+DrmStatus DrmHalHidl::closeSession(Vector<uint8_t> const& sessionId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -672,13 +683,13 @@
                 }
             }
         }
-        status_t response = toStatusT(status);
+        DrmStatus response = toStatusT(status);
         mMetrics.SetSessionEnd(sessionId);
         mMetrics.mCloseSessionCounter.Increment(response);
         return response;
     }
     mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
-    return DEAD_OBJECT;
+    return DrmStatus(DEAD_OBJECT);
 }
 
 static DrmPlugin::KeyRequestType toKeyRequestType(KeyRequestType keyRequestType) {
@@ -712,12 +723,12 @@
     }
 }
 
-status_t DrmHalHidl::getKeyRequest(Vector<uint8_t> const& sessionId,
-                                   Vector<uint8_t> const& initData, String8 const& mimeType,
-                                   DrmPlugin::KeyType keyType,
-                                   KeyedVector<String8, String8> const& optionalParameters,
-                                   Vector<uint8_t>& request, String8& defaultUrl,
-                                   DrmPlugin::KeyRequestType* keyRequestType) {
+DrmStatus DrmHalHidl::getKeyRequest(Vector<uint8_t> const& sessionId,
+                                    Vector<uint8_t> const& initData, String8 const& mimeType,
+                                    DrmPlugin::KeyType keyType,
+                                    KeyedVector<String8, String8> const& optionalParameters,
+                                    Vector<uint8_t>& request, String8& defaultUrl,
+                                    DrmPlugin::KeyRequestType* keyRequestType) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
     EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTimeUs);
@@ -738,7 +749,7 @@
 
     ::KeyedVector hOptionalParameters = toHidlKeyedVector(optionalParameters);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
     Return<void> hResult;
 
     if (mPluginV1_2 != NULL) {
@@ -787,16 +798,16 @@
     return err;
 }
 
-status_t DrmHalHidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
-                                        Vector<uint8_t> const& response,
-                                        Vector<uint8_t>& keySetId) {
+DrmStatus DrmHalHidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                         Vector<uint8_t> const& response,
+                                         Vector<uint8_t>& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
     EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTimeUs);
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult =
             mPlugin->provideKeyResponse(toHidlVec(sessionId), toHidlVec(response),
@@ -811,27 +822,27 @@
     return err;
 }
 
-status_t DrmHalHidl::removeKeys(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalHidl::removeKeys(Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     Return<Status> status = mPlugin->removeKeys(toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::restoreKeys(Vector<uint8_t> const& sessionId,
-                                 Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalHidl::restoreKeys(Vector<uint8_t> const& sessionId,
+                                  Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
     Return<Status> status = mPlugin->restoreKeys(toHidlVec(sessionId), toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
-                                    KeyedVector<String8, String8>& infoMap) const {
+DrmStatus DrmHalHidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                     KeyedVector<String8, String8>& infoMap) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -839,7 +850,7 @@
 
     ::KeyedVector hInfoMap;
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->queryKeyStatus(
             toHidlVec(sessionId), [&](Status status, const hidl_vec<KeyValue>& map) {
@@ -849,15 +860,15 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? DrmStatus(err) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
-                                         Vector<uint8_t>& request, String8& defaultUrl) {
+DrmStatus DrmHalHidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                          Vector<uint8_t>& request, String8& defaultUrl) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
     Return<void> hResult;
 
     if (mPluginV1_2 != NULL) {
@@ -888,13 +899,13 @@
     return err;
 }
 
-status_t DrmHalHidl::provideProvisionResponse(Vector<uint8_t> const& response,
-                                              Vector<uint8_t>& certificate,
-                                              Vector<uint8_t>& wrappedKey) {
+DrmStatus DrmHalHidl::provideProvisionResponse(Vector<uint8_t> const& response,
+                                               Vector<uint8_t>& certificate,
+                                               Vector<uint8_t>& wrappedKey) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->provideProvisionResponse(
             toHidlVec(response), [&](Status status, const hidl_vec<uint8_t>& hCertificate,
@@ -911,11 +922,11 @@
     return err;
 }
 
-status_t DrmHalHidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+DrmStatus DrmHalHidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult =
             mPlugin->getSecureStops([&](Status status, const hidl_vec<SecureStop>& hSecureStops) {
@@ -925,10 +936,10 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+DrmStatus DrmHalHidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
@@ -939,7 +950,7 @@
         return ERROR_DRM_CANNOT_HANDLE;
     }
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPluginV1_1->getSecureStopIds(
             [&](Status status, const hidl_vec<SecureStopId>& hSecureStopIds) {
@@ -949,14 +960,14 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+DrmStatus DrmHalHidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->getSecureStop(
             toHidlVec(ssid), [&](Status status, const SecureStop& hSecureStop) {
@@ -966,10 +977,10 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+DrmStatus DrmHalHidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -981,10 +992,10 @@
     } else {
         status = mPlugin->releaseSecureStop(toHidlVec(ssRelease));
     }
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::removeSecureStop(Vector<uint8_t> const& ssid) {
+DrmStatus DrmHalHidl::removeSecureStop(Vector<uint8_t> const& ssid) {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
@@ -996,10 +1007,10 @@
     }
 
     Return<Status> status = mPluginV1_1->removeSecureStop(toHidlVec(ssid));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::removeAllSecureStops() {
+DrmStatus DrmHalHidl::removeAllSecureStops() {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1009,18 +1020,18 @@
     } else {
         status = mPlugin->releaseAllSecureStops();
     }
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
-                                   DrmPlugin::HdcpLevel* max) const {
+DrmStatus DrmHalHidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
+                                    DrmPlugin::HdcpLevel* max) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     if (connected == NULL || max == NULL) {
         return BAD_VALUE;
     }
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     *connected = DrmPlugin::kHdcpLevelUnknown;
     *max = DrmPlugin::kHdcpLevelUnknown;
@@ -1046,26 +1057,26 @@
                     err = toStatusT(status);
                 });
     } else {
-        return ERROR_DRM_CANNOT_HANDLE;
+        return DrmStatus(ERROR_DRM_CANNOT_HANDLE);
     }
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
+DrmStatus DrmHalHidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     if (open == NULL || max == NULL) {
         return BAD_VALUE;
     }
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     *open = 0;
     *max = 0;
 
     if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
+        return DrmStatus(ERROR_DRM_CANNOT_HANDLE);
     }
 
     Return<void> hResult =
@@ -1077,21 +1088,21 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
-                                      DrmPlugin::SecurityLevel* level) const {
+DrmStatus DrmHalHidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                       DrmPlugin::SecurityLevel* level) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     if (level == NULL) {
-        return BAD_VALUE;
+        return DrmStatus(BAD_VALUE);
     }
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
+        return DrmStatus(ERROR_DRM_CANNOT_HANDLE);
     }
 
     *level = DrmPlugin::kSecurityLevelUnknown;
@@ -1104,21 +1115,21 @@
                                                              err = toStatusT(status);
                                                          });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+DrmStatus DrmHalHidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
-        return mInitCheck;
+        return DrmStatus(mInitCheck);
     }
 
     if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
+        return DrmStatus(ERROR_UNSUPPORTED);
     }
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPluginV1_2->getOfflineLicenseKeySetIds(
             [&](Status status, const hidl_vec<KeySetId>& hKeySetIds) {
@@ -1128,38 +1139,38 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+DrmStatus DrmHalHidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
-        return mInitCheck;
+        return DrmStatus(mInitCheck);
     }
 
     if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
+        return DrmStatus(ERROR_UNSUPPORTED);
     }
 
     Return<Status> status = mPluginV1_2->removeOfflineLicense(toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
-                                            DrmPlugin::OfflineLicenseState* licenseState) const {
+DrmStatus DrmHalHidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                             DrmPlugin::OfflineLicenseState* licenseState) const {
     Mutex::Autolock autoLock(mLock);
 
     if (mInitCheck != OK) {
-        return mInitCheck;
+        return DrmStatus(mInitCheck);
     }
 
     if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
+        return DrmStatus(ERROR_UNSUPPORTED);
     }
     *licenseState = DrmPlugin::kOfflineLicenseStateUnknown;
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPluginV1_2->getOfflineLicenseState(
             toHidlVec(keySetId), [&](Status status, OfflineLicenseState hLicenseState) {
@@ -1169,20 +1180,20 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getPropertyString(String8 const& name, String8& value) const {
+DrmStatus DrmHalHidl::getPropertyString(String8 const& name, String8& value) const {
     Mutex::Autolock autoLock(mLock);
     return getPropertyStringInternal(name, value);
 }
 
-status_t DrmHalHidl::getPropertyStringInternal(String8 const& name, String8& value) const {
+DrmStatus DrmHalHidl::getPropertyStringInternal(String8 const& name, String8& value) const {
     // This function is internal to the class and should only be called while
     // mLock is already held.
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->getPropertyString(
             toHidlString(name), [&](Status status, const hidl_string& hValue) {
@@ -1192,21 +1203,21 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+DrmStatus DrmHalHidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
     Mutex::Autolock autoLock(mLock);
     return getPropertyByteArrayInternal(name, value);
 }
 
-status_t DrmHalHidl::getPropertyByteArrayInternal(String8 const& name,
-                                                  Vector<uint8_t>& value) const {
+DrmStatus DrmHalHidl::getPropertyByteArrayInternal(String8 const& name,
+                                                   Vector<uint8_t>& value) const {
     // This function is internal to the class and should only be called while
     // mLock is already held.
     INIT_CHECK();
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->getPropertyByteArray(
             toHidlString(name), [&](Status status, const hidl_vec<uint8_t>& hValue) {
@@ -1223,25 +1234,26 @@
     return err;
 }
 
-status_t DrmHalHidl::setPropertyString(String8 const& name, String8 const& value) const {
+DrmStatus DrmHalHidl::setPropertyString(String8 const& name, String8 const& value) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     Return<Status> status = mPlugin->setPropertyString(toHidlString(name), toHidlString(value));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+DrmStatus DrmHalHidl::setPropertyByteArray(String8 const& name,
+                                           Vector<uint8_t> const& value) const {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     Return<Status> status = mPlugin->setPropertyByteArray(toHidlString(name), toHidlVec(value));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+DrmStatus DrmHalHidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
     if (consumer == nullptr) {
-        return UNEXPECTED_NULL;
+        return DrmStatus(UNEXPECTED_NULL);
     }
     consumer->consumeFrameworkMetrics(mMetrics);
 
@@ -1262,7 +1274,7 @@
         vendor += description;
 
         hidl_vec<DrmMetricGroup> pluginMetrics;
-        status_t err = UNKNOWN_ERROR;
+        DrmStatus err = UNKNOWN_ERROR;
 
         Return<void> status =
                 mPluginV1_1->getMetrics([&](Status status, hidl_vec<DrmMetricGroup> pluginMetrics) {
@@ -1273,14 +1285,14 @@
                     }
                     err = toStatusT(status);
                 });
-        return status.isOk() ? err : DEAD_OBJECT;
+        return status.isOk() ? err : DrmStatus(DEAD_OBJECT);
     }
 
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalHidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
-                                        String8 const& algorithm) {
+DrmStatus DrmHalHidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                         String8 const& algorithm) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
@@ -1288,28 +1300,28 @@
 
     Return<Status> status =
             mPlugin->setCipherAlgorithm(toHidlVec(sessionId), toHidlString(algorithm));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+DrmStatus DrmHalHidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
     Return<Status> status = mPlugin->setMacAlgorithm(toHidlVec(sessionId), toHidlString(algorithm));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    return status.isOk() ? DrmStatus(toStatusT(status)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output) {
+DrmStatus DrmHalHidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult =
             mPlugin->encrypt(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(input),
@@ -1320,18 +1332,18 @@
                                  err = toStatusT(status);
                              });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output) {
+DrmStatus DrmHalHidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult =
             mPlugin->decrypt(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(input),
@@ -1342,17 +1354,17 @@
                                  err = toStatusT(status);
                              });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                          Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+DrmStatus DrmHalHidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                           Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->sign(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(message),
                                          [&](Status status, const hidl_vec<uint8_t>& hSignature) {
@@ -1362,18 +1374,18 @@
                                              err = toStatusT(status);
                                          });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
-                            bool& match) {
+DrmStatus DrmHalHidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                             bool& match) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult =
             mPlugin->verify(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(message),
@@ -1386,18 +1398,18 @@
                                 err = toStatusT(status);
                             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
-                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
-                             Vector<uint8_t>& signature) {
+DrmStatus DrmHalHidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                              Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                              Vector<uint8_t>& signature) {
     Mutex::Autolock autoLock(mLock);
     INIT_CHECK();
 
     DrmSessionManager::Instance()->useSession(sessionId);
 
-    status_t err = UNKNOWN_ERROR;
+    DrmStatus err = UNKNOWN_ERROR;
 
     Return<void> hResult = mPlugin->signRSA(
             toHidlVec(sessionId), toHidlString(algorithm), toHidlVec(message),
@@ -1408,7 +1420,7 @@
                 err = toStatusT(status);
             });
 
-    return hResult.isOk() ? err : DEAD_OBJECT;
+    return hResult.isOk() ? err : DrmStatus(DEAD_OBJECT);
 }
 
 std::string DrmHalHidl::reportFrameworkMetrics(const std::string& pluginMetrics) const {
@@ -1467,55 +1479,56 @@
     return metricsString;
 }
 
-status_t DrmHalHidl::requiresSecureDecoder(const char* mime, bool* required) const {
+DrmStatus DrmHalHidl::requiresSecureDecoder(const char* mime, bool* required) const {
     Mutex::Autolock autoLock(mLock);
     if (mPluginV1_4 == NULL) {
-        return false;
+        return DrmStatus(false);
     }
     auto hResult = mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
     if (!hResult.isOk()) {
         DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
-        return DEAD_OBJECT;
+        return DrmStatus(DEAD_OBJECT);
     }
     if (required) {
         *required = hResult;
     }
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalHidl::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool* required) const {
+DrmStatus DrmHalHidl::requiresSecureDecoder(const char* mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool* required) const {
     Mutex::Autolock autoLock(mLock);
     if (mPluginV1_4 == NULL) {
-        return false;
+        return DrmStatus(false);
     }
     auto hLevel = toHidlSecurityLevel(securityLevel);
     auto hResult = mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
     if (!hResult.isOk()) {
         DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
-        return DEAD_OBJECT;
+        return DrmStatus(DEAD_OBJECT);
     }
     if (required) {
         *required = hResult;
     }
-    return OK;
+    return DrmStatus(OK);
 }
 
-status_t DrmHalHidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+DrmStatus DrmHalHidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
     Mutex::Autolock autoLock(mLock);
     if (mPluginV1_4 == NULL) {
-        return ERROR_UNSUPPORTED;
+        return DrmStatus(ERROR_UNSUPPORTED);
     }
     auto err = mPluginV1_4->setPlaybackId(toHidlVec(sessionId), hidl_string(playbackId));
-    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+    return err.isOk() ? DrmStatus(toStatusT(err)) : DrmStatus(DEAD_OBJECT);
 }
 
-status_t DrmHalHidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+DrmStatus DrmHalHidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
     Mutex::Autolock autoLock(mLock);
-    return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
+    return DrmStatus(DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs));
 }
 
-status_t DrmHalHidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+DrmStatus DrmHalHidl::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
     Mutex::Autolock autoLock(mLock);
     for (auto &factory : mFactories) {
         sp<drm::V1_3::IDrmFactory> factoryV1_3 = drm::V1_3::IDrmFactory::castFrom(factory);
@@ -1531,7 +1544,7 @@
             });
     }
 
-    return OK;
+    return DrmStatus(OK);
 }
 
 }  // namespace android
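
The hunks above switch DrmHalHidl's return type from status_t to DrmStatus throughout. The real DrmStatus lives in <mediadrm/DrmStatus.h>, which is not part of this diff; the following is only a minimal sketch of the shape those call sites assume, with the member names inferred from usage later in this patch (DrmStatus(OK), comparisons against OK, getCdmErr()/getOemErr()/getContext()/getErrorMessage()) rather than taken from the actual header.

// Sketch only, not the real class.
#include <cstdint>
#include <string>

#include <utils/Errors.h>  // status_t, OK

namespace android {

class DrmStatusSketch {
  public:
    // Implicit construction from status_t lets plain `return DEAD_OBJECT;`
    // style call sites keep compiling once the declared return type changes.
    DrmStatusSketch(status_t err, const char* msg = "") : mStatus(err), mErrMsg(msg) {}

    // Implicit conversion back to status_t keeps `if (status != OK)` working.
    operator status_t() const { return mStatus; }

    int32_t getCdmErr() const { return mCdmErr; }
    int32_t getOemErr() const { return mOemErr; }
    int32_t getContext() const { return mCtx; }
    std::string getErrorMessage() const { return mErrMsg; }

  private:
    status_t mStatus = OK;
    int32_t mCdmErr = 0, mOemErr = 0, mCtx = 0;
    std::string mErrMsg;
};

}  // namespace android
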
diff --git a/drm/libmediadrm/DrmHalListener.cpp b/drm/libmediadrm/DrmHalListener.cpp
index cfcf475..4e868ac 100644
--- a/drm/libmediadrm/DrmHalListener.cpp
+++ b/drm/libmediadrm/DrmHalListener.cpp
@@ -37,12 +37,12 @@
     return vec;
 }
 
-DrmHalListener::DrmHalListener(MediaDrmMetrics* metrics)
+DrmHalListener::DrmHalListener(const std::shared_ptr<MediaDrmMetrics>& metrics)
     : mMetrics(metrics) {}
 
 DrmHalListener::~DrmHalListener() {}
 
-void DrmHalListener::setListener(sp<IDrmClient> listener) {
+void DrmHalListener::setListener(const sp<IDrmClient>& listener) {
     Mutex::Autolock lock(mEventLock);
     mListener = listener;
 }
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index c06f09b..fd095b7 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -42,7 +42,7 @@
         }
         return type_names[attribute];
     }
-    
+
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
diff --git a/drm/libmediadrm/DrmMetricsLogger.cpp b/drm/libmediadrm/DrmMetricsLogger.cpp
new file mode 100644
index 0000000..ce4d730
--- /dev/null
+++ b/drm/libmediadrm/DrmMetricsLogger.cpp
@@ -0,0 +1,651 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "DrmMetricsLogger"
+
+#include <media/MediaMetrics.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <mediadrm/DrmHal.h>
+#include <mediadrm/DrmMetricsLogger.h>
+#include <mediadrm/DrmUtils.h>
+
+namespace android {
+
+namespace {
+
+std::vector<uint8_t> toStdVec(Vector<uint8_t> const& sessionId) {
+    auto sessionKey = sessionId.array();
+    std::vector<uint8_t> vec(sessionKey, sessionKey + sessionId.size());
+    return vec;
+}
+
+}  // namespace
+
+DrmMetricsLogger::DrmMetricsLogger(IDrmFrontend frontend)
+    : mImpl(sp<DrmHal>::make()), mUuid(), mObjNonce(), mFrontend(frontend) {}
+
+DrmMetricsLogger::~DrmMetricsLogger() {}
+
+int MediaErrorToEnum(status_t err) {
+#define ERROR_BAD_VALUE (BAD_VALUE)
+#define ERROR_DEAD_OBJECT (DEAD_OBJECT)
+#define STATUS_CASE(status) \
+    case ERROR_##status: \
+        return ENUM_##status
+
+    switch (err) {
+        STATUS_CASE(DRM_UNKNOWN);
+        STATUS_CASE(DRM_NO_LICENSE);
+        STATUS_CASE(DRM_LICENSE_EXPIRED);
+        STATUS_CASE(DRM_RESOURCE_BUSY);
+        STATUS_CASE(DRM_INSUFFICIENT_OUTPUT_PROTECTION);
+        STATUS_CASE(DRM_SESSION_NOT_OPENED);
+        STATUS_CASE(DRM_CANNOT_HANDLE);
+        STATUS_CASE(DRM_INSUFFICIENT_SECURITY);
+        STATUS_CASE(DRM_FRAME_TOO_LARGE);
+        STATUS_CASE(DRM_SESSION_LOST_STATE);
+        STATUS_CASE(DRM_CERTIFICATE_MALFORMED);
+        STATUS_CASE(DRM_CERTIFICATE_MISSING);
+        STATUS_CASE(DRM_CRYPTO_LIBRARY);
+        STATUS_CASE(DRM_GENERIC_OEM);
+        STATUS_CASE(DRM_GENERIC_PLUGIN);
+        STATUS_CASE(DRM_INIT_DATA);
+        STATUS_CASE(DRM_KEY_NOT_LOADED);
+        STATUS_CASE(DRM_LICENSE_PARSE);
+        STATUS_CASE(DRM_LICENSE_POLICY);
+        STATUS_CASE(DRM_LICENSE_RELEASE);
+        STATUS_CASE(DRM_LICENSE_REQUEST_REJECTED);
+        STATUS_CASE(DRM_LICENSE_RESTORE);
+        STATUS_CASE(DRM_LICENSE_STATE);
+        STATUS_CASE(DRM_MEDIA_FRAMEWORK);
+        STATUS_CASE(DRM_PROVISIONING_CERTIFICATE);
+        STATUS_CASE(DRM_PROVISIONING_CONFIG);
+        STATUS_CASE(DRM_PROVISIONING_PARSE);
+        STATUS_CASE(DRM_PROVISIONING_REQUEST_REJECTED);
+        STATUS_CASE(DRM_PROVISIONING_RETRY);
+        STATUS_CASE(DRM_RESOURCE_CONTENTION);
+        STATUS_CASE(DRM_SECURE_STOP_RELEASE);
+        STATUS_CASE(DRM_STORAGE_READ);
+        STATUS_CASE(DRM_STORAGE_WRITE);
+        STATUS_CASE(DRM_ZERO_SUBSAMPLES);
+        STATUS_CASE(DRM_INVALID_STATE);
+        STATUS_CASE(BAD_VALUE);
+        STATUS_CASE(DRM_NOT_PROVISIONED);
+        STATUS_CASE(DRM_DEVICE_REVOKED);
+        STATUS_CASE(DRM_DECRYPT);
+        STATUS_CASE(DEAD_OBJECT);
+#undef ERROR_BAD_VALUE
+#undef ERROR_DEAD_OBJECT
+#undef STATUS_CASE
+    }
+    return ENUM_DRM_UNKNOWN;
+}
+
+int DrmPluginSecurityLevelToJavaSecurityLevel(DrmPlugin::SecurityLevel securityLevel) {
+#define STATUS_CASE(status) \
+    case DrmPlugin::k##status: \
+        return J##status
+
+    switch (securityLevel) {
+        STATUS_CASE(SecurityLevelUnknown);
+        STATUS_CASE(SecurityLevelSwSecureCrypto);
+        STATUS_CASE(SecurityLevelSwSecureDecode);
+        STATUS_CASE(SecurityLevelHwSecureCrypto);
+        STATUS_CASE(SecurityLevelHwSecureDecode);
+        STATUS_CASE(SecurityLevelHwSecureAll);
+        STATUS_CASE(SecurityLevelMax);
+#undef STATUS_CASE
+    }
+    return static_cast<int>(securityLevel);
+}
+
+
+DrmStatus DrmMetricsLogger::initCheck() const {
+    DrmStatus status = mImpl->initCheck();
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::isCryptoSchemeSupported(const uint8_t uuid[IDRM_UUID_SIZE],
+                                                    const String8& mimeType,
+                                                    DrmPlugin::SecurityLevel securityLevel,
+                                                    bool* result) {
+    DrmStatus status = mImpl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::createPlugin(const uint8_t uuid[IDRM_UUID_SIZE],
+                                         const String8& appPackageName) {
+    std::memcpy(mUuid.data(), uuid, IDRM_UUID_SIZE);
+    mUuid[0] = betoh64(mUuid[0]);
+    mUuid[1] = betoh64(mUuid[1]);
+    if (kUuidSchemeMap.count(mUuid)) {
+        mScheme = kUuidSchemeMap.at(mUuid);
+    } else {
+        mScheme = "Other";
+    }
+    if (generateNonce(&mObjNonce, kNonceSize, __func__) != OK) {
+        return ERROR_DRM_RESOURCE_BUSY;
+    }
+    DrmStatus status = mImpl->createPlugin(uuid, appPackageName);
+    if (status == OK) {
+        String8 version8;
+        if (getPropertyString(String8("version"), version8) == OK) {
+            mVersion = version8.string();
+        }
+        reportMediaDrmCreated();
+    } else {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::destroyPlugin() {
+    DrmStatus status = mImpl->destroyPlugin();
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::openSession(DrmPlugin::SecurityLevel securityLevel,
+                                        Vector<uint8_t>& sessionId) {
+    SessionContext ctx{};
+    if (generateNonce(&ctx.mNonce, kNonceSize, __func__) != OK) {
+        return ERROR_DRM_RESOURCE_BUSY;
+    }
+    DrmStatus status = mImpl->openSession(securityLevel, sessionId);
+    if (status == OK) {
+        std::vector<uint8_t> sessionKey = toStdVec(sessionId);
+        ctx.mTargetSecurityLevel = securityLevel;
+        if (getSecurityLevel(sessionId, &ctx.mActualSecurityLevel) != OK) {
+            ctx.mActualSecurityLevel = DrmPlugin::kSecurityLevelUnknown;
+        }
+        if (!mVersion.empty()) {
+            ctx.mVersion = mVersion;
+        }
+        {
+            const std::lock_guard<std::mutex> lock(mSessionMapMutex);
+            mSessionMap.insert({sessionKey, ctx});
+        }
+        reportMediaDrmSessionOpened(sessionKey);
+    } else {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::closeSession(Vector<uint8_t> const& sessionId) {
+    std::vector<uint8_t> sid = toStdVec(sessionId);
+    DrmStatus status = mImpl->closeSession(sessionId);
+    if (status == OK) {
+        const std::lock_guard<std::mutex> lock(mSessionMapMutex);
+        mSessionMap.erase(sid);
+    } else {
+        // TODO(b/275729711): reclaim sessions that failed to close
+        reportMediaDrmErrored(status, __func__, sid);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getKeyRequest(Vector<uint8_t> const& sessionId,
+                                          Vector<uint8_t> const& initData, String8 const& mimeType,
+                                          DrmPlugin::KeyType keyType,
+                                          KeyedVector<String8, String8> const& optionalParameters,
+                                          Vector<uint8_t>& request, String8& defaultUrl,
+                                          DrmPlugin::KeyRequestType* keyRequestType) {
+    DrmStatus status =
+            mImpl->getKeyRequest(sessionId, initData, mimeType, keyType, optionalParameters,
+                                 request, defaultUrl, keyRequestType);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                               Vector<uint8_t> const& response,
+                                               Vector<uint8_t>& keySetId) {
+    DrmStatus status = mImpl->provideKeyResponse(sessionId, response, keySetId);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::removeKeys(Vector<uint8_t> const& keySetId) {
+    DrmStatus status = mImpl->removeKeys(keySetId);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::restoreKeys(Vector<uint8_t> const& sessionId,
+                                        Vector<uint8_t> const& keySetId) {
+    DrmStatus status = mImpl->restoreKeys(sessionId, keySetId);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                           KeyedVector<String8, String8>& infoMap) const {
+    DrmStatus status = mImpl->queryKeyStatus(sessionId, infoMap);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getProvisionRequest(String8 const& certType,
+                                                String8 const& certAuthority,
+                                                Vector<uint8_t>& request, String8& defaultUrl) {
+    DrmStatus status = mImpl->getProvisionRequest(certType, certAuthority, request, defaultUrl);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::provideProvisionResponse(Vector<uint8_t> const& response,
+                                                     Vector<uint8_t>& certificate,
+                                                     Vector<uint8_t>& wrappedKey) {
+    DrmStatus status = mImpl->provideProvisionResponse(response, certificate, wrappedKey);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+    DrmStatus status = mImpl->getSecureStops(secureStops);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+    DrmStatus status = mImpl->getSecureStopIds(secureStopIds);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getSecureStop(Vector<uint8_t> const& ssid,
+                                          Vector<uint8_t>& secureStop) {
+    DrmStatus status = mImpl->getSecureStop(ssid, secureStop);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+    DrmStatus status = mImpl->releaseSecureStops(ssRelease);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::removeSecureStop(Vector<uint8_t> const& ssid) {
+    DrmStatus status = mImpl->removeSecureStop(ssid);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::removeAllSecureStops() {
+    DrmStatus status = mImpl->removeAllSecureStops();
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                                          DrmPlugin::HdcpLevel* maxLevel) const {
+    DrmStatus status = mImpl->getHdcpLevels(connectedLevel, maxLevel);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getNumberOfSessions(uint32_t* currentSessions,
+                                                uint32_t* maxSessions) const {
+    DrmStatus status = mImpl->getNumberOfSessions(currentSessions, maxSessions);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                             DrmPlugin::SecurityLevel* level) const {
+    DrmStatus status = mImpl->getSecurityLevel(sessionId, level);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+    DrmStatus status = mImpl->getOfflineLicenseKeySetIds(keySetIds);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+    DrmStatus status = mImpl->removeOfflineLicense(keySetId);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getOfflineLicenseState(
+        Vector<uint8_t> const& keySetId, DrmPlugin::OfflineLicenseState* licenseState) const {
+    DrmStatus status = mImpl->getOfflineLicenseState(keySetId, licenseState);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getPropertyString(String8 const& name, String8& value) const {
+    DrmStatus status = mImpl->getPropertyString(name, value);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getPropertyByteArray(String8 const& name,
+                                                 Vector<uint8_t>& value) const {
+    DrmStatus status = mImpl->getPropertyByteArray(name, value);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setPropertyString(String8 const& name, String8 const& value) const {
+    DrmStatus status = mImpl->setPropertyString(name, value);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setPropertyByteArray(String8 const& name,
+                                                 Vector<uint8_t> const& value) const {
+    DrmStatus status = mImpl->setPropertyByteArray(name, value);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+    DrmStatus status = mImpl->getMetrics(consumer);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                               String8 const& algorithm) {
+    DrmStatus status = mImpl->setCipherAlgorithm(sessionId, algorithm);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setMacAlgorithm(Vector<uint8_t> const& sessionId,
+                                            String8 const& algorithm) {
+    DrmStatus status = mImpl->setMacAlgorithm(sessionId, algorithm);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                                    Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                                    Vector<uint8_t>& output) {
+    DrmStatus status = mImpl->encrypt(sessionId, keyId, input, iv, output);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                                    Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                                    Vector<uint8_t>& output) {
+    DrmStatus status = mImpl->decrypt(sessionId, keyId, input, iv, output);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                                 Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+    DrmStatus status = mImpl->sign(sessionId, keyId, message, signature);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                                   Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                                   bool& match) {
+    DrmStatus status = mImpl->verify(sessionId, keyId, message, signature, match);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                                    Vector<uint8_t> const& message,
+                                    Vector<uint8_t> const& wrappedKey, Vector<uint8_t>& signature) {
+    DrmStatus status = mImpl->signRSA(sessionId, algorithm, message, wrappedKey, signature);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setListener(const sp<IDrmClient>& listener) {
+    DrmStatus status = mImpl->setListener(listener);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::requiresSecureDecoder(const char* mime, bool* required) const {
+    DrmStatus status = mImpl->requiresSecureDecoder(mime, required);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::requiresSecureDecoder(const char* mime,
+                                                  DrmPlugin::SecurityLevel securityLevel,
+                                                  bool* required) const {
+    DrmStatus status = mImpl->requiresSecureDecoder(mime, securityLevel, required);
+    if (status != OK) {
+        reportMediaDrmErrored(status, "requiresSecureDecoderLevel");
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::setPlaybackId(Vector<uint8_t> const& sessionId,
+                                          const char* playbackId) {
+    DrmStatus status = mImpl->setPlaybackId(sessionId, playbackId);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__, toStdVec(sessionId));
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    DrmStatus status = mImpl->getLogMessages(logs);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+DrmStatus DrmMetricsLogger::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
+    DrmStatus status = mImpl->getSupportedSchemes(schemes);
+    if (status != OK) {
+        reportMediaDrmErrored(status, __func__);
+    }
+    return status;
+}
+
+void DrmMetricsLogger::reportMediaDrmCreated() const {
+    mediametrics_handle_t handle(mediametrics_create("mediadrm.created"));
+    mediametrics_setCString(handle, "scheme", mScheme.c_str());
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
+    mediametrics_setInt32(handle, "frontend", mFrontend);
+    mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
+    mediametrics_setCString(handle, "version", mVersion.c_str());
+    mediametrics_selfRecord(handle);
+    mediametrics_delete(handle);
+}
+
+void DrmMetricsLogger::reportMediaDrmSessionOpened(const std::vector<uint8_t>& sessionId) const {
+    mediametrics_handle_t handle(mediametrics_create("mediadrm.session_opened"));
+    mediametrics_setCString(handle, "scheme", mScheme.c_str());
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
+    mediametrics_setInt32(handle, "frontend", mFrontend);
+    mediametrics_setCString(handle, "version", mVersion.c_str());
+    mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
+    const std::lock_guard<std::mutex> lock(mSessionMapMutex);
+    auto it = mSessionMap.find(sessionId);
+    if (it != mSessionMap.end()) {
+        mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
+        mediametrics_setInt32(handle, "requested_security_level",
+                    DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mTargetSecurityLevel));
+        mediametrics_setInt32(handle, "opened_security_level",
+                    DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
+    }
+    mediametrics_selfRecord(handle);
+    mediametrics_delete(handle);
+}
+
+void DrmMetricsLogger::reportMediaDrmErrored(const DrmStatus& error_code, const char* api,
+                                             const std::vector<uint8_t>& sessionId) const {
+    mediametrics_handle_t handle(mediametrics_create("mediadrm.errored"));
+    mediametrics_setCString(handle, "scheme", mScheme.c_str());
+    mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
+    mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
+    mediametrics_setInt32(handle, "frontend", mFrontend);
+    mediametrics_setCString(handle, "version", mVersion.c_str());
+    mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
+    if (!sessionId.empty()) {
+        const std::lock_guard<std::mutex> lock(mSessionMapMutex);
+        auto it = mSessionMap.find(sessionId);
+        if (it != mSessionMap.end()) {
+            mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
+            mediametrics_setInt32(handle, "security_level",
+                        DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
+        }
+    }
+    mediametrics_setCString(handle, "api", api);
+    mediametrics_setInt32(handle, "error_code", MediaErrorToEnum(error_code));
+    mediametrics_setInt32(handle, "cdm_err", error_code.getCdmErr());
+    mediametrics_setInt32(handle, "oem_err", error_code.getOemErr());
+    mediametrics_setInt32(handle, "error_context", error_code.getContext());
+    mediametrics_selfRecord(handle);
+    mediametrics_delete(handle);
+}
+
+DrmStatus DrmMetricsLogger::generateNonce(std::string* out, size_t size, const char* api) {
+    std::vector<uint8_t> buf(size);
+    ssize_t bytes = getrandom(buf.data(), size, GRND_NONBLOCK);
+    if (bytes < size) {
+        ALOGE("getrandom failed: %d", errno);
+        reportMediaDrmErrored(ERROR_DRM_RESOURCE_BUSY, api);
+        return ERROR_DRM_RESOURCE_BUSY;
+    }
+    android::AString tmp;
+    encodeBase64(buf.data(), size, &tmp);
+    out->assign(tmp.c_str());
+    return OK;
+}
+
+const std::map<std::array<int64_t, 2>, std::string> DrmMetricsLogger::kUuidSchemeMap {
+        {{(int64_t)0x6DD8B3C345F44A68, (int64_t)0xBF3A64168D01A4A6}, "ABV DRM (MoDRM)"},
+        {{(int64_t)0xF239E769EFA34850, (int64_t)0x9C16A903C6932EFB},
+         "Adobe Primetime DRM version 4"},
+        {{(int64_t)0x616C746963617374, (int64_t)0x2D50726F74656374}, "Alticast"},
+        {{(int64_t)0x94CE86FB07FF4F43, (int64_t)0xADB893D2FA968CA2}, "Apple FairPlay"},
+        {{(int64_t)0x279FE473512C48FE, (int64_t)0xADE8D176FEE6B40F}, "Arris Titanium"},
+        {{(int64_t)0x3D5E6D359B9A41E8, (int64_t)0xB843DD3C6E72C42C}, "ChinaDRM"},
+        {{(int64_t)0x3EA8778F77424BF9, (int64_t)0xB18BE834B2ACBD47}, "Clear Key AES-128"},
+        {{(int64_t)0xBE58615B19C44684, (int64_t)0x88B3C8C57E99E957}, "Clear Key SAMPLE-AES"},
+        {{(int64_t)0xE2719D58A985B3C9, (int64_t)0x781AB030AF78D30E}, "Clear Key DASH-IF"},
+        {{(int64_t)0x644FE7B5260F4FAD, (int64_t)0x949A0762FFB054B4}, "CMLA (OMA DRM)"},
+        {{(int64_t)0x37C332587B994C7E, (int64_t)0xB15D19AF74482154}, "Commscope Titanium V3"},
+        {{(int64_t)0x45D481CB8FE049C0, (int64_t)0xADA9AB2D2455B2F2}, "CoreCrypt"},
+        {{(int64_t)0xDCF4E3E362F15818, (int64_t)0x7BA60A6FE33FF3DD}, "DigiCAP SmartXess"},
+        {{(int64_t)0x35BF197B530E42D7, (int64_t)0x8B651B4BF415070F}, "DivX DRM Series 5"},
+        {{(int64_t)0x80A6BE7E14484C37, (int64_t)0x9E70D5AEBE04C8D2}, "Irdeto Content Protection"},
+        {{(int64_t)0x5E629AF538DA4063, (int64_t)0x897797FFBD9902D4},
+         "Marlin Adaptive Streaming Simple Profile V1.0"},
+        {{(int64_t)0x9A04F07998404286, (int64_t)0xAB92E65BE0885F95}, "Microsoft PlayReady"},
+        {{(int64_t)0x6A99532D869F5922, (int64_t)0x9A91113AB7B1E2F3}, "MobiTV DRM"},
+        {{(int64_t)0xADB41C242DBF4A6D, (int64_t)0x958B4457C0D27B95}, "Nagra MediaAccess PRM 3.0"},
+        {{(int64_t)0x1F83E1E86EE94F0D, (int64_t)0xBA2F5EC4E3ED1A66}, "SecureMedia"},
+        {{(int64_t)0x992C46E6C4374899, (int64_t)0xB6A050FA91AD0E39}, "SecureMedia SteelKnot"},
+        {{(int64_t)0xA68129D3575B4F1A, (int64_t)0x9CBA3223846CF7C3},
+         "Synamedia/Cisco/NDS VideoGuard DRM"},
+        {{(int64_t)0xAA11967FCC014A4A, (int64_t)0x8E99C5D3DDDFEA2D}, "Unitend DRM (UDRM)"},
+        {{(int64_t)0x9A27DD82FDE24725, (int64_t)0x8CBC4234AA06EC09}, "Verimatrix VCAS"},
+        {{(int64_t)0xB4413586C58CFFB0, (int64_t)0x94A5D4896C1AF6C3}, "Viaccess-Orca DRM (VODRM)"},
+        {{(int64_t)0x793B79569F944946, (int64_t)0xA94223E7EF7E44B4}, "VisionCrypt"},
+        {{(int64_t)0x1077EFECC0B24D02, (int64_t)0xACE33C1E52E2FB4B}, "W3C Common PSSH box"},
+        {{(int64_t)0xEDEF8BA979D64ACE, (int64_t)0xA3C827DCD51D21ED}, "Widevine Content Protection"},
+};
+
+}  // namespace android
\ No newline at end of file
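
createPlugin() above keys kUuidSchemeMap on the 16-byte scheme UUID reinterpreted as two big-endian 64-bit halves. A small worked example using the Widevine entry from the table above; __builtin_bswap64 stands in for betoh64() and a little-endian host is assumed.

#include <array>
#include <cstdint>
#include <cstring>

int main() {
    // Widevine UUID edef8ba9-79d6-4ace-a3c8-27dcd51d21ed in its raw byte order.
    const uint8_t uuid[16] = {0xED, 0xEF, 0x8B, 0xA9, 0x79, 0xD6, 0x4A, 0xCE,
                              0xA3, 0xC8, 0x27, 0xDC, 0xD5, 0x1D, 0x21, 0xED};
    std::array<int64_t, 2> key;
    std::memcpy(key.data(), uuid, sizeof(uuid));
    key[0] = __builtin_bswap64(key[0]);  // 0xEDEF8BA979D64ACE (uuid_msb)
    key[1] = __builtin_bswap64(key[1]);  // 0xA3C827DCD51D21ED (uuid_lsb)
    // key now equals the "Widevine Content Protection" key in kUuidSchemeMap,
    // and the two halves are what reportMediaDrmCreated() logs as uuid_msb/uuid_lsb.
    return 0;
}
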
diff --git a/drm/libmediadrm/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
index e31395d..301538f 100644
--- a/drm/libmediadrm/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -34,6 +34,7 @@
 namespace android {
 
 using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientInfoParcel;
 
 namespace {
 void ResourceManagerServiceDied(void* cookie) {
@@ -137,7 +138,10 @@
 
     static int64_t clientId = 0;
     mSessionMap[toStdVec(sessionId)] = (SessionInfo){pid, uid, clientId};
-    mService->addResource(pid, uid, clientId++, drm, toResourceVec(sessionId, INT64_MAX));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = static_cast<int32_t>(uid),
+                                .id = clientId++};
+    mService->addResource(clientInfo, drm, toResourceVec(sessionId, INT64_MAX));
 }
 
 void DrmSessionManager::useSession(const Vector<uint8_t> &sessionId) {
@@ -150,7 +154,10 @@
     }
 
     auto info = it->second;
-    mService->addResource(info.pid, info.uid, info.clientId, NULL, toResourceVec(sessionId, -1));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(info.pid),
+                                .uid = static_cast<int32_t>(info.uid),
+                                .id = info.clientId};
+    mService->addResource(clientInfo, NULL, toResourceVec(sessionId, -1));
 }
 
 void DrmSessionManager::removeSession(const Vector<uint8_t> &sessionId) {
@@ -164,7 +171,10 @@
 
     auto info = it->second;
     // removeClient instead of removeSession because each client has only one session
-    mService->removeClient(info.pid, info.clientId);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(info.pid),
+                                .uid = static_cast<int32_t>(info.uid),
+                                .id = info.clientId};
+    mService->removeClient(clientInfo);
     mSessionMap.erase(it);
 }
 
@@ -182,9 +192,13 @@
 
     // cannot update mSessionMap because we do not know which sessionId is reclaimed;
     // we rely on IResourceManagerClient to removeSession in reclaimResource
-    Vector<uint8_t> dummy;
+    Vector<uint8_t> placeHolder;
     bool success;
-    ScopedAStatus status = service->reclaimResource(callingPid, toResourceVec(dummy, INT64_MAX), &success);
+    uid_t uid = AIBinder_getCallingUid();
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(callingPid),
+                                .uid = static_cast<int32_t>(uid)};
+    ScopedAStatus status = service->reclaimResource(
+        clientInfo, toResourceVec(placeHolder, INT64_MAX), &success);
     return status.isOk() && success;
 }
 
diff --git a/drm/libmediadrm/DrmStatus.cpp b/drm/libmediadrm/DrmStatus.cpp
new file mode 100644
index 0000000..f622160
--- /dev/null
+++ b/drm/libmediadrm/DrmStatus.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediadrm/DrmStatus.h>
+#include <json/json.h>
+
+namespace android {
+
+DrmStatus::DrmStatus(status_t err, const char *msg) : mStatus(err) {
+    Json::Value errorDetails;
+    Json::Reader reader;
+    if (!reader.parse(msg, errorDetails)) {
+        mErrMsg = msg;
+        return;
+    }
+
+    auto val = errorDetails["cdmError"];
+    if (val.isInt()) {
+        mCdmErr = val.asInt();
+    }
+    val = errorDetails["oemError"];
+    if (val.isInt()) {
+        mOemErr = val.asInt();
+    }
+    val = errorDetails["context"];
+    if (val.isInt()) {
+        mCtx = val.asInt();
+    }
+    val = errorDetails["errorMessage"];
+    if (val.isString()) {
+        mErrMsg = val.asString();
+    } else {
+        mErrMsg = msg;
+    }
+}
+
+}  // namespace android
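
DrmStatus can now be constructed from either a plain message or a JSON blob carrying vendor detail; only the keys cdmError, oemError, context and errorMessage are consulted, and a non-JSON message is kept verbatim. An illustrative construction with hypothetical values:

    // Illustrative only; the key names match the ones parsed above.
    DrmStatus s(ERROR_DRM_UNKNOWN,
                "{\"cdmError\": 3, \"oemError\": 71, \"context\": 12, "
                "\"errorMessage\": \"license rejected\"}");
    // s.getCdmErr() == 3, s.getOemErr() == 71, s.getContext() == 12,
    // and s.getErrorMessage() yields "license rejected".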
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index be0cd4b..a99b1d1 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -32,6 +32,7 @@
 #include <android/hardware/drm/1.4/IDrmFactory.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/HidlSupport.h>
+#include <json/json.h>
 
 #include <cutils/properties.h>
 #include <utils/Errors.h>
@@ -58,11 +59,11 @@
 
 namespace {
 
-template <typename Hal>
-Hal* MakeObject(status_t* pstatus) {
+template <typename Hal, typename... Ts>
+Hal* MakeObject(status_t* pstatus, Ts... args) {
     status_t err = OK;
     status_t& status = pstatus ? *pstatus : err;
-    auto obj = new Hal();
+    auto obj = new Hal(args...);
     status = obj->initCheck();
     if (status != OK && status != NO_INIT) {
         return NULL;
@@ -191,8 +192,8 @@
     return factories;
 }
 
-sp<IDrm> MakeDrm(status_t* pstatus) {
-    return MakeObject<DrmHal>(pstatus);
+sp<IDrm> MakeDrm(IDrmFrontend frontend, status_t* pstatus) {
+    return MakeObject<DrmMetricsLogger>(pstatus, frontend);
 }
 
 sp<ICrypto> MakeCrypto(status_t* pstatus) {
@@ -362,18 +363,23 @@
 }
 }  // namespace
 
-std::string GetExceptionMessage(status_t err, const char* msg,
+std::string GetExceptionMessage(const DrmStatus &err, const char* defaultMsg,
                                 const Vector<::V1_4::LogMessage>& logs) {
     std::string ruler("==============================");
     std::string header("Beginning of DRM Plugin Log");
     std::string footer("End of DRM Plugin Log");
+    std::string msg(err.getErrorMessage());
     String8 msg8;
-    if (msg) {
-        msg8 += msg;
+    if (!msg.empty()) {
+        msg8 += msg.c_str();
+        msg8 += ": ";
+    } else if (defaultMsg) {
+        msg8 += defaultMsg;
         msg8 += ": ";
     }
-    auto errStr = StrCryptoError(err);
-    msg8 += errStr.c_str();
+    msg8 += StrCryptoError(err).c_str();
+    msg8 += String8::format("\ncdm err: %d, oem err: %d, ctx: %d",
+                            err.getCdmErr(), err.getOemErr(), err.getContext());
     msg8 += String8::format("\n%s %s %s", ruler.c_str(), header.c_str(), ruler.c_str());
 
     for (auto log : logs) {
@@ -410,6 +416,141 @@
     return logs;
 }
 
+DrmStatus statusAidlToDrmStatus(::ndk::ScopedAStatus& statusAidl) {
+    if (statusAidl.isOk()) return OK;
+    if (statusAidl.getExceptionCode() != EX_SERVICE_SPECIFIC) return DEAD_OBJECT;
+    auto astatus = static_cast<StatusAidl>(statusAidl.getServiceSpecificError());
+    status_t status{};
+    switch (astatus) {
+    case StatusAidl::OK:
+        status = OK;
+        break;
+    case StatusAidl::BAD_VALUE:
+        status = BAD_VALUE;
+        break;
+    case StatusAidl::ERROR_DRM_CANNOT_HANDLE:
+        status = ERROR_DRM_CANNOT_HANDLE;
+        break;
+    case StatusAidl::ERROR_DRM_DECRYPT:
+        status = ERROR_DRM_DECRYPT;
+        break;
+    case StatusAidl::ERROR_DRM_DEVICE_REVOKED:
+        status = ERROR_DRM_DEVICE_REVOKED;
+        break;
+    case StatusAidl::ERROR_DRM_FRAME_TOO_LARGE:
+        status = ERROR_DRM_FRAME_TOO_LARGE;
+        break;
+    case StatusAidl::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
+        status = ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
+        break;
+    case StatusAidl::ERROR_DRM_INSUFFICIENT_SECURITY:
+        status = ERROR_DRM_INSUFFICIENT_SECURITY;
+        break;
+    case StatusAidl::ERROR_DRM_INVALID_STATE:
+        status = ERROR_DRM_INVALID_STATE;
+        break;
+    case StatusAidl::ERROR_DRM_LICENSE_EXPIRED:
+        status = ERROR_DRM_LICENSE_EXPIRED;
+        break;
+    case StatusAidl::ERROR_DRM_NO_LICENSE:
+        status = ERROR_DRM_NO_LICENSE;
+        break;
+    case StatusAidl::ERROR_DRM_NOT_PROVISIONED:
+        status = ERROR_DRM_NOT_PROVISIONED;
+        break;
+    case StatusAidl::ERROR_DRM_RESOURCE_BUSY:
+        status = ERROR_DRM_RESOURCE_BUSY;
+        break;
+    case StatusAidl::ERROR_DRM_RESOURCE_CONTENTION:
+        status = ERROR_DRM_RESOURCE_CONTENTION;
+        break;
+    case StatusAidl::ERROR_DRM_SESSION_LOST_STATE:
+        status = ERROR_DRM_SESSION_LOST_STATE;
+        break;
+    case StatusAidl::ERROR_DRM_SESSION_NOT_OPENED:
+        status = ERROR_DRM_SESSION_NOT_OPENED;
+        break;
+
+    // New in S / drm@1.4:
+    case StatusAidl::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
+        status = ERROR_DRM_ZERO_SUBSAMPLES;
+        break;
+    case StatusAidl::CRYPTO_LIBRARY_ERROR:
+        status = ERROR_DRM_CRYPTO_LIBRARY;
+        break;
+    case StatusAidl::GENERAL_OEM_ERROR:
+        status = ERROR_DRM_GENERIC_OEM;
+        break;
+    case StatusAidl::GENERAL_PLUGIN_ERROR:
+        status = ERROR_DRM_GENERIC_PLUGIN;
+        break;
+    case StatusAidl::INIT_DATA_INVALID:
+        status = ERROR_DRM_INIT_DATA;
+        break;
+    case StatusAidl::KEY_NOT_LOADED:
+        status = ERROR_DRM_KEY_NOT_LOADED;
+        break;
+    case StatusAidl::LICENSE_PARSE_ERROR:
+        status = ERROR_DRM_LICENSE_PARSE;
+        break;
+    case StatusAidl::LICENSE_POLICY_ERROR:
+        status = ERROR_DRM_LICENSE_POLICY;
+        break;
+    case StatusAidl::LICENSE_RELEASE_ERROR:
+        status = ERROR_DRM_LICENSE_RELEASE;
+        break;
+    case StatusAidl::LICENSE_REQUEST_REJECTED:
+        status = ERROR_DRM_LICENSE_REQUEST_REJECTED;
+        break;
+    case StatusAidl::LICENSE_RESTORE_ERROR:
+        status = ERROR_DRM_LICENSE_RESTORE;
+        break;
+    case StatusAidl::LICENSE_STATE_ERROR:
+        status = ERROR_DRM_LICENSE_STATE;
+        break;
+    case StatusAidl::MALFORMED_CERTIFICATE:
+        status = ERROR_DRM_CERTIFICATE_MALFORMED;
+        break;
+    case StatusAidl::MEDIA_FRAMEWORK_ERROR:
+        status = ERROR_DRM_MEDIA_FRAMEWORK;
+        break;
+    case StatusAidl::MISSING_CERTIFICATE:
+        status = ERROR_DRM_CERTIFICATE_MISSING;
+        break;
+    case StatusAidl::PROVISIONING_CERTIFICATE_ERROR:
+        status = ERROR_DRM_PROVISIONING_CERTIFICATE;
+        break;
+    case StatusAidl::PROVISIONING_CONFIGURATION_ERROR:
+        status = ERROR_DRM_PROVISIONING_CONFIG;
+        break;
+    case StatusAidl::PROVISIONING_PARSE_ERROR:
+        status = ERROR_DRM_PROVISIONING_PARSE;
+        break;
+    case StatusAidl::PROVISIONING_REQUEST_REJECTED:
+        status = ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
+        break;
+    case StatusAidl::RETRYABLE_PROVISIONING_ERROR:
+        status = ERROR_DRM_PROVISIONING_RETRY;
+        break;
+    case StatusAidl::SECURE_STOP_RELEASE_ERROR:
+        status = ERROR_DRM_SECURE_STOP_RELEASE;
+        break;
+    case StatusAidl::STORAGE_READ_FAILURE:
+        status = ERROR_DRM_STORAGE_READ;
+        break;
+    case StatusAidl::STORAGE_WRITE_FAILURE:
+        status = ERROR_DRM_STORAGE_WRITE;
+        break;
+
+    case StatusAidl::ERROR_DRM_UNKNOWN:
+    default:
+        status = ERROR_DRM_UNKNOWN;
+        break;
+    }
+
+    return DrmStatus(status, statusAidl.getMessage());
+}
+
 LogBuffer gLogBuf;
 }  // namespace DrmUtils
 }  // namespace android
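
statusAidlToDrmStatus() is now the single point where AIDL service-specific codes are folded back into framework error values: an OK binder status maps to OK, a non-service-specific exception maps to DEAD_OBJECT, and unrecognized plugin codes fall through to ERROR_DRM_UNKNOWN, with the binder message preserved in the returned DrmStatus. A minimal sketch of a call site, assuming astatus is the ScopedAStatus returned by some AIDL IDrmPlugin call (hypothetical surrounding code):

    // Sketch only: converts a binder result into the richer DrmStatus.
    DrmStatus err = DrmUtils::statusAidlToDrmStatus(astatus);
    ALOGW("plugin call failed: cdm err %d, oem err %d, ctx %d",
          err.getCdmErr(), err.getOemErr(), err.getContext());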
diff --git a/drm/libmediadrm/TEST_MAPPING b/drm/libmediadrm/TEST_MAPPING
index bc15879..8d7be22 100644
--- a/drm/libmediadrm/TEST_MAPPING
+++ b/drm/libmediadrm/TEST_MAPPING
@@ -1,19 +1,19 @@
 {
   "presubmit": [
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
 	  "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.MediaDrmTest"
+          "include-filter": "com.google.android.media.wvts.MediaDrmTest"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineDashPolicyTests"
+          "include-filter": "com.google.android.media.wvts.WidevineDashPolicyTests"
         }
       ]
     }
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index a85e3cf..deda9ef 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -37,6 +37,7 @@
         "liblog",
         "resourcemanager_aidl_interface-ndk",
         "libaidlcommonsupport",
+        "libjsoncpp",
     ],
     header_libs: [
         "libmedia_headers",
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHal.h b/drm/libmediadrm/include/mediadrm/CryptoHal.h
index 32a6741..60cbbf8 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHal.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHal.h
@@ -38,7 +38,7 @@
     virtual bool requiresSecureDecoderComponent(
             const char *mime) const;
     virtual void notifyResolution(uint32_t width, uint32_t height);
-    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId);
+    virtual DrmStatus setMediaDrmSession(const Vector<uint8_t> &sessionId);
     virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16],
             CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
             const drm::V1_0::SharedBuffer &source, size_t offset,
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
index 50878a6..eec4585 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
@@ -49,7 +49,7 @@
     virtual status_t destroyPlugin();
     virtual bool requiresSecureDecoderComponent(const char* mime) const;
     virtual void notifyResolution(uint32_t width, uint32_t height);
-    virtual status_t setMediaDrmSession(const Vector<uint8_t>& sessionId);
+    virtual DrmStatus setMediaDrmSession(const Vector<uint8_t>& sessionId);
     virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16], CryptoPlugin::Mode mode,
                             const CryptoPlugin::Pattern& pattern, const ::SharedBuffer& source,
                             size_t offset, const CryptoPlugin::SubSample* subSamples,
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h b/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h
index 6db1e89..0150d7c 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h
@@ -57,7 +57,7 @@
 
     virtual void notifyResolution(uint32_t width, uint32_t height);
 
-    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId);
+    virtual DrmStatus setMediaDrmSession(const Vector<uint8_t> &sessionId);
 
     virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16],
             CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index eab597b..16e5fe1 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -15,6 +15,7 @@
  */
 
 #include <mediadrm/IDrm.h>
+#include <mediadrm/DrmStatus.h>
 
 #ifndef DRM_HAL_H_
 #define DRM_HAL_H_
@@ -24,100 +25,99 @@
 struct DrmHal : public IDrm {
     DrmHal();
     virtual ~DrmHal();
-    virtual status_t initCheck() const;
-    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16],
-                                             const String8 &mimeType,
-                                             DrmPlugin::SecurityLevel securityLevel,
-                                             bool *result);
-    virtual status_t createPlugin(const uint8_t uuid[16],
-                                  const String8 &appPackageName);
-    virtual status_t destroyPlugin();
-    virtual status_t openSession(DrmPlugin::SecurityLevel securityLevel,
+    virtual DrmStatus initCheck() const;
+    virtual DrmStatus isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel securityLevel, bool* result);
+    virtual DrmStatus createPlugin(const uint8_t uuid[16],
+                                   const String8 &appPackageName);
+    virtual DrmStatus destroyPlugin();
+    virtual DrmStatus openSession(DrmPlugin::SecurityLevel securityLevel,
             Vector<uint8_t> &sessionId);
-    virtual status_t closeSession(Vector<uint8_t> const &sessionId);
-    virtual status_t
+    virtual DrmStatus closeSession(Vector<uint8_t> const &sessionId);
+    virtual DrmStatus
         getKeyRequest(Vector<uint8_t> const &sessionId,
                       Vector<uint8_t> const &initData,
                       String8 const &mimeType, DrmPlugin::KeyType keyType,
                       KeyedVector<String8, String8> const &optionalParameters,
                       Vector<uint8_t> &request, String8 &defaultUrl,
                       DrmPlugin::KeyRequestType *keyRequestType);
-    virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
-                                        Vector<uint8_t> const &response,
-                                        Vector<uint8_t> &keySetId);
-    virtual status_t removeKeys(Vector<uint8_t> const &keySetId);
-    virtual status_t restoreKeys(Vector<uint8_t> const &sessionId,
-                                 Vector<uint8_t> const &keySetId);
-    virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
-                                    KeyedVector<String8, String8> &infoMap) const;
-    virtual status_t getProvisionRequest(String8 const &certType,
-                                         String8 const &certAuthority,
-                                         Vector<uint8_t> &request,
-                                         String8 &defaultUrl);
-    virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
-                                              Vector<uint8_t> &certificate,
-                                              Vector<uint8_t> &wrappedKey);
-    virtual status_t getSecureStops(List<Vector<uint8_t>> &secureStops);
-    virtual status_t getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
-    virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
-    virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
-    virtual status_t removeSecureStop(Vector<uint8_t> const &ssid);
-    virtual status_t removeAllSecureStops();
-    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
+    virtual DrmStatus provideKeyResponse(Vector<uint8_t> const &sessionId,
+                                         Vector<uint8_t> const &response,
+                                         Vector<uint8_t> &keySetId);
+    virtual DrmStatus removeKeys(Vector<uint8_t> const &keySetId);
+    virtual DrmStatus restoreKeys(Vector<uint8_t> const &sessionId,
+                                  Vector<uint8_t> const &keySetId);
+    virtual DrmStatus queryKeyStatus(Vector<uint8_t> const &sessionId,
+                                     KeyedVector<String8, String8> &infoMap) const;
+    virtual DrmStatus getProvisionRequest(String8 const &certType,
+                                          String8 const &certAuthority,
+                                          Vector<uint8_t> &request,
+                                          String8 &defaultUrl);
+    virtual DrmStatus provideProvisionResponse(Vector<uint8_t> const &response,
+                                               Vector<uint8_t> &certificate,
+                                               Vector<uint8_t> &wrappedKey);
+    virtual DrmStatus getSecureStops(List<Vector<uint8_t>> &secureStops);
+    virtual DrmStatus getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
+    virtual DrmStatus getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
+    virtual DrmStatus releaseSecureStops(Vector<uint8_t> const &ssRelease);
+    virtual DrmStatus removeSecureStop(Vector<uint8_t> const &ssid);
+    virtual DrmStatus removeAllSecureStops();
+    virtual DrmStatus getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
             DrmPlugin::HdcpLevel *maxLevel) const;
-    virtual status_t getNumberOfSessions(uint32_t *currentSessions,
+    virtual DrmStatus getNumberOfSessions(uint32_t *currentSessions,
             uint32_t *maxSessions) const;
-    virtual status_t getSecurityLevel(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus getSecurityLevel(Vector<uint8_t> const &sessionId,
             DrmPlugin::SecurityLevel *level) const;
-    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
-    virtual status_t removeOfflineLicense(Vector<uint8_t> const &keySetId);
-    virtual status_t getOfflineLicenseState(Vector<uint8_t> const &keySetId,
+    virtual DrmStatus getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
+    virtual DrmStatus removeOfflineLicense(Vector<uint8_t> const &keySetId);
+    virtual DrmStatus getOfflineLicenseState(Vector<uint8_t> const &keySetId,
             DrmPlugin::OfflineLicenseState *licenseState) const;
-    virtual status_t getPropertyString(String8 const &name, String8 &value) const;
-    virtual status_t getPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> &value) const;
-    virtual status_t setPropertyString(String8 const &name,
-                                       String8 const &value ) const;
-    virtual status_t setPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> const &value) const;
-    virtual status_t getMetrics(const sp<IDrmMetricsConsumer> &consumer);
-    virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId,
-                                        String8 const &algorithm);
-    virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId,
-                                     String8 const &algorithm);
-    virtual status_t encrypt(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus getPropertyString(String8 const &name, String8 &value) const;
+    virtual DrmStatus getPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> &value) const;
+    virtual DrmStatus setPropertyString(String8 const &name,
+                                        String8 const &value ) const;
+    virtual DrmStatus setPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> const &value) const;
+    virtual DrmStatus getMetrics(const sp<IDrmMetricsConsumer> &consumer);
+    virtual DrmStatus setCipherAlgorithm(Vector<uint8_t> const &sessionId,
+                                         String8 const &algorithm);
+    virtual DrmStatus setMacAlgorithm(Vector<uint8_t> const &sessionId,
+                                      String8 const &algorithm);
+    virtual DrmStatus encrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output);
+    virtual DrmStatus decrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output);
+    virtual DrmStatus sign(Vector<uint8_t> const &sessionId,
+                           Vector<uint8_t> const &keyId,
+                           Vector<uint8_t> const &message,
+                           Vector<uint8_t> &signature);
+    virtual DrmStatus verify(Vector<uint8_t> const &sessionId,
                              Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output);
-    virtual status_t decrypt(Vector<uint8_t> const &sessionId,
-                             Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output);
-    virtual status_t sign(Vector<uint8_t> const &sessionId,
-                          Vector<uint8_t> const &keyId,
-                          Vector<uint8_t> const &message,
-                          Vector<uint8_t> &signature);
-    virtual status_t verify(Vector<uint8_t> const &sessionId,
-                            Vector<uint8_t> const &keyId,
-                            Vector<uint8_t> const &message,
-                            Vector<uint8_t> const &signature,
-                            bool &match);
-    virtual status_t signRSA(Vector<uint8_t> const &sessionId,
-                             String8 const &algorithm,
                              Vector<uint8_t> const &message,
-                             Vector<uint8_t> const &wrappedKey,
-                             Vector<uint8_t> &signature);
-    virtual status_t setListener(const sp<IDrmClient>& listener);
-    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
-    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool *required) const;
-    virtual status_t setPlaybackId(
+                             Vector<uint8_t> const &signature,
+                             bool &match);
+    virtual DrmStatus signRSA(Vector<uint8_t> const &sessionId,
+                              String8 const &algorithm,
+                              Vector<uint8_t> const &message,
+                              Vector<uint8_t> const &wrappedKey,
+                              Vector<uint8_t> &signature);
+    virtual DrmStatus setListener(const sp<IDrmClient>& listener);
+    virtual DrmStatus requiresSecureDecoder(const char *mime, bool *required) const;
+    virtual DrmStatus requiresSecureDecoder(const char *mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool *required) const;
+    virtual DrmStatus setPlaybackId(
             Vector<uint8_t> const &sessionId,
             const char *playbackId);
-    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
-    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
+    virtual DrmStatus getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+    virtual DrmStatus getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
 private:
     sp<IDrm> mDrmHalHidl;
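
With every IDrm entry point now returning DrmStatus instead of a bare status_t, existing callers keep their control flow while gaining access to the CDM/OEM detail. A minimal sketch, assuming DrmStatus remains comparable to the existing status_t constants (as its use with OK and StrCryptoError in DrmUtils.cpp above suggests); drm and sessionId are hypothetical locals:

    // Sketch only: the status check is unchanged, the extra detail is optional.
    DrmStatus status = drm->closeSession(sessionId);
    if (status != OK) {
        ALOGE("closeSession: cdm err %d, oem err %d, ctx %d",
              status.getCdmErr(), status.getOemErr(), status.getContext());
    }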
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
index 0f51ce9..a81b312 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
@@ -25,6 +25,7 @@
 #include <mediadrm/DrmMetrics.h>
 #include <mediadrm/DrmSessionManager.h>
 #include <mediadrm/DrmHalListener.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/IDrm.h>
 
 using IDrmPluginAidl = ::aidl::android::hardware::drm::IDrmPlugin;
@@ -38,74 +39,78 @@
     struct DrmSessionClient;
     DrmHalAidl();
     virtual ~DrmHalAidl();
-    virtual status_t initCheck() const;
-    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
-                                             DrmPlugin::SecurityLevel securityLevel, bool* result);
-    virtual status_t createPlugin(const uint8_t uuid[16], const String8& appPackageName);
-    virtual status_t destroyPlugin();
-    virtual status_t openSession(DrmPlugin::SecurityLevel securityLevel,
-                                 Vector<uint8_t>& sessionId);
-    virtual status_t closeSession(Vector<uint8_t> const& sessionId);
-    virtual status_t getKeyRequest(Vector<uint8_t> const& sessionId,
-                                   Vector<uint8_t> const& initData, String8 const& mimeType,
-                                   DrmPlugin::KeyType keyType,
-                                   KeyedVector<String8, String8> const& optionalParameters,
-                                   Vector<uint8_t>& request, String8& defaultUrl,
-                                   DrmPlugin::KeyRequestType* keyRequestType);
-    virtual status_t provideKeyResponse(Vector<uint8_t> const& sessionId,
-                                        Vector<uint8_t> const& response, Vector<uint8_t>& keySetId);
-    virtual status_t removeKeys(Vector<uint8_t> const& keySetId);
-    virtual status_t restoreKeys(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keySetId);
-    virtual status_t queryKeyStatus(Vector<uint8_t> const& sessionId,
-                                    KeyedVector<String8, String8>& infoMap) const;
-    virtual status_t getProvisionRequest(String8 const& certType, String8 const& certAuthority,
-                                         Vector<uint8_t>& request, String8& defaultUrl);
-    virtual status_t provideProvisionResponse(Vector<uint8_t> const& response,
-                                              Vector<uint8_t>& certificate,
-                                              Vector<uint8_t>& wrappedKey);
-    virtual status_t getSecureStops(List<Vector<uint8_t>>& secureStops);
-    virtual status_t getSecureStopIds(List<Vector<uint8_t>>& secureStopIds);
-    virtual status_t getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop);
-    virtual status_t releaseSecureStops(Vector<uint8_t> const& ssRelease);
-    virtual status_t removeSecureStop(Vector<uint8_t> const& ssid);
-    virtual status_t removeAllSecureStops();
-    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
-                                   DrmPlugin::HdcpLevel* maxLevel) const;
-    virtual status_t getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const;
-    virtual status_t getSecurityLevel(Vector<uint8_t> const& sessionId,
-                                      DrmPlugin::SecurityLevel* level) const;
-    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const;
-    virtual status_t removeOfflineLicense(Vector<uint8_t> const& keySetId);
-    virtual status_t getOfflineLicenseState(Vector<uint8_t> const& keySetId,
-                                            DrmPlugin::OfflineLicenseState* licenseState) const;
-    virtual status_t getPropertyString(String8 const& name, String8& value) const;
-    virtual status_t getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const;
-    virtual status_t setPropertyString(String8 const& name, String8 const& value) const;
-    virtual status_t setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const;
-    virtual status_t getMetrics(const sp<IDrmMetricsConsumer>& consumer);
-    virtual status_t setCipherAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
-    virtual status_t setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
-    virtual status_t encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output);
-    virtual status_t decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
-                             Vector<uint8_t>& output);
-    virtual status_t sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                          Vector<uint8_t> const& message, Vector<uint8_t>& signature);
-    virtual status_t verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
-                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
-                            bool& match);
-    virtual status_t signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
-                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
-                             Vector<uint8_t>& signature);
-    virtual status_t setListener(const sp<IDrmClient>& listener);
-    virtual status_t requiresSecureDecoder(const char* mime, bool* required) const;
-    virtual status_t requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool* required) const;
-    virtual status_t setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
-    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
-    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
+    virtual DrmStatus initCheck() const;
+    virtual DrmStatus isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel securityLevel, bool* result);
+    virtual DrmStatus createPlugin(const uint8_t uuid[16], const String8& appPackageName);
+    virtual DrmStatus destroyPlugin();
+    virtual DrmStatus openSession(DrmPlugin::SecurityLevel securityLevel,
+                                  Vector<uint8_t>& sessionId);
+    virtual DrmStatus closeSession(Vector<uint8_t> const& sessionId);
+    virtual DrmStatus getKeyRequest(Vector<uint8_t> const& sessionId,
+                                    Vector<uint8_t> const& initData, String8 const& mimeType,
+                                    DrmPlugin::KeyType keyType,
+                                    KeyedVector<String8, String8> const& optionalParameters,
+                                    Vector<uint8_t>& request, String8& defaultUrl,
+                                    DrmPlugin::KeyRequestType* keyRequestType);
+    virtual DrmStatus provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                         Vector<uint8_t> const& response,
+                                         Vector<uint8_t>& keySetId);
+    virtual DrmStatus removeKeys(Vector<uint8_t> const& keySetId);
+    virtual DrmStatus restoreKeys(Vector<uint8_t> const& sessionId,
+                                  Vector<uint8_t> const& keySetId);
+    virtual DrmStatus queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                     KeyedVector<String8, String8>& infoMap) const;
+    virtual DrmStatus getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                          Vector<uint8_t>& request, String8& defaultUrl);
+    virtual DrmStatus provideProvisionResponse(Vector<uint8_t> const& response,
+                                               Vector<uint8_t>& certificate,
+                                               Vector<uint8_t>& wrappedKey);
+    virtual DrmStatus getSecureStops(List<Vector<uint8_t>>& secureStops);
+    virtual DrmStatus getSecureStopIds(List<Vector<uint8_t>>& secureStopIds);
+    virtual DrmStatus getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop);
+    virtual DrmStatus releaseSecureStops(Vector<uint8_t> const& ssRelease);
+    virtual DrmStatus removeSecureStop(Vector<uint8_t> const& ssid);
+    virtual DrmStatus removeAllSecureStops();
+    virtual DrmStatus getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                                    DrmPlugin::HdcpLevel* maxLevel) const;
+    virtual DrmStatus getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const;
+    virtual DrmStatus getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                       DrmPlugin::SecurityLevel* level) const;
+    virtual DrmStatus getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const;
+    virtual DrmStatus removeOfflineLicense(Vector<uint8_t> const& keySetId);
+    virtual DrmStatus getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                             DrmPlugin::OfflineLicenseState* licenseState) const;
+    virtual DrmStatus getPropertyString(String8 const& name, String8& value) const;
+    virtual DrmStatus getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const;
+    virtual DrmStatus setPropertyString(String8 const& name, String8 const& value) const;
+    virtual DrmStatus setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const;
+    virtual DrmStatus getMetrics(const sp<IDrmMetricsConsumer>& consumer);
+    virtual DrmStatus setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                         String8 const& algorithm);
+    virtual DrmStatus setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
+    virtual DrmStatus encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output);
+    virtual DrmStatus decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output);
+    virtual DrmStatus sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                           Vector<uint8_t> const& message, Vector<uint8_t>& signature);
+    virtual DrmStatus verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                             bool& match);
+    virtual DrmStatus signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                              Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                              Vector<uint8_t>& signature);
+    virtual DrmStatus setListener(const sp<IDrmClient>& listener);
+    virtual DrmStatus requiresSecureDecoder(const char* mime, bool* required) const;
+    virtual DrmStatus requiresSecureDecoder(const char* mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool* required) const;
+    virtual DrmStatus setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
+    virtual DrmStatus getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+    virtual DrmStatus getSupportedSchemes(std::vector<uint8_t>& schemes) const;
 
     ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
                                  const std::vector<uint8_t>& in_sessionId,
@@ -118,7 +123,7 @@
     ::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
   private:
     static Mutex mLock;
-    mutable MediaDrmMetrics mMetrics;
+    std::shared_ptr<MediaDrmMetrics> mMetrics;
     std::shared_ptr<DrmHalListener> mListener;
     const std::vector<std::shared_ptr<IDrmFactoryAidl>> mFactories;
     std::shared_ptr<IDrmPluginAidl> mPlugin;
@@ -128,8 +133,8 @@
     void closeOpenSessions();
     std::string reportPluginMetrics() const;
     std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
-    status_t getPropertyStringInternal(String8 const& name, String8& value) const;
-    status_t getPropertyByteArrayInternal(String8 const& name, Vector<uint8_t>& value) const;
+    DrmStatus getPropertyStringInternal(String8 const& name, String8& value) const;
+    DrmStatus getPropertyByteArrayInternal(String8 const& name, Vector<uint8_t>& value) const;
     DISALLOW_EVIL_CONSTRUCTORS(DrmHalAidl);
 };
 
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
index 11f0608..73a9e1d 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
@@ -26,6 +26,7 @@
 #include <media/drm/DrmAPI.h>
 #include <mediadrm/DrmMetrics.h>
 #include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/IDrm.h>
 #include <mediadrm/IDrmClient.h>
 #include <mediadrm/IDrmMetricsConsumer.h>
@@ -63,24 +64,22 @@
     DrmHalHidl();
     virtual ~DrmHalHidl();
 
-    virtual status_t initCheck() const;
+    virtual DrmStatus initCheck() const;
 
-    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16],
-                                             const String8& mimeType,
-                                             DrmPlugin::SecurityLevel level,
-                                             bool *isSupported);
+    virtual DrmStatus isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel level, bool* isSupported);
 
-    virtual status_t createPlugin(const uint8_t uuid[16],
-                                  const String8 &appPackageName);
+    virtual DrmStatus createPlugin(const uint8_t uuid[16],
+                                   const String8 &appPackageName);
 
-    virtual status_t destroyPlugin();
+    virtual DrmStatus destroyPlugin();
 
-    virtual status_t openSession(DrmPlugin::SecurityLevel level,
+    virtual DrmStatus openSession(DrmPlugin::SecurityLevel level,
             Vector<uint8_t> &sessionId);
 
-    virtual status_t closeSession(Vector<uint8_t> const &sessionId);
+    virtual DrmStatus closeSession(Vector<uint8_t> const &sessionId);
 
-    virtual status_t
+    virtual DrmStatus
         getKeyRequest(Vector<uint8_t> const &sessionId,
                       Vector<uint8_t> const &initData,
                       String8 const &mimeType, DrmPlugin::KeyType keyType,
@@ -88,103 +87,104 @@
                       Vector<uint8_t> &request, String8 &defaultUrl,
                       DrmPlugin::KeyRequestType *keyRequestType);
 
-    virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
-                                        Vector<uint8_t> const &response,
-                                        Vector<uint8_t> &keySetId);
+    virtual DrmStatus provideKeyResponse(Vector<uint8_t> const &sessionId,
+                                         Vector<uint8_t> const &response,
+                                         Vector<uint8_t> &keySetId);
 
-    virtual status_t removeKeys(Vector<uint8_t> const &keySetId);
+    virtual DrmStatus removeKeys(Vector<uint8_t> const &keySetId);
 
-    virtual status_t restoreKeys(Vector<uint8_t> const &sessionId,
-                                 Vector<uint8_t> const &keySetId);
+    virtual DrmStatus restoreKeys(Vector<uint8_t> const &sessionId,
+                                  Vector<uint8_t> const &keySetId);
 
-    virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
-                                    KeyedVector<String8, String8> &infoMap) const;
+    virtual DrmStatus queryKeyStatus(Vector<uint8_t> const &sessionId,
+                                     KeyedVector<String8, String8> &infoMap) const;
 
-    virtual status_t getProvisionRequest(String8 const &certType,
-                                         String8 const &certAuthority,
-                                         Vector<uint8_t> &request,
-                                         String8 &defaultUrl);
+    virtual DrmStatus getProvisionRequest(String8 const &certType,
+                                          String8 const &certAuthority,
+                                          Vector<uint8_t> &request,
+                                          String8 &defaultUrl);
 
-    virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
-                                              Vector<uint8_t> &certificate,
-                                              Vector<uint8_t> &wrappedKey);
+    virtual DrmStatus provideProvisionResponse(Vector<uint8_t> const &response,
+                                               Vector<uint8_t> &certificate,
+                                               Vector<uint8_t> &wrappedKey);
 
-    virtual status_t getSecureStops(List<Vector<uint8_t>> &secureStops);
-    virtual status_t getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
-    virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
+    virtual DrmStatus getSecureStops(List<Vector<uint8_t>> &secureStops);
+    virtual DrmStatus getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
+    virtual DrmStatus getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
 
-    virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
-    virtual status_t removeSecureStop(Vector<uint8_t> const &ssid);
-    virtual status_t removeAllSecureStops();
+    virtual DrmStatus releaseSecureStops(Vector<uint8_t> const &ssRelease);
+    virtual DrmStatus removeSecureStop(Vector<uint8_t> const &ssid);
+    virtual DrmStatus removeAllSecureStops();
 
-    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
+    virtual DrmStatus getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
             DrmPlugin::HdcpLevel *maxLevel) const;
-    virtual status_t getNumberOfSessions(uint32_t *currentSessions,
+    virtual DrmStatus getNumberOfSessions(uint32_t *currentSessions,
             uint32_t *maxSessions) const;
-    virtual status_t getSecurityLevel(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus getSecurityLevel(Vector<uint8_t> const &sessionId,
             DrmPlugin::SecurityLevel *level) const;
 
-    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
-    virtual status_t removeOfflineLicense(Vector<uint8_t> const &keySetId);
-    virtual status_t getOfflineLicenseState(Vector<uint8_t> const &keySetId,
+    virtual DrmStatus getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
+    virtual DrmStatus removeOfflineLicense(Vector<uint8_t> const &keySetId);
+    virtual DrmStatus getOfflineLicenseState(Vector<uint8_t> const &keySetId,
             DrmPlugin::OfflineLicenseState *licenseState) const;
 
-    virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
-    virtual status_t getPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> &value ) const;
-    virtual status_t setPropertyString(String8 const &name, String8 const &value ) const;
-    virtual status_t setPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> const &value ) const;
-    virtual status_t getMetrics(const sp<IDrmMetricsConsumer> &consumer);
+    virtual DrmStatus getPropertyString(String8 const &name, String8 &value ) const;
+    virtual DrmStatus getPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> &value ) const;
+    virtual DrmStatus setPropertyString(String8 const &name, String8 const &value ) const;
+    virtual DrmStatus setPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> const &value ) const;
+    virtual DrmStatus getMetrics(const sp<IDrmMetricsConsumer> &consumer);
 
-    virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId,
-                                        String8 const &algorithm);
+    virtual DrmStatus setCipherAlgorithm(Vector<uint8_t> const &sessionId,
+                                         String8 const &algorithm);
 
-    virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId,
-                                     String8 const &algorithm);
+    virtual DrmStatus setMacAlgorithm(Vector<uint8_t> const &sessionId,
+                                      String8 const &algorithm);
 
-    virtual status_t encrypt(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus encrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output);
+
+    virtual DrmStatus decrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output);
+
+    virtual DrmStatus sign(Vector<uint8_t> const &sessionId,
+                           Vector<uint8_t> const &keyId,
+                           Vector<uint8_t> const &message,
+                           Vector<uint8_t> &signature);
+
+    virtual DrmStatus verify(Vector<uint8_t> const &sessionId,
                              Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output);
-
-    virtual status_t decrypt(Vector<uint8_t> const &sessionId,
-                             Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output);
-
-    virtual status_t sign(Vector<uint8_t> const &sessionId,
-                          Vector<uint8_t> const &keyId,
-                          Vector<uint8_t> const &message,
-                          Vector<uint8_t> &signature);
-
-    virtual status_t verify(Vector<uint8_t> const &sessionId,
-                            Vector<uint8_t> const &keyId,
-                            Vector<uint8_t> const &message,
-                            Vector<uint8_t> const &signature,
-                            bool &match);
-
-    virtual status_t signRSA(Vector<uint8_t> const &sessionId,
-                             String8 const &algorithm,
                              Vector<uint8_t> const &message,
-                             Vector<uint8_t> const &wrappedKey,
-                             Vector<uint8_t> &signature);
+                             Vector<uint8_t> const &signature,
+                             bool &match);
 
-    virtual status_t setListener(const sp<IDrmClient>& listener);
+    virtual DrmStatus signRSA(Vector<uint8_t> const &sessionId,
+                              String8 const &algorithm,
+                              Vector<uint8_t> const &message,
+                              Vector<uint8_t> const &wrappedKey,
+                              Vector<uint8_t> &signature);
 
-    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
+    virtual DrmStatus setListener(const sp<IDrmClient>& listener);
 
-    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool *required) const;
+    virtual DrmStatus requiresSecureDecoder(const char *mime, bool *required) const;
 
-    virtual status_t setPlaybackId(
+    virtual DrmStatus requiresSecureDecoder(const char *mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool *required) const;
+
+    virtual DrmStatus setPlaybackId(
             Vector<uint8_t> const &sessionId,
             const char *playbackId);
 
-    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
-    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
+    virtual DrmStatus getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+    virtual DrmStatus getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
     // Methods of IDrmPluginListener
     Return<void> sendEvent(EventType eventType,
@@ -238,10 +238,10 @@
 
     std::string reportPluginMetrics() const;
     std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
-    status_t getPropertyStringInternal(String8 const &name, String8 &value) const;
-    status_t getPropertyByteArrayInternal(String8 const &name,
+    DrmStatus getPropertyStringInternal(String8 const &name, String8 &value) const;
+    DrmStatus getPropertyByteArrayInternal(String8 const &name,
                                           Vector<uint8_t> &value) const;
-    status_t matchMimeTypeAndSecurityLevel(const sp<IDrmFactory> &factory,
+    DrmStatus matchMimeTypeAndSecurityLevel(const sp<IDrmFactory> &factory,
                                            const uint8_t uuid[16],
                                            const String8 &mimeType,
                                            DrmPlugin::SecurityLevel level,
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalListener.h b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
index 22361ad..0eed929 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalListener.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
@@ -27,7 +27,7 @@
 
 namespace android {
 struct DrmHalListener : public BnDrmPluginListener {
-    explicit DrmHalListener(MediaDrmMetrics* mMetrics);
+    explicit DrmHalListener(const std::shared_ptr<MediaDrmMetrics>& in_metrics);
     ~DrmHalListener();
     ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
                                  const std::vector<uint8_t>& in_sessionId,
@@ -38,9 +38,9 @@
                                       const std::vector<KeyStatusAidl>& in_keyStatusList,
                                       bool in_hasNewUsableKey);
     ::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
-    void setListener(sp<IDrmClient> listener);
+    void setListener(const sp<IDrmClient>& listener);
 private:
-    mutable MediaDrmMetrics* mMetrics;
+    std::shared_ptr<MediaDrmMetrics> mMetrics;
     sp<IDrmClient> mListener;
     mutable Mutex mEventLock;
     mutable Mutex mNotifyLock;
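
Turning mMetrics into a std::shared_ptr here and in DrmHalAidl.h above lets the HAL wrapper and its listener share one MediaDrmMetrics instance instead of the listener holding a raw pointer. A minimal sketch of the intended wiring, assuming MediaDrmMetrics stays default-constructible and that DrmHalListener, as an NDK AIDL Bn class, is created through SharedRefBase::make:

    // Sketch only: both objects keep the same metrics object alive.
    std::shared_ptr<MediaDrmMetrics> metrics = std::make_shared<MediaDrmMetrics>();
    std::shared_ptr<DrmHalListener> listener =
            ::ndk::SharedRefBase::make<DrmHalListener>(metrics);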
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
new file mode 100644
index 0000000..e72f7f7
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
@@ -0,0 +1,229 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediadrm/DrmStatus.h>
+#include <mediadrm/IDrm.h>
+#include <sys/random.h>
+#include <map>
+#include <mutex>
+
+#ifndef DRM_METRICS_LOGGER_H
+#define DRM_METRICS_LOGGER_H
+
+namespace android {
+
+// Keep enums in sync with frameworks/proto_logging/stats/enums/media/drm/enums.proto
+
+enum {
+    ENUM_DRM_UNKNOWN = 0,
+    ENUM_DRM_NO_LICENSE = 1,
+    ENUM_DRM_LICENSE_EXPIRED = 2,
+    ENUM_DRM_RESOURCE_BUSY = 3,
+    ENUM_DRM_INSUFFICIENT_OUTPUT_PROTECTION = 4,
+    ENUM_DRM_SESSION_NOT_OPENED = 5,
+    ENUM_DRM_CANNOT_HANDLE = 6,
+    ENUM_DRM_INSUFFICIENT_SECURITY = 7,
+    ENUM_DRM_FRAME_TOO_LARGE = 8,
+    ENUM_DRM_SESSION_LOST_STATE = 9,
+    ENUM_DRM_CERTIFICATE_MALFORMED = 10,
+    ENUM_DRM_CERTIFICATE_MISSING = 11,
+    ENUM_DRM_CRYPTO_LIBRARY = 12,
+    ENUM_DRM_GENERIC_OEM = 13,
+    ENUM_DRM_GENERIC_PLUGIN = 14,
+    ENUM_DRM_INIT_DATA = 15,
+    ENUM_DRM_KEY_NOT_LOADED = 16,
+    ENUM_DRM_LICENSE_PARSE = 17,
+    ENUM_DRM_LICENSE_POLICY = 18,
+    ENUM_DRM_LICENSE_RELEASE = 19,
+    ENUM_DRM_LICENSE_REQUEST_REJECTED = 20,
+    ENUM_DRM_LICENSE_RESTORE = 21,
+    ENUM_DRM_LICENSE_STATE = 22,
+    ENUM_DRM_MEDIA_FRAMEWORK = 23,
+    ENUM_DRM_PROVISIONING_CERTIFICATE = 24,
+    ENUM_DRM_PROVISIONING_CONFIG = 25,
+    ENUM_DRM_PROVISIONING_PARSE = 26,
+    ENUM_DRM_PROVISIONING_REQUEST_REJECTED = 27,
+    ENUM_DRM_PROVISIONING_RETRY = 28,
+    ENUM_DRM_RESOURCE_CONTENTION = 29,
+    ENUM_DRM_SECURE_STOP_RELEASE = 30,
+    ENUM_DRM_STORAGE_READ = 31,
+    ENUM_DRM_STORAGE_WRITE = 32,
+    ENUM_DRM_ZERO_SUBSAMPLES = 33,
+    ENUM_DRM_INVALID_STATE = 34,
+    ENUM_BAD_VALUE = 35,
+    ENUM_DRM_NOT_PROVISIONED = 36,
+    ENUM_DRM_DEVICE_REVOKED = 37,
+    ENUM_DRM_DECRYPT = 38,
+    ENUM_DEAD_OBJECT = 39,
+};
+
+enum {
+    JSecurityLevelUnknown = 0,
+    JSecurityLevelSwSecureCrypto = 1,
+    JSecurityLevelSwSecureDecode = 2,
+    JSecurityLevelHwSecureCrypto = 3,
+    JSecurityLevelHwSecureDecode = 4,
+    JSecurityLevelHwSecureAll = 5,
+    JSecurityLevelMax = 6,
+};
+
+struct SessionContext {
+    std::string mNonce;
+    DrmPlugin::SecurityLevel mTargetSecurityLevel;
+    DrmPlugin::SecurityLevel mActualSecurityLevel;
+    std::string mVersion;
+};
+
+class DrmMetricsLogger : public IDrm {
+  public:
+    DrmMetricsLogger(IDrmFrontend);
+
+    virtual ~DrmMetricsLogger();
+
+    virtual DrmStatus initCheck() const;
+
+    virtual DrmStatus isCryptoSchemeSupported(const uint8_t uuid[IDRM_UUID_SIZE],
+                                              const String8& mimeType,
+                                              DrmPlugin::SecurityLevel securityLevel,
+                                              bool* result);
+
+    virtual DrmStatus createPlugin(const uint8_t uuid[IDRM_UUID_SIZE],
+                                   const String8& appPackageName);
+
+    virtual DrmStatus destroyPlugin();
+
+    virtual DrmStatus openSession(DrmPlugin::SecurityLevel securityLevel,
+                                  Vector<uint8_t>& sessionId);
+
+    virtual DrmStatus closeSession(Vector<uint8_t> const& sessionId);
+
+    virtual DrmStatus getKeyRequest(Vector<uint8_t> const& sessionId,
+                                    Vector<uint8_t> const& initData, String8 const& mimeType,
+                                    DrmPlugin::KeyType keyType,
+                                    KeyedVector<String8, String8> const& optionalParameters,
+                                    Vector<uint8_t>& request, String8& defaultUrl,
+                                    DrmPlugin::KeyRequestType* keyRequestType);
+
+    virtual DrmStatus provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                         Vector<uint8_t> const& response,
+                                         Vector<uint8_t>& keySetId);
+
+    virtual DrmStatus removeKeys(Vector<uint8_t> const& keySetId);
+
+    virtual DrmStatus restoreKeys(Vector<uint8_t> const& sessionId,
+                                  Vector<uint8_t> const& keySetId);
+
+    virtual DrmStatus queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                     KeyedVector<String8, String8>& infoMap) const;
+
+    virtual DrmStatus getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                          Vector<uint8_t>& request, String8& defaultUrl);
+
+    virtual DrmStatus provideProvisionResponse(Vector<uint8_t> const& response,
+                                               Vector<uint8_t>& certificate,
+                                               Vector<uint8_t>& wrappedKey);
+
+    virtual DrmStatus getSecureStops(List<Vector<uint8_t>>& secureStops);
+    virtual DrmStatus getSecureStopIds(List<Vector<uint8_t>>& secureStopIds);
+    virtual DrmStatus getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop);
+
+    virtual DrmStatus releaseSecureStops(Vector<uint8_t> const& ssRelease);
+    virtual DrmStatus removeSecureStop(Vector<uint8_t> const& ssid);
+    virtual DrmStatus removeAllSecureStops();
+
+    virtual DrmStatus getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                                    DrmPlugin::HdcpLevel* maxLevel) const;
+    virtual DrmStatus getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const;
+    virtual DrmStatus getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                       DrmPlugin::SecurityLevel* level) const;
+
+    virtual DrmStatus getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const;
+    virtual DrmStatus removeOfflineLicense(Vector<uint8_t> const& keySetId);
+    virtual DrmStatus getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                             DrmPlugin::OfflineLicenseState* licenseState) const;
+
+    virtual DrmStatus getPropertyString(String8 const& name, String8& value) const;
+    virtual DrmStatus getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const;
+    virtual DrmStatus setPropertyString(String8 const& name, String8 const& value) const;
+    virtual DrmStatus setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const;
+
+    virtual DrmStatus getMetrics(const sp<IDrmMetricsConsumer>& consumer);
+
+    virtual DrmStatus setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                         String8 const& algorithm);
+
+    virtual DrmStatus setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
+
+    virtual DrmStatus encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output);
+
+    virtual DrmStatus decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                              Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                              Vector<uint8_t>& output);
+
+    virtual DrmStatus sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                           Vector<uint8_t> const& message, Vector<uint8_t>& signature);
+
+    virtual DrmStatus verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                             bool& match);
+
+    virtual DrmStatus signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                              Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                              Vector<uint8_t>& signature);
+
+    virtual DrmStatus setListener(const sp<IDrmClient>& listener);
+
+    virtual DrmStatus requiresSecureDecoder(const char* mime, bool* required) const;
+
+    virtual DrmStatus requiresSecureDecoder(const char* mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool* required) const;
+
+    virtual DrmStatus setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
+
+    virtual DrmStatus getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+
+    virtual DrmStatus getSupportedSchemes(std::vector<uint8_t>& schemes) const;
+
+    void reportMediaDrmCreated() const;
+
+    void reportMediaDrmSessionOpened(const std::vector<uint8_t>& sessionId) const;
+
+    void reportMediaDrmErrored(
+            const DrmStatus& error_code, const char* api,
+            const std::vector<uint8_t>& sessionId = std::vector<uint8_t>()) const;
+
+    DrmStatus generateNonce(std::string* out, size_t size, const char* api);
+
+  private:
+    static const size_t kNonceSize = 16;
+    static const std::map<std::array<int64_t, 2>, std::string> kUuidSchemeMap;
+    sp<IDrm> mImpl;
+    std::array<int64_t, 2> mUuid;
+    std::string mObjNonce;
+    std::string mScheme;
+    std::string mVersion;
+    std::map<std::vector<uint8_t>, SessionContext> mSessionMap;
+    mutable std::mutex mSessionMapMutex;
+    IDrmFrontend mFrontend;
+    DISALLOW_EVIL_CONSTRUCTORS(DrmMetricsLogger);
+};
+
+}  // namespace android
+
+#endif  // DRM_METRICS_LOGGER_H
\ No newline at end of file
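
The DrmMetricsLogger declared above wraps another IDrm instance (mImpl) and mirrors its whole interface, reporting failures alongside the forwarded call. A minimal sketch of the forwarding-plus-reporting pattern the header implies is shown below; it is illustrative only, the real method bodies live in DrmMetricsLogger.cpp.

// Illustrative sketch, not copied from the patch: every IDrm call is
// delegated to the wrapped instance and failures are reported with the
// API name so they can be attributed in metrics.
#include <mediadrm/DrmMetricsLogger.h>

namespace android {

DrmStatus DrmMetricsLogger::destroyPlugin() {
    DrmStatus status = mImpl->destroyPlugin();   // forward to the real IDrm
    if (status != OK) {
        reportMediaDrmErrored(status, __func__); // attribute the failure
    }
    return status;
}

}  // namespace android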
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index ee2be6a..ff5f63b 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -16,6 +16,7 @@
 
 #include <media/stagefright/foundation/ABase.h>
 #include <media/drm/DrmAPI.h>
+#include <mediadrm/DrmStatus.h>
 #include <mediadrm/IDrmClient.h>
 #include <mediadrm/IDrmMetricsConsumer.h>
 
@@ -23,6 +24,8 @@
 
 #define ANDROID_IDRM_H_
 
+#define IDRM_UUID_SIZE (16)
+
 namespace android {
 namespace hardware {
 namespace drm {
@@ -32,6 +35,13 @@
 }  // namespace drm
 }  // namespace hardware
 
+enum IDrmFrontend : int32_t {
+    IDRM_UNKNOWN = 0,
+    IDRM_JNI = 1,
+    IDRM_NDK = 2,
+    IDRM_NUPLAYER = 3,
+};
+
 namespace drm = ::android::hardware::drm;
 
 struct AString;
@@ -40,24 +50,23 @@
 
     virtual ~IDrm() {}
 
-    virtual status_t initCheck() const = 0;
+    virtual DrmStatus initCheck() const = 0;
 
-    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16],
-                                             const String8 &mimeType,
-                                             DrmPlugin::SecurityLevel securityLevel,
-                                             bool *result) = 0;
+    virtual DrmStatus isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                              DrmPlugin::SecurityLevel securityLevel,
+                                              bool* result) = 0;
 
-    virtual status_t createPlugin(const uint8_t uuid[16],
-                                  const String8 &appPackageName) = 0;
+    virtual DrmStatus createPlugin(const uint8_t uuid[16],
+                                   const String8 &appPackageName) = 0;
 
-    virtual status_t destroyPlugin() = 0;
+    virtual DrmStatus destroyPlugin() = 0;
 
-    virtual status_t openSession(DrmPlugin::SecurityLevel securityLevel,
+    virtual DrmStatus openSession(DrmPlugin::SecurityLevel securityLevel,
             Vector<uint8_t> &sessionId) = 0;
 
-    virtual status_t closeSession(Vector<uint8_t> const &sessionId) = 0;
+    virtual DrmStatus closeSession(Vector<uint8_t> const &sessionId) = 0;
 
-    virtual status_t
+    virtual DrmStatus
         getKeyRequest(Vector<uint8_t> const &sessionId,
                       Vector<uint8_t> const &initData,
                       String8 const &mimeType, DrmPlugin::KeyType keyType,
@@ -65,107 +74,108 @@
                       Vector<uint8_t> &request, String8 &defaultUrl,
                       DrmPlugin::KeyRequestType *keyRequestType) = 0;
 
-    virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
-                                        Vector<uint8_t> const &response,
-                                        Vector<uint8_t> &keySetId) = 0;
+    virtual DrmStatus provideKeyResponse(Vector<uint8_t> const &sessionId,
+                                         Vector<uint8_t> const &response,
+                                         Vector<uint8_t> &keySetId) = 0;
 
-    virtual status_t removeKeys(Vector<uint8_t> const &keySetId) = 0;
+    virtual DrmStatus removeKeys(Vector<uint8_t> const &keySetId) = 0;
 
-    virtual status_t restoreKeys(Vector<uint8_t> const &sessionId,
-                                 Vector<uint8_t> const &keySetId) = 0;
+    virtual DrmStatus restoreKeys(Vector<uint8_t> const &sessionId,
+                                  Vector<uint8_t> const &keySetId) = 0;
 
-    virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
-                                    KeyedVector<String8, String8> &infoMap) const = 0;
+    virtual DrmStatus queryKeyStatus(Vector<uint8_t> const &sessionId,
+                                     KeyedVector<String8, String8> &infoMap) const = 0;
 
-    virtual status_t getProvisionRequest(String8 const &certType,
-                                         String8 const &certAuthority,
-                                         Vector<uint8_t> &request,
-                                         String8 &defaulUrl) = 0;
+    virtual DrmStatus getProvisionRequest(String8 const &certType,
+                                          String8 const &certAuthority,
+                                          Vector<uint8_t> &request,
+                                          String8 &defaultUrl) = 0;
 
-    virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
-                                              Vector<uint8_t> &certificate,
-                                              Vector<uint8_t> &wrappedKey) = 0;
+    virtual DrmStatus provideProvisionResponse(Vector<uint8_t> const &response,
+                                               Vector<uint8_t> &certificate,
+                                               Vector<uint8_t> &wrappedKey) = 0;
 
-    virtual status_t getSecureStops(List<Vector<uint8_t>> &secureStops) = 0;
-    virtual status_t getSecureStopIds(List<Vector<uint8_t>> &secureStopIds) = 0;
-    virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) = 0;
+    virtual DrmStatus getSecureStops(List<Vector<uint8_t>> &secureStops) = 0;
+    virtual DrmStatus getSecureStopIds(List<Vector<uint8_t>> &secureStopIds) = 0;
+    virtual DrmStatus getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) = 0;
 
-    virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) = 0;
-    virtual status_t removeSecureStop(Vector<uint8_t> const &ssid) = 0;
-    virtual status_t removeAllSecureStops() = 0;
+    virtual DrmStatus releaseSecureStops(Vector<uint8_t> const &ssRelease) = 0;
+    virtual DrmStatus removeSecureStop(Vector<uint8_t> const &ssid) = 0;
+    virtual DrmStatus removeAllSecureStops() = 0;
 
-    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
+    virtual DrmStatus getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
             DrmPlugin::HdcpLevel *maxLevel)
             const = 0;
-    virtual status_t getNumberOfSessions(uint32_t *currentSessions,
+    virtual DrmStatus getNumberOfSessions(uint32_t *currentSessions,
             uint32_t *maxSessions) const = 0;
-    virtual status_t getSecurityLevel(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus getSecurityLevel(Vector<uint8_t> const &sessionId,
             DrmPlugin::SecurityLevel *level) const = 0;
 
-    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const = 0;
-    virtual status_t removeOfflineLicense(Vector<uint8_t> const &keySetId) = 0;
-    virtual status_t getOfflineLicenseState(Vector<uint8_t> const &keySetId,
+    virtual DrmStatus getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const = 0;
+    virtual DrmStatus removeOfflineLicense(Vector<uint8_t> const &keySetId) = 0;
+    virtual DrmStatus getOfflineLicenseState(Vector<uint8_t> const &keySetId,
             DrmPlugin::OfflineLicenseState *licenseState) const = 0;
 
-    virtual status_t getPropertyString(String8 const &name, String8 &value) const = 0;
-    virtual status_t getPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> &value) const = 0;
-    virtual status_t setPropertyString(String8 const &name,
-                                       String8 const &value ) const = 0;
-    virtual status_t setPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> const &value) const = 0;
+    virtual DrmStatus getPropertyString(String8 const &name, String8 &value) const = 0;
+    virtual DrmStatus getPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> &value) const = 0;
+    virtual DrmStatus setPropertyString(String8 const &name,
+                                        String8 const &value ) const = 0;
+    virtual DrmStatus setPropertyByteArray(String8 const &name,
+                                           Vector<uint8_t> const &value) const = 0;
 
-    virtual status_t getMetrics(const sp<IDrmMetricsConsumer> &consumer) = 0;
+    virtual DrmStatus getMetrics(const sp<IDrmMetricsConsumer> &consumer) = 0;
 
-    virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId,
-                                        String8 const &algorithm) = 0;
+    virtual DrmStatus setCipherAlgorithm(Vector<uint8_t> const &sessionId,
+                                         String8 const &algorithm) = 0;
 
-    virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId,
-                                     String8 const &algorithm) = 0;
+    virtual DrmStatus setMacAlgorithm(Vector<uint8_t> const &sessionId,
+                                      String8 const &algorithm) = 0;
 
-    virtual status_t encrypt(Vector<uint8_t> const &sessionId,
+    virtual DrmStatus encrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output) = 0;
+
+    virtual DrmStatus decrypt(Vector<uint8_t> const &sessionId,
+                              Vector<uint8_t> const &keyId,
+                              Vector<uint8_t> const &input,
+                              Vector<uint8_t> const &iv,
+                              Vector<uint8_t> &output) = 0;
+
+    virtual DrmStatus sign(Vector<uint8_t> const &sessionId,
+                           Vector<uint8_t> const &keyId,
+                           Vector<uint8_t> const &message,
+                           Vector<uint8_t> &signature) = 0;
+
+    virtual DrmStatus verify(Vector<uint8_t> const &sessionId,
                              Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output) = 0;
-
-    virtual status_t decrypt(Vector<uint8_t> const &sessionId,
-                             Vector<uint8_t> const &keyId,
-                             Vector<uint8_t> const &input,
-                             Vector<uint8_t> const &iv,
-                             Vector<uint8_t> &output) = 0;
-
-    virtual status_t sign(Vector<uint8_t> const &sessionId,
-                          Vector<uint8_t> const &keyId,
-                          Vector<uint8_t> const &message,
-                          Vector<uint8_t> &signature) = 0;
-
-    virtual status_t verify(Vector<uint8_t> const &sessionId,
-                            Vector<uint8_t> const &keyId,
-                            Vector<uint8_t> const &message,
-                            Vector<uint8_t> const &signature,
-                            bool &match) = 0;
-
-    virtual status_t signRSA(Vector<uint8_t> const &sessionId,
-                             String8 const &algorithm,
                              Vector<uint8_t> const &message,
-                             Vector<uint8_t> const &wrappedKey,
-                             Vector<uint8_t> &signature) = 0;
+                             Vector<uint8_t> const &signature,
+                             bool &match) = 0;
 
-    virtual status_t setListener(const sp<IDrmClient>& listener) = 0;
+    virtual DrmStatus signRSA(Vector<uint8_t> const &sessionId,
+                              String8 const &algorithm,
+                              Vector<uint8_t> const &message,
+                              Vector<uint8_t> const &wrappedKey,
+                              Vector<uint8_t> &signature) = 0;
 
-    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const = 0;
+    virtual DrmStatus setListener(const sp<IDrmClient>& listener) = 0;
 
-    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
-                                           bool *required) const = 0;
+    virtual DrmStatus requiresSecureDecoder(const char *mime, bool *required) const = 0;
 
-    virtual status_t setPlaybackId(
+    virtual DrmStatus requiresSecureDecoder(const char *mime,
+                                            DrmPlugin::SecurityLevel securityLevel,
+                                            bool *required) const = 0;
+
+    virtual DrmStatus setPlaybackId(
             Vector<uint8_t> const &sessionId,
             const char *playbackId) = 0;
 
-    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
+    virtual DrmStatus getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
 
-    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const = 0;
+    virtual DrmStatus getSupportedSchemes(std::vector<uint8_t> &schemes) const = 0;
 
 protected:
     IDrm() {}
diff --git a/drm/libmediadrm/interface/mediadrm/DrmStatus.h b/drm/libmediadrm/interface/mediadrm/DrmStatus.h
new file mode 100644
index 0000000..15826ca
--- /dev/null
+++ b/drm/libmediadrm/interface/mediadrm/DrmStatus.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_STATUS_
+#define DRM_STATUS_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/MediaErrors.h>
+#include <utils/Errors.h>
+
+#include <stdint.h>
+#include <string>
+
+namespace android {
+
+struct DrmStatus {
+  public:
+    DrmStatus(status_t status, int32_t cdmErr = 0, int32_t oemErr = 0,
+              int32_t ctx = 0, std::string errMsg = "")
+        : mStatus(status), mCdmErr(cdmErr), mOemErr(oemErr),
+          mCtx(ctx), mErrMsg(errMsg) {}
+    DrmStatus(status_t err, const char *msg);
+    operator status_t() const { return mStatus; }
+    int32_t getCdmErr() const { return mCdmErr; }
+    int32_t getOemErr() const { return mOemErr; }
+    int32_t getContext() const { return mCtx; }
+    std::string getErrorMessage() const { return mErrMsg; }
+    bool operator==(status_t other) const { return mStatus == other; }
+    bool operator!=(status_t other) const { return mStatus != other; }
+
+  private:
+    status_t mStatus{};
+    int32_t mCdmErr{}, mOemErr{}, mCtx{};
+    std::string mErrMsg;
+};
+
+}  // namespace android
+
+#endif  // DRM_STATUS_
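
DrmStatus is implicitly convertible to and comparable with status_t, so it can flow through existing call sites unchanged while carrying the extra CDM error, OEM error, context, and message fields. A short usage sketch (not part of the patch) follows; the error constant and logged values are illustrative.

// Usage sketch: DrmStatus behaves like a status_t at call sites but can
// carry vendor-specific detail alongside it.
#define LOG_TAG "DrmStatusExample"
#include <media/stagefright/MediaErrors.h>
#include <mediadrm/DrmStatus.h>
#include <utils/Log.h>

using namespace android;

status_t example() {
    DrmStatus err(ERROR_DRM_CANNOT_HANDLE,
                  /*cdmErr=*/5, /*oemErr=*/42, /*ctx=*/7, "session not found");
    if (err != OK) {
        // The extra detail travels with the status without changing APIs
        // that still expect a plain status_t.
        ALOGE("drm error: cdm=%d oem=%d ctx=%d msg=%s",
              err.getCdmErr(), err.getOemErr(), err.getContext(),
              err.getErrorMessage().c_str());
    }
    return err;  // implicit conversion back to status_t
}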
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 980ce55..2510f4e 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -22,6 +22,8 @@
 #include <android/hardware/drm/1.4/IDrmPlugin.h>
 #include <android/hardware/drm/1.4/types.h>
 #include <media/stagefright/MediaErrors.h>
+#include <mediadrm/DrmMetricsLogger.h>
+#include <mediadrm/DrmStatus.h>
 #include <utils/Errors.h>  // for status_t
 #include <utils/Log.h>
 #include <utils/String8.h>
@@ -35,6 +37,7 @@
 #include <ctime>
 #include <deque>
 #include <endian.h>
+#include <inttypes.h>
 #include <iterator>
 #include <mutex>
 #include <string>
@@ -103,9 +106,9 @@
 void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
     uint64_t uuid2[2] = {};
     std::memcpy(uuid2, uuid, sizeof(uuid2));
-    std::string uuidFmt("uuid=[%lx %lx] ");
+    std::string uuidFmt("uuid=[%" PRIx64 " %" PRIx64 "] ");
     uuidFmt += fmt;
-    LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+    LogToBuffer(level, uuidFmt.c_str(), betoh64(uuid2[0]), betoh64(uuid2[1]), args...);
 }
 
 #ifndef LOG2BE
@@ -118,7 +121,7 @@
 
 bool UseDrmService();
 
-sp<IDrm> MakeDrm(status_t *pstatus = nullptr);
+sp<IDrm> MakeDrm(IDrmFrontend frontend = IDRM_JNI, status_t* pstatus = nullptr);
 
 sp<ICrypto> MakeCrypto(status_t *pstatus = nullptr);
 
@@ -198,98 +201,7 @@
     return toStatusT_1_4(err);
 }
 
-inline status_t statusAidlToStatusT(::ndk::ScopedAStatus &statusAidl) {
-    if (statusAidl.isOk()) return OK;
-    if (statusAidl.getExceptionCode() != EX_SERVICE_SPECIFIC) return DEAD_OBJECT;
-    auto status = static_cast<StatusAidl>(statusAidl.getServiceSpecificError());
-    switch (status) {
-    case StatusAidl::OK:
-        return OK;
-    case StatusAidl::BAD_VALUE:
-        return BAD_VALUE;
-    case StatusAidl::ERROR_DRM_CANNOT_HANDLE:
-        return ERROR_DRM_CANNOT_HANDLE;
-    case StatusAidl::ERROR_DRM_DECRYPT:
-        return ERROR_DRM_DECRYPT;
-    case StatusAidl::ERROR_DRM_DEVICE_REVOKED:
-        return ERROR_DRM_DEVICE_REVOKED;
-    case StatusAidl::ERROR_DRM_FRAME_TOO_LARGE:
-        return ERROR_DRM_FRAME_TOO_LARGE;
-    case StatusAidl::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
-        return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
-    case StatusAidl::ERROR_DRM_INSUFFICIENT_SECURITY:
-        return ERROR_DRM_INSUFFICIENT_SECURITY;
-    case StatusAidl::ERROR_DRM_INVALID_STATE:
-        return ERROR_DRM_INVALID_STATE;
-    case StatusAidl::ERROR_DRM_LICENSE_EXPIRED:
-        return ERROR_DRM_LICENSE_EXPIRED;
-    case StatusAidl::ERROR_DRM_NO_LICENSE:
-        return ERROR_DRM_NO_LICENSE;
-    case StatusAidl::ERROR_DRM_NOT_PROVISIONED:
-        return ERROR_DRM_NOT_PROVISIONED;
-    case StatusAidl::ERROR_DRM_RESOURCE_BUSY:
-        return ERROR_DRM_RESOURCE_BUSY;
-    case StatusAidl::ERROR_DRM_RESOURCE_CONTENTION:
-        return ERROR_DRM_RESOURCE_CONTENTION;
-    case StatusAidl::ERROR_DRM_SESSION_LOST_STATE:
-        return ERROR_DRM_SESSION_LOST_STATE;
-    case StatusAidl::ERROR_DRM_SESSION_NOT_OPENED:
-        return ERROR_DRM_SESSION_NOT_OPENED;
-
-    // New in S / drm@1.4:
-    case StatusAidl::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
-        return ERROR_DRM_ZERO_SUBSAMPLES;
-    case StatusAidl::CRYPTO_LIBRARY_ERROR:
-        return ERROR_DRM_CRYPTO_LIBRARY;
-    case StatusAidl::GENERAL_OEM_ERROR:
-        return ERROR_DRM_GENERIC_OEM;
-    case StatusAidl::GENERAL_PLUGIN_ERROR:
-        return ERROR_DRM_GENERIC_PLUGIN;
-    case StatusAidl::INIT_DATA_INVALID:
-        return ERROR_DRM_INIT_DATA;
-    case StatusAidl::KEY_NOT_LOADED:
-        return ERROR_DRM_KEY_NOT_LOADED;
-    case StatusAidl::LICENSE_PARSE_ERROR:
-        return ERROR_DRM_LICENSE_PARSE;
-    case StatusAidl::LICENSE_POLICY_ERROR:
-        return ERROR_DRM_LICENSE_POLICY;
-    case StatusAidl::LICENSE_RELEASE_ERROR:
-        return ERROR_DRM_LICENSE_RELEASE;
-    case StatusAidl::LICENSE_REQUEST_REJECTED:
-        return ERROR_DRM_LICENSE_REQUEST_REJECTED;
-    case StatusAidl::LICENSE_RESTORE_ERROR:
-        return ERROR_DRM_LICENSE_RESTORE;
-    case StatusAidl::LICENSE_STATE_ERROR:
-        return ERROR_DRM_LICENSE_STATE;
-    case StatusAidl::MALFORMED_CERTIFICATE:
-        return ERROR_DRM_CERTIFICATE_MALFORMED;
-    case StatusAidl::MEDIA_FRAMEWORK_ERROR:
-        return ERROR_DRM_MEDIA_FRAMEWORK;
-    case StatusAidl::MISSING_CERTIFICATE:
-        return ERROR_DRM_CERTIFICATE_MISSING;
-    case StatusAidl::PROVISIONING_CERTIFICATE_ERROR:
-        return ERROR_DRM_PROVISIONING_CERTIFICATE;
-    case StatusAidl::PROVISIONING_CONFIGURATION_ERROR:
-        return ERROR_DRM_PROVISIONING_CONFIG;
-    case StatusAidl::PROVISIONING_PARSE_ERROR:
-        return ERROR_DRM_PROVISIONING_PARSE;
-    case StatusAidl::PROVISIONING_REQUEST_REJECTED:
-        return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
-    case StatusAidl::RETRYABLE_PROVISIONING_ERROR:
-        return ERROR_DRM_PROVISIONING_RETRY;
-    case StatusAidl::SECURE_STOP_RELEASE_ERROR:
-        return ERROR_DRM_SECURE_STOP_RELEASE;
-    case StatusAidl::STORAGE_READ_FAILURE:
-        return ERROR_DRM_STORAGE_READ;
-    case StatusAidl::STORAGE_WRITE_FAILURE:
-        return ERROR_DRM_STORAGE_WRITE;
-
-    case StatusAidl::ERROR_DRM_UNKNOWN:
-    default:
-        return ERROR_DRM_UNKNOWN;
-    }
-    return ERROR_DRM_UNKNOWN;
-}
+DrmStatus statusAidlToDrmStatus(::ndk::ScopedAStatus& statusAidl);
 
 template<typename T, typename U>
 status_t GetLogMessagesAidl(const std::shared_ptr<U> &obj, Vector<::V1_4::LogMessage> &logs) {
@@ -370,17 +282,19 @@
               });
 
     logs.appendVector(allLogs);
-    return OK;
+    return toStatusT(err);
 }
 
-std::string GetExceptionMessage(status_t err, const char *msg,
+std::string GetExceptionMessage(const DrmStatus & err, const char *defaultMsg,
                                 const Vector<::V1_4::LogMessage> &logs);
 
 template<typename T>
-std::string GetExceptionMessage(status_t err, const char *msg, const sp<T> &iface) {
+std::string GetExceptionMessage(const DrmStatus &err, const char *defaultMsg, const sp<T> &iface) {
     Vector<::V1_4::LogMessage> logs;
-    iface->getLogMessages(logs);
-    return GetExceptionMessage(err, msg, logs);
+    if (iface != NULL) {
+        iface->getLogMessages(logs);
+    }
+    return GetExceptionMessage(err, defaultMsg, logs);
 }
 
 } // namespace DrmUtils
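
DrmUtils::MakeDrm now takes the calling frontend so the returned IDrm can attribute metrics to JNI, NDK, or NuPlayer callers. A hedged caller sketch is below; the error handling is illustrative and not taken from the patch.

// Sketch only: obtaining an IDrm through the updated DrmUtils::MakeDrm(),
// which tags the instance with the calling frontend for metrics.
#include <mediadrm/DrmUtils.h>

using namespace android;

sp<IDrm> makeDrmForNdk() {
    status_t status = OK;
    sp<IDrm> drm = DrmUtils::MakeDrm(IDRM_NDK, &status);
    if (drm == nullptr || status != OK) {
        return nullptr;  // creation failed; status holds the reason
    }
    return drm;
}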
diff --git a/drm/libmediadrm/interface/mediadrm/ICrypto.h b/drm/libmediadrm/interface/mediadrm/ICrypto.h
index 2c4df60..4087186 100644
--- a/drm/libmediadrm/interface/mediadrm/ICrypto.h
+++ b/drm/libmediadrm/interface/mediadrm/ICrypto.h
@@ -17,6 +17,7 @@
 #include <cutils/native_handle.h>
 #include <media/hardware/CryptoAPI.h>
 #include <media/stagefright/foundation/ABase.h>
+#include <mediadrm/DrmStatus.h>
 #include <utils/RefBase.h>
 #include <utils/StrongPointer.h>
 
@@ -65,7 +66,7 @@
 
     virtual void notifyResolution(uint32_t width, uint32_t height) = 0;
 
-    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId) = 0;
+    virtual DrmStatus setMediaDrmSession(const Vector<uint8_t> &sessionId) = 0;
 
     enum DestinationType {
         kDestinationTypeSharedMemory, // non-secure
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 2d1f741..eaf5051 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -37,6 +37,7 @@
     static_libs: [
         "android.hardware.common-V2-ndk",
         "libclearkeybase",
+        "libjsoncpp",
     ],
 
     local_include_dirs: ["include"],
@@ -69,3 +70,56 @@
         "android.hardware.drm-service.clearkey",
     ],
 }
+
+cc_defaults {
+    name: "fuzz_aidl_clearkey_service_defaults",
+
+    srcs: [
+        "CreatePluginFactories.cpp",
+        "CryptoPlugin.cpp",
+        "DrmFactory.cpp",
+        "DrmPlugin.cpp",
+    ],
+
+    relative_install_path: "hw",
+
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcrypto",
+        "liblog",
+        "libprotobuf-cpp-lite",
+        "libutils",
+        "android.hardware.drm-V1-ndk",
+    ],
+
+    static_libs: [
+        "android.hardware.common-V2-ndk",
+        "libclearkeybase_fuzz",
+        "libjsoncpp",
+    ],
+
+    local_include_dirs: ["include"],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
+
+cc_fuzz {
+    name: "android.hardware.drm-service.clearkey.aidl_fuzzer",
+    defaults: [
+        "fuzz_aidl_clearkey_service_defaults",
+        "service_fuzzer_defaults",
+    ],
+    srcs: ["fuzzer.cpp"],
+    fuzz_config: {
+        cc: [
+            "hamzeh@google.com",
+        ],
+    },
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
index 201cf02..a63471f 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
@@ -137,6 +137,8 @@
         *_aidl_return = static_cast<ssize_t>(offset);
         return toNdkScopedAStatus(Status::OK);
     } else if (in_args.mode == Mode::AES_CTR) {
+        if (!mSession) return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE,
+                    "session not found");
         size_t bytesDecrypted{};
         std::vector<int32_t> clearDataLengths;
         std::vector<int32_t> encryptedDataLengths;
@@ -144,6 +146,12 @@
             clearDataLengths.push_back(ss.numBytesOfClearData);
             encryptedDataLengths.push_back(ss.numBytesOfEncryptedData);
         }
+        if (in_args.keyId.size() != kBlockSize || in_args.iv.size() != kBlockSize) {
+            android_errorWriteLog(0x534e4554, "244569759");
+            detailedError = "invalid decrypt parameter size";
+            return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+        }
+
         auto res =
                 mSession->decrypt(in_args.keyId.data(), in_args.iv.data(),
                                   srcPtr, static_cast<uint8_t*>(destPtr),
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index ea51e9d..31cb7c0 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -16,6 +16,7 @@
 #define LOG_TAG "clearkey-DrmPlugin"
 
 #include <aidl/android/hardware/drm/DrmMetric.h>
+#include <android-base/parseint.h>
 #include <utils/Log.h>
 
 #include <inttypes.h>
@@ -83,12 +84,14 @@
 void DrmPlugin::initProperties() {
     mStringProperties.clear();
     mStringProperties[kVendorKey] = kAidlVendorValue;
-    mStringProperties[kVersionKey] = kAidlVersionValue;
+    mStringProperties[kVersionKey] = kVersionValue;
     mStringProperties[kPluginDescriptionKey] = kAidlPluginDescriptionValue;
     mStringProperties[kAlgorithmsKey] = kAidlAlgorithmsValue;
     mStringProperties[kListenerTestSupportKey] = kAidlListenerTestSupportValue;
     mStringProperties[kDrmErrorTestKey] = kAidlDrmErrorTestValue;
     mStringProperties[kAidlVersionKey] = kAidlVersionValue;
+    mStringProperties[kOemErrorKey] = "0";
+    mStringProperties[kErrorContextKey] = "0";
 
     std::vector<uint8_t> valueVector;
     valueVector.clear();
@@ -102,6 +105,26 @@
     mByteArrayProperties[kMetricsKey] = valueVector;
 }
 
+int32_t DrmPlugin::getIntProperty(const std::string& prop, int32_t defaultVal) const {
+    if (!mStringProperties.count(prop)) {
+        return defaultVal;
+    }
+    int32_t out = defaultVal;
+    if (!::android::base::ParseInt(mStringProperties.at(prop), &out)) {
+        return defaultVal;
+    }
+    return out;
+}
+
+int32_t DrmPlugin::getOemError() const {
+    return getIntProperty(kOemErrorKey);
+}
+
+int32_t DrmPlugin::getErrorContext() const {
+    return getIntProperty(kErrorContextKey);
+}
+
+//
 // The secure stop in ClearKey implementation is not installed securely.
 // This function merely creates a test environment for testing secure stops APIs.
 // The content in this secure stop is implementation dependent, the clearkey
@@ -127,7 +150,10 @@
         mSessionLibrary->destroySession(session);
         if (session->getMockError() != clearkeydrm::OK) {
             sendSessionLostState(in_sessionId);
-            return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
+            return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE,
+                                      nullptr,
+                                      getOemError(),
+                                      getErrorContext());
         }
         mCloseSessionOkCount++;
         return toNdkScopedAStatus(Status::OK);
@@ -177,7 +203,7 @@
     UNUSED(in_optionalParameters);
 
     KeyRequestType keyRequestType = KeyRequestType::UNKNOWN;
-    std::string defaultUrl("https://default.url");
+    std::string defaultUrl("");
 
     _aidl_return->request = {};
     _aidl_return->requestType = keyRequestType;
@@ -198,7 +224,8 @@
         if (!session.get()) {
             return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
         } else if (session->getMockError() != clearkeydrm::OK) {
-            return toNdkScopedAStatus(session->getMockError());
+            auto err = static_cast<Status>(session->getMockError());
+            return toNdkScopedAStatus(err, nullptr, getOemError(), getErrorContext());
         }
         keyRequestType = KeyRequestType::INITIAL;
     }
@@ -381,6 +408,10 @@
         value = mStringProperties[kDrmErrorTestKey];
     } else if (name == kAidlVersionKey) {
         value = mStringProperties[kAidlVersionKey];
+    } else if (name == kOemErrorKey) {
+        value = mStringProperties[kOemErrorKey];
+    } else if (name == kErrorContextKey) {
+        value = mStringProperties[kErrorContextKey];
     } else {
         ALOGE("App requested unknown string property %s", name.c_str());
         status = Status::ERROR_DRM_CANNOT_HANDLE;
@@ -474,6 +505,7 @@
         return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
     }
 
+    Mutex::Autolock lock(mSecurityLevelLock);
     std::map<std::vector<uint8_t>, ::aidl::android::hardware::drm::SecurityLevel>::iterator itr =
             mSecurityLevel.find(sid);
     if (itr == mSecurityLevel.end()) {
@@ -920,6 +952,13 @@
         }
     }
 
+    if (in_propertyName == kOemErrorKey || in_propertyName == kErrorContextKey) {
+        int32_t err = 0;
+        if (!::android::base::ParseInt(in_value, &err)) {
+            return toNdkScopedAStatus(Status::BAD_VALUE);
+        }
+    }
+
     mStringProperties[key] = std::string(in_value.c_str());
     return toNdkScopedAStatus(Status::OK);
 }
@@ -1009,6 +1048,7 @@
         return Status::ERROR_DRM_SESSION_NOT_OPENED;
     }
 
+    Mutex::Autolock lock(mSecurityLevelLock);
     std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr = mSecurityLevel.find(sid);
     if (itr != mSecurityLevel.end()) {
         mSecurityLevel[sid] = level;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
new file mode 100644
index 0000000..9ef331f
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "CreatePluginFactories.h"
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+using ::aidl::android::hardware::drm::clearkey::createDrmFactory;
+using ::aidl::android::hardware::drm::clearkey::DrmFactory;
+
+using android::fuzzService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    std::shared_ptr<DrmFactory> drmFactory = createDrmFactory();
+    fuzzService(drmFactory->asBinder().get(),  FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
index 9257b17..0db3c37 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
@@ -15,9 +15,12 @@
  */
 #pragma once
 
+#include <cstdint>
 #include <string>
 #include <vector>
 
+#include <json/json.h>
+
 #include <android/binder_auto_utils.h>
 #include "aidl/android/hardware/drm/Status.h"
 #include "ClearKeyTypes.h"
@@ -41,17 +44,32 @@
 }
 
 inline ::ndk::ScopedAStatus toNdkScopedAStatus(::aidl::android::hardware::drm::Status status,
-                                               const char* msg = nullptr) {
+                                               const char* msg = nullptr,
+                                               int32_t oemError = 0,
+                                               int32_t errorContext = 0) {
+
+
     if (Status::OK == status) {
         return ::ndk::ScopedAStatus::ok();
-    } else {
-        auto err = static_cast<int32_t>(status);
-        if (msg) {
-            return ::ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(err, msg);
-        } else {
-            return ::ndk::ScopedAStatus::fromServiceSpecificError(err);
-        }
     }
+
+    Json::Value errObj(Json::objectValue);
+    auto err = static_cast<int32_t>(status);
+    errObj["cdmError"] = err;
+
+    if (oemError) {
+        errObj["oemError"] = oemError;
+    }
+    if (errorContext) {
+        errObj["context"] = errorContext;
+    }
+    if (msg) {
+        errObj["errorMessage"] = msg;
+    }
+
+    Json::FastWriter writer;
+    return ::ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+        err, writer.write(errObj).c_str());
 }
 
 inline ::ndk::ScopedAStatus toNdkScopedAStatus(clearkeydrm::CdmResponseType res) {
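
With this change the clearkey service packs the CDM error plus the optional OEM error, error context, and message into a JSON object carried in the service-specific error message. The sketch below is a hypothetical helper (an assumption, not the patch's statusAidlToDrmStatus, whose implementation lives elsewhere in this change) showing how such a payload could be unpacked into a DrmStatus once the AIDL status code has already been mapped to a framework status_t.

// Hypothetical helper: decode the JSON payload written by
// toNdkScopedAStatus() above into a DrmStatus.
#include <json/json.h>
#include <mediadrm/DrmStatus.h>

android::DrmStatus parseClearkeyErrorPayload(android::status_t frameworkErr,
                                             const char* payload) {
    // Example payload: {"cdmError":5,"oemError":42,"context":7,
    //                   "errorMessage":"session not found"}
    Json::Value root;
    Json::Reader reader;
    if (payload == nullptr || !reader.parse(payload, root)) {
        return android::DrmStatus(frameworkErr);  // no structured detail
    }
    return android::DrmStatus(frameworkErr,
                              root.get("cdmError", 0).asInt(),
                              root.get("oemError", 0).asInt(),
                              root.get("context", 0).asInt(),
                              root.get("errorMessage", "").asString());
}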
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
index 25c05f0..694013a 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
@@ -148,6 +148,9 @@
 
   private:
     void initProperties();
+    int32_t getIntProperty(const std::string& prop, int32_t defaultVal = 0) const;
+    int32_t getOemError() const;
+    int32_t getErrorContext() const;
     void installSecureStop(const std::vector<uint8_t>& sessionId);
     bool makeKeySetId(std::string* keySetId);
     void setPlayPolicy();
@@ -182,7 +185,8 @@
     std::map<std::string, std::vector<uint8_t>> mByteArrayProperties;
     std::map<std::string, std::vector<uint8_t>> mReleaseKeysMap;
     std::map<std::vector<uint8_t>, std::string> mPlaybackId;
-    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel;
+    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel
+        GUARDED_BY(mSecurityLevelLock);
     ::std::shared_ptr<IDrmPluginListener> mListener;
     SessionLibrary* mSessionLibrary;
     int64_t mOpenSessionOkCount;
@@ -201,6 +205,7 @@
 
     DeviceFiles mFileHandle;
     ::android::Mutex mSecureStopLock;
+    ::android::Mutex mSecurityLevelLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
 };
diff --git a/drm/mediadrm/plugins/clearkey/common/Android.bp b/drm/mediadrm/plugins/clearkey/common/Android.bp
index a6a5b28..6913df4 100644
--- a/drm/mediadrm/plugins/clearkey/common/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/common/Android.bp
@@ -97,3 +97,54 @@
         integer_overflow: true,
     },
 }
+
+cc_library_static {
+    name: "libclearkeydevicefiles-protos.common_fuzz",
+
+    proto: {
+        export_proto_headers: true,
+        type: "lite",
+    },
+    srcs: ["protos/DeviceFiles.proto"],
+}
+
+cc_library_static {
+    name: "libclearkeybase_fuzz",
+
+    srcs: [
+        "AesCtrDecryptor.cpp",
+        "Base64.cpp",
+        "Buffer.cpp",
+        "ClearKeyUUID.cpp",
+        "DeviceFiles.cpp",
+        "InitDataParser.cpp",
+        "JsonWebKey.cpp",
+        "MemoryFileSystem.cpp",
+        "Session.cpp",
+        "SessionLibrary.cpp",
+        "Utils.cpp",
+    ],
+
+    cflags: ["-Wall", "-Werror"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libutils",
+        "libcrypto",
+    ],
+
+    whole_static_libs: [
+        "libjsmn",
+        "libclearkeydevicefiles-protos.common_fuzz",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "include/clearkeydrm",
+    ],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
index bfda388..d4e641e 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
@@ -21,7 +21,7 @@
 static const std::string kVendorKey("vendor");
 static const std::string kVendorValue("Google");
 static const std::string kVersionKey("version");
-static const std::string kVersionValue("1.2");
+static const std::string kVersionValue("14"); // sync with Android OS version
 static const std::string kPluginDescriptionKey("description");
 static const std::string kPluginDescriptionValue("ClearKey CDM");
 static const std::string kAlgorithmsKey("algorithms");
@@ -35,6 +35,8 @@
 static const std::string kFrameTooLargeValue("frameTooLarge");
 static const std::string kInvalidStateValue("invalidState");
 static const std::string kAidlVersionKey("aidlVersion");
+static const std::string kOemErrorKey("oemError");
+static const std::string kErrorContextKey("errorContext");
 
 static const std::string kDeviceIdKey("deviceId");
 static const uint8_t kTestDeviceIdData[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
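
The two new keys above let tests inject an OEM error code and an error context that the plugin folds into its JSON error payload (see the DrmPlugin.cpp changes earlier in this patch). A test-style sketch follows, assuming an already constructed clearkey DrmPlugin and the aidl sources' local include path; the helper name is illustrative.

// Test-style sketch (assumption, not part of the patch): inject the new
// oemError / errorContext properties so later failures carry them.
#include "DrmPlugin.h"  // clearkey aidl local include

using ::aidl::android::hardware::drm::clearkey::DrmPlugin;

void injectClearkeyErrorDetail(DrmPlugin& plugin) {
    // Both values must parse as integers; non-numeric strings are rejected
    // with BAD_VALUE by setPropertyString().
    auto s1 = plugin.setPropertyString("oemError", "42");
    auto s2 = plugin.setPropertyString("errorContext", "7");
    if (!s1.isOk() || !s2.isOk()) {
        // Injection failed; subsequent errors will carry the default "0".
    }
}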
diff --git a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp b/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
deleted file mode 100644
index e03a896..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearkeyDecryptor"
-#include <utils/Log.h>
-
-#include <openssl/aes.h>
-
-#include "AesCtrDecryptor.h"
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::SubSample;
-using ::android::hardware::drm::V1_0::Status;
-
-static const size_t kBlockBitCount = kBlockSize * 8;
-
-Status AesCtrDecryptor::decrypt(
-        const std::vector<uint8_t>& key,
-        const Iv iv, const uint8_t* source,
-        uint8_t* destination,
-        const std::vector<SubSample> subSamples,
-        size_t numSubSamples,
-        size_t* bytesDecryptedOut) {
-    uint32_t blockOffset = 0;
-    uint8_t previousEncryptedCounter[kBlockSize];
-    memset(previousEncryptedCounter, 0, kBlockSize);
-
-    if (key.size() != kBlockSize || (sizeof(Iv) / sizeof(uint8_t)) != kBlockSize) {
-        android_errorWriteLog(0x534e4554, "63982768");
-        return Status::ERROR_DRM_DECRYPT;
-    }
-
-    size_t offset = 0;
-    AES_KEY opensslKey;
-    AES_set_encrypt_key(key.data(), kBlockBitCount, &opensslKey);
-    Iv opensslIv;
-    memcpy(opensslIv, iv, sizeof(opensslIv));
-
-    for (size_t i = 0; i < numSubSamples; ++i) {
-        const SubSample& subSample = subSamples[i];
-
-        if (subSample.numBytesOfClearData > 0) {
-            memcpy(destination + offset, source + offset,
-                    subSample.numBytesOfClearData);
-            offset += subSample.numBytesOfClearData;
-        }
-
-        if (subSample.numBytesOfEncryptedData > 0) {
-            AES_ctr128_encrypt(source + offset, destination + offset,
-                    subSample.numBytesOfEncryptedData, &opensslKey,
-                    opensslIv, previousEncryptedCounter,
-                    &blockOffset);
-            offset += subSample.numBytesOfEncryptedData;
-        }
-    }
-
-    *bytesDecryptedOut = offset;
-    return Status::OK;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
deleted file mode 100644
index b82d996..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ /dev/null
@@ -1,167 +0,0 @@
-//
-// Copyright (C) 2018 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS.  PLEASE
-//     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
-//     DEPENDING ON IT IN YOUR PROJECT. ***
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    //   legacy_by_exception_only (by exception only)
-    default_applicable_licenses: ["frameworks_av_license"],
-}
-
-cc_defaults {
-    name: "clearkey_service_defaults",
-    vendor: true,
-
-    srcs: [
-        "AesCtrDecryptor.cpp",
-        "Base64.cpp",
-        "Buffer.cpp",
-        "CreatePluginFactories.cpp",
-        "CryptoFactory.cpp",
-        "CryptoPlugin.cpp",
-        "DeviceFiles.cpp",
-        "DrmFactory.cpp",
-        "DrmPlugin.cpp",
-        "InitDataParser.cpp",
-        "JsonWebKey.cpp",
-        "MemoryFileSystem.cpp",
-        "Session.cpp",
-        "SessionLibrary.cpp",
-    ],
-
-    relative_install_path: "hw",
-
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
-
-    shared_libs: [
-        "android.hardware.drm@1.0",
-        "android.hardware.drm@1.1",
-        "android.hardware.drm@1.2",
-        "android.hardware.drm@1.3",
-        "android.hardware.drm@1.4",
-        "libbase",
-        "libbinder",
-        "libcrypto",
-        "libhidlbase",
-        "libhidlmemory",
-        "liblog",
-        "libprotobuf-cpp-lite",
-        "libutils",
-    ],
-
-    static_libs: [
-        "libclearkeycommon",
-        "libclearkeydevicefiles-protos",
-        "libjsmn",
-    ],
-
-    local_include_dirs: ["include"],
-
-    export_static_lib_headers: ["libjsmn"],
-
-    sanitize: {
-        integer_overflow: true,
-    },
-}
-cc_library_static {
-    name: "libclearkeydevicefiles-protos",
-    vendor: true,
-
-    proto: {
-        export_proto_headers: true,
-        type: "lite",
-    },
-    srcs: ["protos/DeviceFiles.proto"],
-}
-
-cc_library {
-    name: "libclearkeyhidl",
-    defaults: ["clearkey_service_defaults"],
-}
-
-cc_binary {
-    name: "android.hardware.drm@1.2-service.clearkey",
-    defaults: ["clearkey_service_defaults"],
-    srcs: ["service.cpp"],
-    init_rc: ["android.hardware.drm@1.2-service.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
-}
-
-cc_binary {
-    name: "android.hardware.drm@1.2-service-lazy.clearkey",
-    overrides: ["android.hardware.drm@1.2-service.clearkey"],
-    defaults: ["clearkey_service_defaults"],
-    srcs: ["serviceLazy.cpp"],
-    init_rc: ["android.hardware.drm@1.2-service-lazy.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
-}
-
-cc_binary {
-    name: "android.hardware.drm@1.4-service.clearkey",
-    defaults: ["clearkey_service_defaults"],
-    srcs: ["service.cpp"],
-    init_rc: ["android.hardware.drm@1.4-service.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
-}
-
-cc_binary {
-    name: "android.hardware.drm@1.4-service-lazy.clearkey",
-    overrides: ["android.hardware.drm@1.4-service.clearkey"],
-    defaults: ["clearkey_service_defaults"],
-    srcs: ["serviceLazy.cpp"],
-    init_rc: ["android.hardware.drm@1.4-service-lazy.clearkey.rc"],
-    vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
-}
-
-cc_fuzz {
-    name: "clearkeyV1.4_fuzzer",
-    vendor: true,
-    srcs: [
-        "fuzzer/clearkeyV1.4_fuzzer.cpp",
-    ],
-    static_libs: [
-        "libclearkeyhidl",
-        "libclearkeycommon",
-        "libclearkeydevicefiles-protos",
-        "libjsmn",
-        "libprotobuf-cpp-lite",
-    ],
-    shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hardware.drm@1.0",
-        "android.hardware.drm@1.1",
-        "android.hardware.drm@1.2",
-        "android.hardware.drm@1.3",
-        "android.hardware.drm@1.4",
-        "libcrypto",
-        "libhidlbase",
-        "libhidlmemory",
-        "liblog",
-        "libutils",
-    ],
-    fuzz_config: {
-        cc: [
-            "android-media-fuzzing-reports@google.com",
-        ],
-        componentid: 155276,
-    },
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp b/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
deleted file mode 100644
index d81f875..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Base64.h"
-
-#include <string>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-sp<Buffer> decodeBase64(const std::string &s) {
-    size_t n = s.size();
-
-    if ((n % 4) != 0) {
-        return nullptr;
-    }
-
-    size_t padding = 0;
-    if (n >= 1 && s.c_str()[n - 1] == '=') {
-        padding = 1;
-
-        if (n >= 2 && s.c_str()[n - 2] == '=') {
-            padding = 2;
-
-            if (n >= 3 && s.c_str()[n - 3] == '=') {
-                padding = 3;
-            }
-        }
-    }
-
-    // We divide first to avoid overflow. It's OK to do this because we
-    // already made sure that n % 4 == 0.
-    size_t outLen = (n / 4) * 3 - padding;
-
-    sp<Buffer> buffer = new Buffer(outLen);
-    uint8_t *out = buffer->data();
-    if (out == nullptr || buffer->size() < outLen) {
-        return nullptr;
-    }
-
-    size_t j = 0;
-    uint32_t accum = 0;
-    for (size_t i = 0; i < n; ++i) {
-        char c = s.c_str()[i];
-        unsigned value;
-        if (c >= 'A' && c <= 'Z') {
-            value = c - 'A';
-        } else if (c >= 'a' && c <= 'z') {
-            value = 26 + c - 'a';
-        } else if (c >= '0' && c <= '9') {
-            value = 52 + c - '0';
-        } else if (c == '+' || c == '-') {
-            value = 62;
-        } else if (c == '/' || c == '_') {
-            value = 63;
-        } else if (c != '=') {
-            return nullptr;
-        } else {
-            if (i < n - padding) {
-                return nullptr;
-            }
-
-            value = 0;
-        }
-
-        accum = (accum << 6) | value;
-
-        if (((i + 1) % 4) == 0) {
-            if (j < outLen) { out[j++] = (accum >> 16); }
-            if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
-            if (j < outLen) { out[j++] = accum & 0xff; }
-
-            accum = 0;
-        }
-    }
-
-    return buffer;
-}
-
-static char encode6Bit(unsigned x) {
-    if (x <= 25) {
-        return 'A' + x;
-    } else if (x <= 51) {
-        return 'a' + x - 26;
-    } else if (x <= 61) {
-        return '0' + x - 52;
-    } else if (x == 62) {
-        return '+';
-    } else {
-        return '/';
-    }
-}
-
-void encodeBase64(const void *_data, size_t size, std::string *out) {
-    out->clear();
-
-    const uint8_t *data = (const uint8_t *)_data;
-
-    size_t i;
-    for (i = 0; i < (size / 3) * 3; i += 3) {
-        uint8_t x1 = data[i];
-        uint8_t x2 = data[i + 1];
-        uint8_t x3 = data[i + 2];
-
-        out->push_back(encode6Bit(x1 >> 2));
-        out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
-        out->push_back(encode6Bit((x2 << 2 | x3 >> 6) & 0x3f));
-        out->push_back(encode6Bit(x3 & 0x3f));
-    }
-    switch (size % 3) {
-        case 0:
-            break;
-        case 2:
-        {
-            uint8_t x1 = data[i];
-            uint8_t x2 = data[i + 1];
-            out->push_back(encode6Bit(x1 >> 2));
-            out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
-            out->push_back(encode6Bit((x2 << 2) & 0x3f));
-            out->push_back('=');
-            break;
-        }
-        default:
-        {
-            uint8_t x1 = data[i];
-            out->push_back(encode6Bit(x1 >> 2));
-            out->push_back(encode6Bit((x1 << 4) & 0x3f));
-            out->append("==");
-            break;
-        }
-    }
-}
-
-void encodeBase64Url(const void *_data, size_t size, std::string *out) {
-    encodeBase64(_data, size, out);
-
-    if ((std::string::npos != out->find("+")) ||
-            (std::string::npos != out->find("/"))) {
-        size_t outLen = out->size();
-        char *base64url = new char[outLen];
-        for (size_t i = 0; i < outLen; ++i) {
-            if (out->c_str()[i] == '+')
-                base64url[i] = '-';
-            else if (out->c_str()[i] == '/')
-                base64url[i] = '_';
-            else
-                base64url[i] = out->c_str()[i];
-        }
-
-        out->assign(base64url, outLen);
-        delete[] base64url;
-    }
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp b/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
deleted file mode 100644
index dcb76f4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Buffer.h"
-
-#include <android/hardware/drm/1.0/types.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-Buffer::Buffer(size_t capacity)
-      : mRangeOffset(0),
-      mOwnsData(true) {
-    mData = malloc(capacity);
-    if (mData == nullptr) {
-        mCapacity = 0;
-        mRangeLength = 0;
-    } else {
-        mCapacity = capacity;
-        mRangeLength = capacity;
-    }
-}
-
-Buffer::~Buffer() {
-    if (mOwnsData) {
-        if (mData != nullptr) {
-            free(mData);
-            mData = nullptr;
-        }
-    }
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp b/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
deleted file mode 100644
index 4ab33d3..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CreatePluginFactories.h"
-
-#include "CryptoFactory.h"
-#include "DrmFactory.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-extern "C" {
-
-IDrmFactory* createDrmFactory() {
-    return new DrmFactory();
-}
-
-ICryptoFactory* createCryptoFactory() {
-    return new CryptoFactory();
-}
-
-} // extern "C"
-
-}  // namespace clearkey
-}  // namespace V1_4
-}  // namespace drm
-}  // namespace hardware
-}  // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
deleted file mode 100644
index 0bebc3b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyCryptoFactory"
-#include <utils/Log.h>
-
-#include "CryptoFactory.h"
-
-#include "ClearKeyUUID.h"
-#include "CryptoPlugin.h"
-#include "TypeConvert.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_4::clearkey::CryptoPlugin;
-
-Return<bool> CryptoFactory::isCryptoSchemeSupported(
-    const hidl_array<uint8_t, 16> &uuid)
-{
-    return clearkeydrm::isClearKeyUUID(uuid.data());
-}
-
-Return<void> CryptoFactory::createPlugin(
-    const hidl_array<uint8_t, 16> &uuid,
-    const hidl_vec<uint8_t> &initData,
-    createPlugin_cb _hidl_cb) {
-
-    if (!isCryptoSchemeSupported(uuid.data())) {
-        ALOGE("Clearkey Drm HAL: failed to create clearkey plugin, " \
-                "invalid crypto scheme");
-        _hidl_cb(Status::BAD_VALUE, nullptr);
-        return Void();
-    }
-
-    CryptoPlugin *cryptoPlugin = new CryptoPlugin(initData);
-    Status status = cryptoPlugin->getInitStatus();
-    if (status == Status::OK) {
-        _hidl_cb(Status::OK, cryptoPlugin);
-    } else {
-        delete cryptoPlugin;
-        _hidl_cb(status, nullptr);
-    }
-    return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
deleted file mode 100644
index 3a675f6..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ /dev/null
@@ -1,257 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyCryptoPlugin"
-#include <utils/Log.h>
-
-#include "CryptoPlugin.h"
-#include "SessionLibrary.h"
-#include "TypeConvert.h"
-
-#include <hidlmemory/mapping.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::BufferType;
-
-Return<void> CryptoPlugin::setSharedBufferBase(
-        const hidl_memory& base, uint32_t bufferId) {
-    sp<IMemory> hidlMemory = mapMemory(base);
-    ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
-
-    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
-
-    // allow mapMemory to return nullptr
-    mSharedBufferMap[bufferId] = hidlMemory;
-    return Void();
-}
-
-Return<void> CryptoPlugin::decrypt(
-    bool secure,
-    const hidl_array<uint8_t, 16>& keyId,
-    const hidl_array<uint8_t, 16>& iv,
-    Mode mode,
-    const Pattern& pattern,
-    const hidl_vec<SubSample>& subSamples,
-    const SharedBuffer& source,
-    uint64_t offset,
-    const DestinationBuffer& destination,
-    decrypt_cb _hidl_cb) {
-
-  Status status = Status::ERROR_DRM_UNKNOWN;
-  hidl_string detailedError;
-  uint32_t bytesWritten = 0;
-
-  Return<void> hResult = decrypt_1_2(
-      secure, keyId, iv, mode, pattern, subSamples, source, offset, destination,
-      [&](Status_V1_2 hStatus, uint32_t hBytesWritten, hidl_string hDetailedError) {
-        status = toStatus_1_0(hStatus);
-        bytesWritten = hBytesWritten;
-        detailedError = hDetailedError;
-      }
-    );
-
-  status = hResult.isOk() ? status : Status::ERROR_DRM_CANNOT_HANDLE;
-  _hidl_cb(status, bytesWritten, detailedError);
-  return Void();
-}
-
-// Returns negative values for error code and positive values for the size of
-// decrypted data.  In theory, the output size can be larger than the input
-// size, but in practice this will never happen for AES-CTR.
-Return<void> CryptoPlugin::decrypt_1_2(
-        bool secure,
-        const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
-        const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
-        Mode mode,
-        const Pattern& pattern,
-        const hidl_vec<SubSample>& subSamples,
-        const SharedBuffer& source,
-        uint64_t offset,
-        const DestinationBuffer& destination,
-        decrypt_1_2_cb _hidl_cb) {
-    UNUSED(pattern);
-
-    if (secure) {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-            "Secure decryption is not supported with ClearKey.");
-        return Void();
-    }
-
-    std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
-    if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
-      _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-               "source decrypt buffer base not set");
-      return Void();
-    }
-
-    if (destination.type == BufferType::SHARED_MEMORY) {
-      const SharedBuffer& dest = destination.nonsecureMemory;
-      if (mSharedBufferMap.find(dest.bufferId) == mSharedBufferMap.end()) {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-                 "destination decrypt buffer base not set");
-        return Void();
-      }
-    } else {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-                 "destination type not supported");
-        return Void();
-    }
-
-    sp<IMemory> sourceBase = mSharedBufferMap[source.bufferId];
-    if (sourceBase == nullptr) {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "source is a nullptr");
-        return Void();
-    }
-
-    size_t totalSize = 0;
-    if (__builtin_add_overflow(source.offset, offset, &totalSize) ||
-        __builtin_add_overflow(totalSize, source.size, &totalSize) ||
-        totalSize > sourceBase->getSize()) {
-        android_errorWriteLog(0x534e4554, "176496160");
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid buffer size");
-        return Void();
-    }
-
-    uint8_t *base = static_cast<uint8_t *>
-            (static_cast<void *>(sourceBase->getPointer()));
-    uint8_t* srcPtr = static_cast<uint8_t *>(base + source.offset + offset);
-    void* destPtr = NULL;
-    // destination.type == BufferType::SHARED_MEMORY
-    const SharedBuffer& destBuffer = destination.nonsecureMemory;
-    sp<IMemory> destBase = mSharedBufferMap[destBuffer.bufferId];
-    if (destBase == nullptr) {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "destination is a nullptr");
-        return Void();
-    }
-
-    base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
-
-    totalSize = 0;
-    if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
-        totalSize > destBase->getSize()) {
-        android_errorWriteLog(0x534e4554, "176444622");
-        _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
-        return Void();
-    }
-    destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
-
-    // release mSharedBufferLock
-    shared_buffer_lock.unlock();
-
-    // Calculate the output buffer size and determine if any subsamples are
-    // encrypted.
-    size_t destSize = 0;
-    size_t srcSize = 0;
-    bool haveEncryptedSubsamples = false;
-    for (size_t i = 0; i < subSamples.size(); i++) {
-        const SubSample &subSample = subSamples[i];
-        if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize) ||
-            __builtin_add_overflow(srcSize, subSample.numBytesOfClearData, &srcSize)) {
-            _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample clear size overflow");
-            return Void();
-        }
-        if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize) ||
-            __builtin_add_overflow(srcSize, subSample.numBytesOfEncryptedData, &srcSize)) {
-            _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample encrypted size overflow");
-            return Void();
-        }
-        if (subSample.numBytesOfEncryptedData > 0) {
-            haveEncryptedSubsamples = true;
-        }
-    }
-
-    if (destSize > destBuffer.size || srcSize > source.size) {
-        _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample sum too large");
-        return Void();
-    }
-
-    if (mode == Mode::UNENCRYPTED) {
-        if (haveEncryptedSubsamples) {
-            _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-                    "Encrypted subsamples found in allegedly unencrypted data.");
-            return Void();
-        }
-
-        size_t offset = 0;
-        for (size_t i = 0; i < subSamples.size(); ++i) {
-            const SubSample& subSample = subSamples[i];
-            if (subSample.numBytesOfClearData != 0) {
-                memcpy(reinterpret_cast<uint8_t*>(destPtr) + offset,
-                       reinterpret_cast<const uint8_t*>(srcPtr) + offset,
-                       subSample.numBytesOfClearData);
-                offset += subSample.numBytesOfClearData;
-            }
-        }
-
-        _hidl_cb(Status_V1_2::OK, static_cast<ssize_t>(offset), "");
-        return Void();
-    } else if (mode == Mode::AES_CTR) {
-        size_t bytesDecrypted;
-        Status_V1_2 res = mSession->decrypt(keyId.data(), iv.data(), srcPtr,
-                static_cast<uint8_t*>(destPtr), toVector(subSamples), &bytesDecrypted);
-        if (res == Status_V1_2::OK) {
-            _hidl_cb(Status_V1_2::OK, static_cast<ssize_t>(bytesDecrypted), "");
-            return Void();
-        } else {
-            _hidl_cb(res, 0, "Decryption Error");
-            return Void();
-        }
-    } else {
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
-                "Selected encryption mode is not supported by the ClearKey DRM Plugin.");
-        return Void();
-    }
-}
-
-Return<Status> CryptoPlugin::setMediaDrmSession(
-        const hidl_vec<uint8_t>& sessionId) {
-    if (!sessionId.size()) {
-        mSession = nullptr;
-    } else {
-        mSession = SessionLibrary::get()->findSession(sessionId);
-        if (!mSession.get()) {
-            return Status::ERROR_DRM_SESSION_NOT_OPENED;
-        }
-    }
-    return Status::OK;
-}
-
-Return<void> CryptoPlugin::getLogMessages(
-        getLogMessages_cb _hidl_cb) {
-    using std::chrono::duration_cast;
-    using std::chrono::milliseconds;
-    using std::chrono::system_clock;
-
-    auto timeMillis = duration_cast<milliseconds>(
-            system_clock::now().time_since_epoch()).count();
-
-    std::vector<LogMessage> logs = {
-            { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
-    _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
-    return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
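For reference, a standalone sketch (not part of this tree) of the overflow-checked bounds test the removed decrypt_1_2() applies before touching a shared buffer: the request is rejected when the offset arithmetic wraps or the computed end lies past the mapped size. Function and parameter names here are illustrative.

    #include <cstddef>
    #include <cstdint>
    #include <iostream>

    // True only when bufferOffset + sampleOffset + length fits inside
    // mappedSize without wrapping around.
    bool withinBuffer(size_t bufferOffset, uint64_t sampleOffset,
                      size_t length, size_t mappedSize) {
        size_t end = 0;
        if (__builtin_add_overflow(bufferOffset, sampleOffset, &end) ||
            __builtin_add_overflow(end, length, &end) ||
            end > mappedSize) {
            return false;  // wrapped or out of range
        }
        return true;
    }

    int main() {
        std::cout << withinBuffer(16, 8, 100, 4096) << '\n';      // 1: fits
        std::cout << withinBuffer(SIZE_MAX, 1, 1, 4096) << '\n';  // 0: wraps
        return 0;
    }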
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp b/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
deleted file mode 100644
index 0385d8f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-
-#include <utils/Log.h>
-
-#include <string>
-#include <sys/stat.h>
-
-#include "DeviceFiles.h"
-#include "Utils.h"
-
-#include <openssl/sha.h>
-
-// Protobuf generated classes.
-using android::hardware::drm::V1_2::clearkey::OfflineFile;
-using android::hardware::drm::V1_2::clearkey::HashedFile;
-using android::hardware::drm::V1_2::clearkey::License;
-using android::hardware::drm::V1_2::clearkey::License_LicenseState_ACTIVE;
-using android::hardware::drm::V1_2::clearkey::License_LicenseState_RELEASING;
-
-namespace {
-const char kLicenseFileNameExt[] = ".lic";
-
-bool Hash(const std::string& data, std::string* hash) {
-    if (!hash) return false;
-
-    hash->resize(SHA256_DIGEST_LENGTH);
-
-    const unsigned char* input = reinterpret_cast<const unsigned char*>(data.data());
-    unsigned char* output = reinterpret_cast<unsigned char*>(&(*hash)[0]);
-    SHA256(input, data.size(), output);
-    return true;
-}
-
-}  // namespace
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-bool DeviceFiles::StoreLicense(
-        const std::string& keySetId, LicenseState state,
-        const std::string& licenseResponse) {
-
-    OfflineFile file;
-    file.set_type(OfflineFile::LICENSE);
-    file.set_version(OfflineFile::VERSION_1);
-
-    License* license = file.mutable_license();
-    switch (state) {
-        case kLicenseStateActive:
-            license->set_state(License_LicenseState_ACTIVE);
-            license->set_license(licenseResponse);
-            break;
-        case kLicenseStateReleasing:
-            license->set_state(License_LicenseState_RELEASING);
-            license->set_license(licenseResponse);
-            break;
-        default:
-            ALOGW("StoreLicense: Unknown license state: %u", state);
-            return false;
-    }
-
-    std::string serializedFile;
-    file.SerializeToString(&serializedFile);
-
-    return StoreFileWithHash(keySetId + kLicenseFileNameExt, serializedFile);
-}
-
-bool DeviceFiles::StoreFileWithHash(const std::string& fileName,
-        const std::string& serializedFile) {
-    std::string hash;
-    if (!Hash(serializedFile, &hash)) {
-        ALOGE("StoreFileWithHash: Failed to compute hash");
-        return false;
-    }
-
-    HashedFile hashFile;
-    hashFile.set_file(serializedFile);
-    hashFile.set_hash(hash);
-
-    std::string serializedHashFile;
-    hashFile.SerializeToString(&serializedHashFile);
-
-    return StoreFileRaw(fileName, serializedHashFile);
-}
-
-bool DeviceFiles::StoreFileRaw(const std::string& fileName, const std::string& serializedHashFile) {
-    MemoryFileSystem::MemoryFile memFile;
-    memFile.setFileName(fileName);
-    memFile.setContent(serializedHashFile);
-    memFile.setFileSize(serializedHashFile.size());
-    size_t len = mFileHandle.Write(fileName, memFile);
-
-    if (len != static_cast<size_t>(serializedHashFile.size())) {
-        ALOGE("StoreFileRaw: Failed to write %s", fileName.c_str());
-        ALOGD("StoreFileRaw: expected=%zd, actual=%zu", serializedHashFile.size(), len);
-        return false;
-    }
-
-    ALOGD("StoreFileRaw: wrote %zu bytes to %s", serializedHashFile.size(), fileName.c_str());
-    return true;
-}
-
-bool DeviceFiles::RetrieveLicense(
-    const std::string& keySetId, LicenseState* state, std::string* offlineLicense) {
-
-    OfflineFile file;
-    if (!RetrieveHashedFile(keySetId + kLicenseFileNameExt, &file)) {
-        return false;
-    }
-
-    if (file.type() != OfflineFile::LICENSE) {
-        ALOGE("RetrieveLicense: Invalid file type");
-        return false;
-    }
-
-    if (file.version() != OfflineFile::VERSION_1) {
-        ALOGE("RetrieveLicense: Invalid file version");
-        return false;
-    }
-
-    if (!file.has_license()) {
-        ALOGE("RetrieveLicense: License not present");
-        return false;
-    }
-
-    License license = file.license();
-    switch (license.state()) {
-        case License_LicenseState_ACTIVE:
-            *state = kLicenseStateActive;
-            break;
-        case License_LicenseState_RELEASING:
-            *state = kLicenseStateReleasing;
-            break;
-        default:
-            ALOGW("RetrieveLicense: Unrecognized license state: %u",
-                    kLicenseStateUnknown);
-            *state = kLicenseStateUnknown;
-            break;
-    }
-    *offlineLicense = license.license();
-    return true;
-}
-
-bool DeviceFiles::DeleteLicense(const std::string& keySetId) {
-    return mFileHandle.RemoveFile(keySetId + kLicenseFileNameExt);
-}
-
-bool DeviceFiles::DeleteAllLicenses() {
-    return mFileHandle.RemoveAllFiles();
-}
-
-bool DeviceFiles::LicenseExists(const std::string& keySetId) {
-    return mFileHandle.FileExists(keySetId + kLicenseFileNameExt);
-}
-
-std::vector<std::string> DeviceFiles::ListLicenses() const {
-    std::vector<std::string> licenses = mFileHandle.ListFiles();
-    for (size_t i = 0; i < licenses.size(); i++) {
-        std::string& license = licenses[i];
-        license = license.substr(0, license.size() - strlen(kLicenseFileNameExt));
-    }
-    return licenses;
-}
-
-bool DeviceFiles::RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile) {
-    if (!deSerializedFile) {
-        ALOGE("RetrieveHashedFile: invalid file parameter");
-        return false;
-    }
-
-    if (!FileExists(fileName)) {
-        ALOGE("RetrieveHashedFile: %s does not exist", fileName.c_str());
-        return false;
-    }
-
-    ssize_t bytes = GetFileSize(fileName);
-    if (bytes <= 0) {
-        ALOGE("RetrieveHashedFile: invalid file size: %s", fileName.c_str());
-        // Remove the corrupted file so the caller will not get the same error
-        // when trying to access the file repeatedly, causing the system to stall.
-        RemoveFile(fileName);
-        return false;
-    }
-
-    std::string serializedHashFile;
-    serializedHashFile.resize(bytes);
-    bytes = mFileHandle.Read(fileName, &serializedHashFile);
-
-    if (bytes != static_cast<ssize_t>(serializedHashFile.size())) {
-        ALOGE("RetrieveHashedFile: Failed to read from %s", fileName.c_str());
-        ALOGV("RetrieveHashedFile: expected: %zd, actual: %zd", serializedHashFile.size(), bytes);
-        // Remove the corrupted file so the caller will not get the same error
-        // when trying to access the file repeatedly, causing the system to stall.
-        RemoveFile(fileName);
-        return false;
-    }
-
-    ALOGV("RetrieveHashedFile: read %zd from %s", bytes, fileName.c_str());
-
-    HashedFile hashFile;
-    if (!hashFile.ParseFromString(serializedHashFile)) {
-        ALOGE("RetrieveHashedFile: Unable to parse hash file");
-        // Remove corrupt file.
-        RemoveFile(fileName);
-        return false;
-    }
-
-    std::string hash;
-    if (!Hash(hashFile.file(), &hash)) {
-        ALOGE("RetrieveHashedFile: Hash computation failed");
-        return false;
-    }
-
-    if (hash != hashFile.hash()) {
-        ALOGE("RetrieveHashedFile: Hash mismatch");
-        // Remove corrupt file.
-        RemoveFile(fileName);
-        return false;
-    }
-
-    if (!deSerializedFile->ParseFromString(hashFile.file())) {
-        ALOGE("RetrieveHashedFile: Unable to parse file");
-        // Remove corrupt file.
-        RemoveFile(fileName);
-        return false;
-    }
-
-    return true;
-}
-
-bool DeviceFiles::FileExists(const std::string& fileName) const {
-    return mFileHandle.FileExists(fileName);
-}
-
-bool DeviceFiles::RemoveFile(const std::string& fileName) {
-    return mFileHandle.RemoveFile(fileName);
-}
-
-ssize_t DeviceFiles::GetFileSize(const std::string& fileName) const {
-    return mFileHandle.GetFileSize(fileName);
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
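For reference, a standalone sketch (not part of this tree) of the integrity scheme the removed DeviceFiles implements: the serialized payload is stored together with its SHA-256 digest, and a digest mismatch on read marks the file as corrupt. The struct below is an illustrative stand-in for the HashedFile protobuf; only the OpenSSL SHA256 call mirrors the original.

    #include <openssl/sha.h>
    #include <string>

    struct StoredFile {          // stand-in for HashedFile (illustrative)
        std::string payload;     // serialized file bytes
        std::string digest;      // SHA-256 of payload
    };

    static std::string sha256(const std::string& data) {
        std::string out(SHA256_DIGEST_LENGTH, '\0');
        SHA256(reinterpret_cast<const unsigned char*>(data.data()), data.size(),
               reinterpret_cast<unsigned char*>(&out[0]));
        return out;
    }

    StoredFile wrapWithHash(const std::string& serialized) {
        return {serialized, sha256(serialized)};
    }

    bool verifyHash(const StoredFile& f) {
        return sha256(f.payload) == f.digest;  // false => treat file as corrupt
    }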
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
deleted file mode 100644
index 14cb5c1..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#include <vector>
-#define LOG_TAG "hidl_ClearKeyDrmFactory"
-#include <utils/Log.h>
-
-#include <utils/Errors.h>
-
-#include "DrmFactory.h"
-
-#include "DrmPlugin.h"
-#include "ClearKeyUUID.h"
-#include "MimeType.h"
-#include "SessionLibrary.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::drm::V1_4::clearkey::DrmPlugin;
-using ::android::hardware::drm::V1_4::clearkey::SessionLibrary;
-using ::android::hardware::Void;
-
-Return<bool> DrmFactory::isCryptoSchemeSupported(
-        const hidl_array<uint8_t, 16>& uuid) {
-    return clearkeydrm::isClearKeyUUID(uuid.data());
-}
-
-Return<bool> DrmFactory::isCryptoSchemeSupported_1_2(const hidl_array<uint8_t, 16>& uuid,
-                                                     const hidl_string &mimeType,
-                                                     SecurityLevel level) {
-    return isCryptoSchemeSupported(uuid) && isContentTypeSupported(mimeType) &&
-            level == SecurityLevel::SW_SECURE_CRYPTO;
-}
-
-Return<bool> DrmFactory::isContentTypeSupported(const hidl_string &mimeType) {
-    // This should match the mimeTypes handled by InitDataParser.
-    return mimeType == kIsoBmffVideoMimeType ||
-            mimeType == kIsoBmffAudioMimeType ||
-            mimeType == kCencInitDataFormat ||
-            mimeType == kWebmVideoMimeType ||
-            mimeType == kWebmAudioMimeType ||
-            mimeType == kWebmInitDataFormat;
-}
-
-Return<void> DrmFactory::createPlugin(
-    const hidl_array<uint8_t, 16>& uuid,
-    const hidl_string& appPackageName,
-    createPlugin_cb _hidl_cb) {
-    UNUSED(appPackageName);
-
-    DrmPlugin *plugin = NULL;
-    if (!isCryptoSchemeSupported(uuid.data())) {
-        ALOGE("Clear key Drm HAL: failed to create drm plugin, " \
-                "invalid crypto scheme");
-        _hidl_cb(Status::BAD_VALUE, plugin);
-        return Void();
-    }
-
-    plugin = new DrmPlugin(SessionLibrary::get());
-    _hidl_cb(Status::OK, plugin);
-    return Void();
-}
-
-Return<void> DrmFactory::getSupportedCryptoSchemes(
-        getSupportedCryptoSchemes_cb _hidl_cb) {
-    std::vector<hidl_array<uint8_t, 16>> schemes;
-    for (const auto &scheme : clearkeydrm::getSupportedCryptoSchemes()) {
-        schemes.push_back(scheme);
-    }
-    _hidl_cb(schemes);
-    return Void();
-}
-
-Return<void> DrmFactory::debug(const hidl_handle& fd, const hidl_vec<hidl_string>& /*args*/) {
-    if (fd.getNativeHandle() == nullptr || fd->numFds < 1) {
-        ALOGE("%s: missing fd for writing", __FUNCTION__);
-        return Void();
-    }
-
-    FILE* out = fdopen(dup(fd->data[0]), "w");
-    uint32_t currentSessions = SessionLibrary::get()->numOpenSessions();
-    fprintf(out, "current open sessions: %u\n", currentSessions);
-    fclose(out);
-    return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
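For reference, a standalone sketch (not part of this tree) of the gate the removed DrmFactory::isCryptoSchemeSupported_1_2() applies: the UUID must be a ClearKey UUID, the MIME type must be one the plugin handles, and only SW_SECURE_CRYPTO is accepted. The MIME strings below are assumptions for illustration, not the plugin's constants.

    #include <set>
    #include <string>

    enum class Level { SW_SECURE_CRYPTO, SW_SECURE_DECODE, HW_SECURE_ALL };

    bool schemeSupported(bool isClearKeyUuid, const std::string& mime, Level level) {
        // Assumed set of supported content types, for illustration only.
        static const std::set<std::string> kMimes = {
            "video/mp4", "audio/mp4", "video/webm", "audio/webm", "cenc", "webm"};
        return isClearKeyUuid && kMimes.count(mime) > 0 &&
               level == Level::SW_SECURE_CRYPTO;
    }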
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
deleted file mode 100644
index 32d7723..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ /dev/null
@@ -1,962 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyPlugin"
-#include <utils/Log.h>
-
-#include <chrono>
-#include <stdio.h>
-#include <inttypes.h>
-
-#include "DrmPlugin.h"
-#include "ClearKeyDrmProperties.h"
-#include "Session.h"
-#include "TypeConvert.h"
-#include "Utils.h"
-
-namespace {
-const std::string kKeySetIdPrefix("ckid");
-const int kKeySetIdLength = 16;
-const int kSecureStopIdStart = 100;
-const std::string kOfflineLicense("\"type\":\"persistent-license\"");
-const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
-const std::string kTrue("True");
-
-const std::string kQueryKeyLicenseType("LicenseType");
-    // Value: "Streaming" or "Offline"
-const std::string kQueryKeyPlayAllowed("PlayAllowed");
-    // Value: "True" or "False"
-const std::string kQueryKeyRenewAllowed("RenewAllowed");
-    // Value: "True" or "False"
-
-const int kSecureStopIdSize = 10;
-
-std::vector<uint8_t> uint32ToVector(uint32_t value) {
-    // 10 bytes to display max value 4294967295 + one byte null terminator
-    char buffer[kSecureStopIdSize];
-    memset(buffer, 0, kSecureStopIdSize);
-    snprintf(buffer, kSecureStopIdSize, "%" PRIu32, value);
-    return std::vector<uint8_t>(buffer, buffer + sizeof(buffer));
-}
-
-}; // unnamed namespace
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-KeyRequestType toKeyRequestType_V1_0(KeyRequestType_V1_1 keyRequestType) {
-  switch (keyRequestType) {
-    case KeyRequestType_V1_1::NONE:
-    case KeyRequestType_V1_1::UPDATE:
-      return KeyRequestType::UNKNOWN;
-    default:
-      return static_cast<KeyRequestType>(keyRequestType);
-  }
-}
-
-DrmPlugin::DrmPlugin(SessionLibrary* sessionLibrary)
-        : mSessionLibrary(sessionLibrary),
-          mOpenSessionOkCount(0),
-          mCloseSessionOkCount(0),
-          mCloseSessionNotOpenedCount(0),
-          mNextSecureStopId(kSecureStopIdStart),
-          mMockError(Status_V1_2::OK) {
-    mPlayPolicy.clear();
-    initProperties();
-    mSecureStops.clear();
-    mReleaseKeysMap.clear();
-    std::srand(std::time(nullptr));
-}
-
-void DrmPlugin::initProperties() {
-    mStringProperties.clear();
-    mStringProperties[kVendorKey] = kVendorValue;
-    mStringProperties[kVersionKey] = kVersionValue;
-    mStringProperties[kPluginDescriptionKey] = kPluginDescriptionValue;
-    mStringProperties[kAlgorithmsKey] = kAlgorithmsValue;
-    mStringProperties[kListenerTestSupportKey] = kListenerTestSupportValue;
-    mStringProperties[kDrmErrorTestKey] = kDrmErrorTestValue;
-
-    std::vector<uint8_t> valueVector;
-    valueVector.clear();
-    valueVector.insert(valueVector.end(),
-            kTestDeviceIdData, kTestDeviceIdData + sizeof(kTestDeviceIdData) / sizeof(uint8_t));
-    mByteArrayProperties[kDeviceIdKey] = valueVector;
-
-    valueVector.clear();
-    valueVector.insert(valueVector.end(),
-            kMetricsData, kMetricsData + sizeof(kMetricsData) / sizeof(uint8_t));
-    mByteArrayProperties[kMetricsKey] = valueVector;
-}
-
-// The secure stop in the ClearKey implementation is not installed securely.
-// This function merely creates a test environment for testing secure stops APIs.
-// The content in this secure stop is implementation dependent; the clearkey
-// secureStop does not serve as a reference implementation.
-void DrmPlugin::installSecureStop(const hidl_vec<uint8_t>& sessionId) {
-    Mutex::Autolock lock(mSecureStopLock);
-
-    ClearkeySecureStop clearkeySecureStop;
-    clearkeySecureStop.id = uint32ToVector(++mNextSecureStopId);
-    clearkeySecureStop.data.assign(sessionId.begin(), sessionId.end());
-
-    mSecureStops.insert(std::pair<std::vector<uint8_t>, ClearkeySecureStop>(
-            clearkeySecureStop.id, clearkeySecureStop));
-}
-
-Return<void> DrmPlugin::openSession(openSession_cb _hidl_cb) {
-    sp<Session> session = mSessionLibrary->createSession();
-    processMockError(session);
-    std::vector<uint8_t> sessionId = session->sessionId();
-
-    Status status = setSecurityLevel(sessionId, SecurityLevel::SW_SECURE_CRYPTO);
-    _hidl_cb(status, toHidlVec(sessionId));
-    mOpenSessionOkCount++;
-    return Void();
-}
-
-Return<void> DrmPlugin::openSession_1_1(SecurityLevel securityLevel,
-        openSession_1_1_cb _hidl_cb) {
-    sp<Session> session = mSessionLibrary->createSession();
-    processMockError(session);
-    std::vector<uint8_t> sessionId = session->sessionId();
-
-    Status status = setSecurityLevel(sessionId, securityLevel);
-    if (status == Status::OK) {
-        mOpenSessionOkCount++;
-    } else {
-        mSessionLibrary->destroySession(session);
-        sessionId.clear();
-    }
-    _hidl_cb(status, toHidlVec(sessionId));
-    return Void();
-}
-
-Return<Status> DrmPlugin::closeSession(const hidl_vec<uint8_t>& sessionId) {
-    if (sessionId.size() == 0) {
-        return Status::BAD_VALUE;
-    }
-
-    sp<Session> session = mSessionLibrary->findSession(toVector(sessionId));
-    if (session.get()) {
-        mSessionLibrary->destroySession(session);
-        if (session->getMockError() != Status_V1_2::OK) {
-            sendSessionLostState(sessionId);
-            return Status::ERROR_DRM_INVALID_STATE;
-        }
-        mCloseSessionOkCount++;
-        return Status::OK;
-    }
-    mCloseSessionNotOpenedCount++;
-    return Status::ERROR_DRM_SESSION_NOT_OPENED;
-}
-
-Status_V1_2 DrmPlugin::getKeyRequestCommon(const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        std::vector<uint8_t> *request,
-        KeyRequestType_V1_1 *keyRequestType,
-        std::string *defaultUrl) {
-    UNUSED(optionalParameters);
-
-    // GetKeyRequestOfflineKeyTypeNotSupported() in vts 1.0 and 1.1 expects
-    // KeyType::OFFLINE to return ERROR_DRM_CANNOT_HANDLE in clearkey plugin.
-    // Those tests pass in an empty initData; we use the empty initData to
-    // signal this specific use case.
-    if (keyType == KeyType::OFFLINE && 0 == initData.size()) {
-        return Status_V1_2::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    *defaultUrl = "https://default.url";
-    *keyRequestType = KeyRequestType_V1_1::UNKNOWN;
-    *request = std::vector<uint8_t>();
-
-    if (scope.size() == 0 ||
-            (keyType != KeyType::STREAMING &&
-            keyType != KeyType::OFFLINE &&
-            keyType != KeyType::RELEASE)) {
-        return Status_V1_2::BAD_VALUE;
-    }
-
-    const std::vector<uint8_t> scopeId = toVector(scope);
-    sp<Session> session;
-    if (keyType == KeyType::STREAMING || keyType == KeyType::OFFLINE) {
-        std::vector<uint8_t> sessionId(scopeId.begin(), scopeId.end());
-        session = mSessionLibrary->findSession(sessionId);
-        if (!session.get()) {
-            return Status_V1_2::ERROR_DRM_SESSION_NOT_OPENED;
-        } else if (session->getMockError() != Status_V1_2::OK) {
-            return session->getMockError();
-        }
-
-        *keyRequestType = KeyRequestType_V1_1::INITIAL;
-    }
-
-    Status_V1_2 status = static_cast<Status_V1_2>(
-            session->getKeyRequest(initData, mimeType, keyType, request));
-
-    if (keyType == KeyType::RELEASE) {
-        std::vector<uint8_t> keySetId(scopeId.begin(), scopeId.end());
-        std::string requestString(request->begin(), request->end());
-        if (requestString.find(kOfflineLicense) != std::string::npos) {
-            std::string emptyResponse;
-            std::string keySetIdString(keySetId.begin(), keySetId.end());
-            if (!mFileHandle.StoreLicense(keySetIdString,
-                    DeviceFiles::kLicenseStateReleasing,
-                    emptyResponse)) {
-                ALOGE("Problem releasing offline license");
-                return Status_V1_2::ERROR_DRM_UNKNOWN;
-            }
-            if (mReleaseKeysMap.find(keySetIdString) == mReleaseKeysMap.end()) {
-                sp<Session> session = mSessionLibrary->createSession();
-                mReleaseKeysMap[keySetIdString] = session->sessionId();
-            } else {
-                ALOGI("key is in use, ignore release request");
-            }
-        } else {
-            ALOGE("Offline license not found, nothing to release");
-        }
-        *keyRequestType = KeyRequestType_V1_1::RELEASE;
-    }
-    return status;
-}
-
-Return<void> DrmPlugin::getKeyRequest(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_cb _hidl_cb) {
-    UNUSED(optionalParameters);
-
-    KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
-    std::string defaultUrl("");
-    std::vector<uint8_t> request;
-    Status_V1_2 status = getKeyRequestCommon(
-            scope, initData, mimeType, keyType, optionalParameters,
-            &request, &keyRequestType, &defaultUrl);
-
-    _hidl_cb(toStatus_1_0(status), toHidlVec(request),
-            toKeyRequestType_V1_0(keyRequestType),
-            hidl_string(defaultUrl));
-    return Void();
-}
-
-Return<void> DrmPlugin::getKeyRequest_1_1(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_1_1_cb _hidl_cb) {
-    UNUSED(optionalParameters);
-
-    KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
-    std::string defaultUrl("");
-    std::vector<uint8_t> request;
-    Status_V1_2 status = getKeyRequestCommon(
-            scope, initData, mimeType, keyType, optionalParameters,
-            &request, &keyRequestType, &defaultUrl);
-
-    _hidl_cb(toStatus_1_0(status), toHidlVec(request),
-            keyRequestType, hidl_string(defaultUrl));
-    return Void();
-}
-
-Return<void> DrmPlugin::getKeyRequest_1_2(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_1_2_cb _hidl_cb) {
-    UNUSED(optionalParameters);
-
-    KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
-    std::string defaultUrl("");
-    std::vector<uint8_t> request;
-    Status_V1_2 status = getKeyRequestCommon(
-            scope, initData, mimeType, keyType, optionalParameters,
-            &request, &keyRequestType, &defaultUrl);
-
-    _hidl_cb(status, toHidlVec(request), keyRequestType, hidl_string(defaultUrl));
-    return Void();
-}
-
-void DrmPlugin::setPlayPolicy() {
-    android::Mutex::Autolock lock(mPlayPolicyLock);
-    mPlayPolicy.clear();
-
-    KeyValue policy;
-    policy.key = kQueryKeyLicenseType;
-    policy.value = kStreaming;
-    mPlayPolicy.push_back(policy);
-
-    policy.key = kQueryKeyPlayAllowed;
-    policy.value = kTrue;
-    mPlayPolicy.push_back(policy);
-
-    policy.key = kQueryKeyRenewAllowed;
-    mPlayPolicy.push_back(policy);
-}
-
-bool DrmPlugin::makeKeySetId(std::string* keySetId) {
-    if (!keySetId) {
-        ALOGE("keySetId destination not provided");
-        return false;
-    }
-    std::vector<uint8_t> ksid(kKeySetIdPrefix.begin(), kKeySetIdPrefix.end());
-    ksid.resize(kKeySetIdLength);
-    std::vector<uint8_t> randomData((kKeySetIdLength - kKeySetIdPrefix.size()) / 2, 0);
-
-    while (keySetId->empty()) {
-        for (auto itr = randomData.begin(); itr != randomData.end(); ++itr) {
-            *itr = std::rand() % 0xff;
-        }
-        *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
-                reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
-        if (mFileHandle.LicenseExists(*keySetId)) {
-            // collision, regenerate
-            ALOGV("Retry generating KeySetId");
-            keySetId->clear();
-        }
-    }
-    return true;
-}
-
-Return<void> DrmPlugin::provideKeyResponse(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& response,
-        provideKeyResponse_cb _hidl_cb) {
-    if (scope.size() == 0 || response.size() == 0) {
-        // Returns empty keySetId
-        _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
-        return Void();
-    }
-
-    std::string responseString(
-            reinterpret_cast<const char*>(response.data()), response.size());
-    const std::vector<uint8_t> scopeId = toVector(scope);
-    std::vector<uint8_t> sessionId;
-    std::string keySetId;
-
-    Status status = Status::OK;
-    bool isOfflineLicense = responseString.find(kOfflineLicense) != std::string::npos;
-    if (scopeId.size() < kKeySetIdPrefix.size()) {
-        android_errorWriteLog(0x534e4554, "144507096");
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
-        return Void();
-    }
-    bool isRelease = (memcmp(scopeId.data(), kKeySetIdPrefix.data(), kKeySetIdPrefix.size()) == 0);
-    if (isRelease) {
-        keySetId.assign(scopeId.begin(), scopeId.end());
-
-        auto iter = mReleaseKeysMap.find(std::string(keySetId.begin(), keySetId.end()));
-        if (iter != mReleaseKeysMap.end()) {
-            sessionId.assign(iter->second.begin(), iter->second.end());
-        }
-    } else {
-        sessionId.assign(scopeId.begin(), scopeId.end());
-        // A non-offline license returns an empty keySetId
-        keySetId.clear();
-    }
-
-    sp<Session> session = mSessionLibrary->findSession(sessionId);
-    if (!session.get()) {
-        _hidl_cb(Status::ERROR_DRM_SESSION_NOT_OPENED, hidl_vec<uint8_t>());
-        return Void();
-    }
-    setPlayPolicy();
-
-    status = session->provideKeyResponse(response);
-    if (status == Status::OK) {
-        if (isOfflineLicense) {
-            if (isRelease) {
-                mFileHandle.DeleteLicense(keySetId);
-                mSessionLibrary->destroySession(session);
-            } else {
-                if (!makeKeySetId(&keySetId)) {
-                    _hidl_cb(Status::ERROR_DRM_UNKNOWN, hidl_vec<uint8_t>());
-                    return Void();
-                }
-
-                bool ok = mFileHandle.StoreLicense(
-                        keySetId,
-                        DeviceFiles::kLicenseStateActive,
-                        std::string(response.begin(), response.end()));
-                if (!ok) {
-                    ALOGE("Failed to store offline license");
-                }
-            }
-        }
-
-        // Test calling AMediaDrm listeners.
-        sendEvent(EventType::VENDOR_DEFINED, sessionId, sessionId);
-
-        sendExpirationUpdate(sessionId, 100);
-
-        std::vector<KeyStatus_V1_2> keysStatus;
-        KeyStatus_V1_2 keyStatus;
-
-        std::vector<uint8_t> keyId1 = { 0xA, 0xB, 0xC };
-        keyStatus.keyId = keyId1;
-        keyStatus.type = V1_2::KeyStatusType::USABLE;
-        keysStatus.push_back(keyStatus);
-
-        std::vector<uint8_t> keyId2 = { 0xD, 0xE, 0xF };
-        keyStatus.keyId = keyId2;
-        keyStatus.type = V1_2::KeyStatusType::EXPIRED;
-        keysStatus.push_back(keyStatus);
-
-        std::vector<uint8_t> keyId3 = { 0x0, 0x1, 0x2 };
-        keyStatus.keyId = keyId3;
-        keyStatus.type = V1_2::KeyStatusType::USABLEINFUTURE;
-        keysStatus.push_back(keyStatus);
-
-        sendKeysChange_1_2(sessionId, keysStatus, true);
-
-        installSecureStop(sessionId);
-    } else {
-        ALOGE("provideKeyResponse returns error=%d", status);
-    }
-
-    std::vector<uint8_t> keySetIdVec(keySetId.begin(), keySetId.end());
-    _hidl_cb(status, toHidlVec(keySetIdVec));
-    return Void();
-}
-
-Return<Status> DrmPlugin::restoreKeys(
-        const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& keySetId) {
-        if (sessionId.size() == 0 || keySetId.size() == 0) {
-            return Status::BAD_VALUE;
-        }
-
-        DeviceFiles::LicenseState licenseState;
-        std::string offlineLicense;
-        Status status = Status::OK;
-        if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
-                &licenseState, &offlineLicense)) {
-            ALOGE("Failed to restore offline license");
-            return Status::ERROR_DRM_NO_LICENSE;
-        }
-
-        if (DeviceFiles::kLicenseStateUnknown == licenseState ||
-                DeviceFiles::kLicenseStateReleasing == licenseState) {
-            ALOGE("Invalid license state=%d", licenseState);
-            return Status::ERROR_DRM_NO_LICENSE;
-        }
-
-        sp<Session> session = mSessionLibrary->findSession(toVector(sessionId));
-        if (!session.get()) {
-            return Status::ERROR_DRM_SESSION_NOT_OPENED;
-        }
-        status = session->provideKeyResponse(std::vector<uint8_t>(offlineLicense.begin(),
-                offlineLicense.end()));
-        if (status != Status::OK) {
-            ALOGE("Failed to restore keys");
-        }
-        return status;
-}
-
-Return<void> DrmPlugin::getPropertyString(
-        const hidl_string& propertyName, getPropertyString_cb _hidl_cb) {
-    std::string name(propertyName.c_str());
-    std::string value;
-
-    if (name == kVendorKey) {
-        value = mStringProperties[kVendorKey];
-    } else if (name == kVersionKey) {
-        value = mStringProperties[kVersionKey];
-    } else if (name == kPluginDescriptionKey) {
-        value = mStringProperties[kPluginDescriptionKey];
-    } else if (name == kAlgorithmsKey) {
-        value = mStringProperties[kAlgorithmsKey];
-    } else if (name == kListenerTestSupportKey) {
-        value = mStringProperties[kListenerTestSupportKey];
-    } else if (name == kDrmErrorTestKey) {
-        value = mStringProperties[kDrmErrorTestKey];
-    } else {
-        ALOGE("App requested unknown string property %s", name.c_str());
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, "");
-        return Void();
-    }
-    _hidl_cb(Status::OK, value.c_str());
-    return Void();
-}
-
-Return<void> DrmPlugin::getPropertyByteArray(
-        const hidl_string& propertyName, getPropertyByteArray_cb _hidl_cb) {
-    std::map<std::string, std::vector<uint8_t> >::iterator itr =
-            mByteArrayProperties.find(std::string(propertyName.c_str()));
-    if (itr == mByteArrayProperties.end()) {
-        ALOGE("App requested unknown property: %s", propertyName.c_str());
-        _hidl_cb(Status::BAD_VALUE, std::vector<uint8_t>());
-        return Void();
-    }
-    _hidl_cb(Status::OK, itr->second);
-    return Void();
-
-}
-
-Return<Status> DrmPlugin::setPropertyString(
-    const hidl_string& name, const hidl_string& value) {
-    std::string immutableKeys;
-    immutableKeys.append(kAlgorithmsKey + ",");
-    immutableKeys.append(kPluginDescriptionKey + ",");
-    immutableKeys.append(kVendorKey + ",");
-    immutableKeys.append(kVersionKey + ",");
-
-    std::string key = std::string(name.c_str());
-    if (immutableKeys.find(key) != std::string::npos) {
-        ALOGD("Cannot set immutable property: %s", key.c_str());
-        return Status::BAD_VALUE;
-    }
-
-    std::map<std::string, std::string>::iterator itr =
-            mStringProperties.find(key);
-    if (itr == mStringProperties.end()) {
-        ALOGE("Cannot set undefined property string, key=%s", key.c_str());
-        return Status::BAD_VALUE;
-    }
-
-    if (name == kDrmErrorTestKey) {
-        if (value == kResourceContentionValue) {
-            mMockError = Status_V1_2::ERROR_DRM_RESOURCE_CONTENTION;
-        } else if (value == kLostStateValue) {
-            mMockError = Status_V1_2::ERROR_DRM_SESSION_LOST_STATE;
-        } else if (value == kFrameTooLargeValue) {
-            mMockError = Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE;
-        } else if (value == kInvalidStateValue)  {
-            mMockError = Status_V1_2::ERROR_DRM_INVALID_STATE;
-        } else {
-            mMockError = Status_V1_2::ERROR_DRM_UNKNOWN;
-        }
-    }
-
-    mStringProperties[key] = std::string(value.c_str());
-    return Status::OK;
-}
-
-Return<Status> DrmPlugin::setPropertyByteArray(
-    const hidl_string& name, const hidl_vec<uint8_t>& value) {
-   UNUSED(value);
-   if (name == kDeviceIdKey) {
-      ALOGD("Cannot set immutable property: %s", name.c_str());
-      return Status::BAD_VALUE;
-   } else if (name == kClientIdKey) {
-       mByteArrayProperties[kClientIdKey] = toVector(value);
-       return Status::OK;
-   }
-
-   // Setting of undefined properties is not supported
-   ALOGE("Failed to set property byte array, key=%s", name.c_str());
-   return Status::ERROR_DRM_CANNOT_HANDLE;
-}
-
-Return<void> DrmPlugin::queryKeyStatus(
-        const hidl_vec<uint8_t>& sessionId,
-        queryKeyStatus_cb _hidl_cb) {
-    if (sessionId.size() == 0) {
-        // Returns empty key status KeyValue pair
-        _hidl_cb(Status::BAD_VALUE, hidl_vec<KeyValue>());
-        return Void();
-    }
-
-    std::vector<KeyValue> infoMapVec;
-    infoMapVec.clear();
-
-    mPlayPolicyLock.lock();
-    KeyValue keyValuePair;
-    for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
-        keyValuePair.key = mPlayPolicy[i].key;
-        keyValuePair.value = mPlayPolicy[i].value;
-        infoMapVec.push_back(keyValuePair);
-    }
-    mPlayPolicyLock.unlock();
-    _hidl_cb(Status::OK, toHidlVec(infoMapVec));
-    return Void();
-}
-
-Return<void> DrmPlugin::getNumberOfSessions(getNumberOfSessions_cb _hidl_cb) {
-        uint32_t currentSessions = mSessionLibrary->numOpenSessions();
-        uint32_t maxSessions = 10;
-        _hidl_cb(Status::OK, currentSessions, maxSessions);
-        return Void();
-}
-
-Return<void> DrmPlugin::getSecurityLevel(const hidl_vec<uint8_t>& sessionId,
-            getSecurityLevel_cb _hidl_cb) {
-    if (sessionId.size() == 0) {
-        _hidl_cb(Status::BAD_VALUE, SecurityLevel::UNKNOWN);
-        return Void();
-    }
-
-    std::vector<uint8_t> sid = toVector(sessionId);
-    sp<Session> session = mSessionLibrary->findSession(sid);
-    if (!session.get()) {
-        _hidl_cb(Status::ERROR_DRM_SESSION_NOT_OPENED, SecurityLevel::UNKNOWN);
-        return Void();
-    }
-
-    std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr =
-            mSecurityLevel.find(sid);
-    if (itr == mSecurityLevel.end()) {
-        ALOGE("Session id not found");
-        _hidl_cb(Status::ERROR_DRM_INVALID_STATE, SecurityLevel::UNKNOWN);
-        return Void();
-    }
-
-    _hidl_cb(Status::OK, itr->second);
-    return Void();
-}
-
-Return<void> DrmPlugin::getLogMessages(
-        getLogMessages_cb _hidl_cb) {
-    using std::chrono::duration_cast;
-    using std::chrono::milliseconds;
-    using std::chrono::system_clock;
-
-    auto timeMillis = duration_cast<milliseconds>(
-            system_clock::now().time_since_epoch()).count();
-
-    std::vector<LogMessage> logs = {
-            { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
-    _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
-    return Void();
-}
-
-Return<bool> DrmPlugin::requiresSecureDecoder(
-        const hidl_string& mime, SecurityLevel level) {
-    UNUSED(mime);
-    UNUSED(level);
-    return false;
-}
-
-Return<bool> DrmPlugin::requiresSecureDecoderDefault(const hidl_string& mime) {
-    UNUSED(mime);
-    // Clearkey only supports SW_SECURE_CRYPTO, so we always return false
-    // regardless of mime type.
-    return false;
-}
-
-Return<Status> DrmPlugin::setPlaybackId(
-    const hidl_vec<uint8_t>& sessionId,
-    const hidl_string& playbackId) {
-    if (sessionId.size() == 0) {
-        ALOGE("Invalid empty session id");
-        return Status::BAD_VALUE;
-    }
-
-    std::vector<uint8_t> sid = toVector(sessionId);
-    mPlaybackId[sid] = playbackId;
-    return Status::OK;
-}
-
-Return<Status> DrmPlugin::setSecurityLevel(const hidl_vec<uint8_t>& sessionId,
-            SecurityLevel level) {
-    if (sessionId.size() == 0) {
-        ALOGE("Invalid empty session id");
-        return Status::BAD_VALUE;
-    }
-
-    if (level > SecurityLevel::SW_SECURE_CRYPTO) {
-        ALOGE("Cannot set security level > max");
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    std::vector<uint8_t> sid = toVector(sessionId);
-    sp<Session> session = mSessionLibrary->findSession(sid);
-    if (!session.get()) {
-        return Status::ERROR_DRM_SESSION_NOT_OPENED;
-    }
-
-    std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr =
-            mSecurityLevel.find(sid);
-    if (itr != mSecurityLevel.end()) {
-        mSecurityLevel[sid] = level;
-    } else {
-        if (!mSecurityLevel.insert(
-                std::pair<std::vector<uint8_t>, SecurityLevel>(sid, level)).second) {
-            ALOGE("Failed to set security level");
-            return Status::ERROR_DRM_INVALID_STATE;
-        }
-    }
-    return Status::OK;
-}
-
-Return<void> DrmPlugin::getMetrics(getMetrics_cb _hidl_cb) {
-    // Set the open session count metric.
-    DrmMetricGroup::Attribute openSessionOkAttribute = {
-      "status", DrmMetricGroup::ValueType::INT64_TYPE, (int64_t) Status::OK, 0.0, ""
-    };
-    DrmMetricGroup::Value openSessionMetricValue = {
-      "count", DrmMetricGroup::ValueType::INT64_TYPE, mOpenSessionOkCount, 0.0, ""
-    };
-    DrmMetricGroup::Metric openSessionMetric = {
-      "open_session", { openSessionOkAttribute }, { openSessionMetricValue }
-    };
-
-    // Set the close session count metric.
-    DrmMetricGroup::Attribute closeSessionOkAttribute = {
-      "status", DrmMetricGroup::ValueType::INT64_TYPE, (int64_t) Status::OK, 0.0, ""
-    };
-    DrmMetricGroup::Value closeSessionMetricValue = {
-      "count", DrmMetricGroup::ValueType::INT64_TYPE, mCloseSessionOkCount, 0.0, ""
-    };
-    DrmMetricGroup::Metric closeSessionMetric = {
-      "close_session", { closeSessionOkAttribute }, { closeSessionMetricValue }
-    };
-
-    // Set the close session, not opened metric.
-    DrmMetricGroup::Attribute closeSessionNotOpenedAttribute = {
-      "status", DrmMetricGroup::ValueType::INT64_TYPE,
-      (int64_t) Status::ERROR_DRM_SESSION_NOT_OPENED, 0.0, ""
-    };
-    DrmMetricGroup::Value closeSessionNotOpenedMetricValue = {
-      "count", DrmMetricGroup::ValueType::INT64_TYPE, mCloseSessionNotOpenedCount, 0.0, ""
-    };
-    DrmMetricGroup::Metric closeSessionNotOpenedMetric = {
-      "close_session", { closeSessionNotOpenedAttribute }, { closeSessionNotOpenedMetricValue }
-    };
-
-    // Set the setPlaybackId metric.
-    std::vector<DrmMetricGroup::Attribute> sids;
-    std::vector<DrmMetricGroup::Value> playbackIds;
-    for (const auto&[key, value] : mPlaybackId) {
-        std::string sid(key.begin(), key.end());
-        DrmMetricGroup::Attribute sessionIdAttribute = {
-            "sid", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, sid };
-        sids.push_back(sessionIdAttribute);
-
-        DrmMetricGroup::Value playbackIdMetricValue = {
-            "playbackId", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, value };
-        playbackIds.push_back(playbackIdMetricValue);
-    }
-    DrmMetricGroup::Metric setPlaybackIdMetric = {
-            "set_playback_id", { sids }, { playbackIds }};
-
-    DrmMetricGroup metrics = {
-            { openSessionMetric, closeSessionMetric,
-              closeSessionNotOpenedMetric, setPlaybackIdMetric }};
-    _hidl_cb(Status::OK, hidl_vec<DrmMetricGroup>({metrics}));
-    return Void();
-}
-
-Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
-    std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
-    std::vector<KeySetId> keySetIds;
-    if (mMockError != Status_V1_2::OK) {
-        _hidl_cb(toStatus_1_0(mMockError), keySetIds);
-        return Void();
-    }
-    for (const auto& name : licenseNames) {
-        std::vector<uint8_t> keySetId(name.begin(), name.end());
-        keySetIds.push_back(keySetId);
-    }
-    _hidl_cb(Status::OK, keySetIds);
-    return Void();
-}
-
-
-Return<Status> DrmPlugin::removeOfflineLicense(const KeySetId& keySetId) {
-    if (mMockError != Status_V1_2::OK) {
-        return toStatus_1_0(mMockError);
-    }
-    std::string licenseName(keySetId.begin(), keySetId.end());
-    if (mFileHandle.DeleteLicense(licenseName)) {
-        return Status::OK;
-    }
-    return Status::BAD_VALUE;
-}
-
-Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
-        getOfflineLicenseState_cb _hidl_cb) {
-    std::string licenseName(keySetId.begin(), keySetId.end());
-    DeviceFiles::LicenseState state;
-    std::string license;
-    OfflineLicenseState hLicenseState;
-    if (mMockError != Status_V1_2::OK) {
-        _hidl_cb(toStatus_1_0(mMockError), OfflineLicenseState::UNKNOWN);
-    } else if (mFileHandle.RetrieveLicense(licenseName, &state, &license)) {
-        switch (state) {
-        case DeviceFiles::kLicenseStateActive:
-            hLicenseState = OfflineLicenseState::USABLE;
-            break;
-        case DeviceFiles::kLicenseStateReleasing:
-            hLicenseState = OfflineLicenseState::INACTIVE;
-            break;
-        case DeviceFiles::kLicenseStateUnknown:
-            hLicenseState = OfflineLicenseState::UNKNOWN;
-            break;
-        }
-        _hidl_cb(Status::OK, hLicenseState);
-    } else {
-        _hidl_cb(Status::BAD_VALUE, OfflineLicenseState::UNKNOWN);
-    }
-    return Void();
-}
-
-Return<void> DrmPlugin::getSecureStops(getSecureStops_cb _hidl_cb) {
-    mSecureStopLock.lock();
-    std::vector<SecureStop> stops;
-    for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
-        ClearkeySecureStop clearkeyStop = itr->second;
-        std::vector<uint8_t> stopVec;
-        stopVec.insert(stopVec.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
-        stopVec.insert(stopVec.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
-
-        SecureStop stop;
-        stop.opaqueData = toHidlVec(stopVec);
-        stops.push_back(stop);
-    }
-    mSecureStopLock.unlock();
-
-    _hidl_cb(Status::OK, stops);
-    return Void();
-}
-
-Return<void> DrmPlugin::getSecureStop(const hidl_vec<uint8_t>& secureStopId,
-        getSecureStop_cb _hidl_cb) {
-    std::vector<uint8_t> stopVec;
-
-    mSecureStopLock.lock();
-    auto itr = mSecureStops.find(toVector(secureStopId));
-    if (itr != mSecureStops.end()) {
-        ClearkeySecureStop clearkeyStop = itr->second;
-        stopVec.insert(stopVec.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
-        stopVec.insert(stopVec.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
-    }
-    mSecureStopLock.unlock();
-
-    SecureStop stop;
-    if (!stopVec.empty()) {
-        stop.opaqueData = toHidlVec(stopVec);
-        _hidl_cb(Status::OK, stop);
-    } else {
-        _hidl_cb(Status::BAD_VALUE, stop);
-    }
-    return Void();
-}
-
-Return<Status> DrmPlugin::releaseSecureStop(const hidl_vec<uint8_t>& secureStopId) {
-    return removeSecureStop(secureStopId);
-}
-
-Return<Status> DrmPlugin::releaseAllSecureStops() {
-    return removeAllSecureStops();
-}
-
-Return<void> DrmPlugin::getSecureStopIds(getSecureStopIds_cb _hidl_cb) {
-    mSecureStopLock.lock();
-    std::vector<SecureStopId> ids;
-    for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
-        ids.push_back(itr->first);
-    }
-    mSecureStopLock.unlock();
-
-    _hidl_cb(Status::OK, toHidlVec(ids));
-    return Void();
-}
-
-Return<Status> DrmPlugin::releaseSecureStops(const SecureStopRelease& ssRelease) {
-    // OpaqueData starts with a 4-byte decimal integer string
-    const size_t kFourBytesOffset = 4;
-    if (ssRelease.opaqueData.size() < kFourBytesOffset) {
-        ALOGE("Invalid secureStopRelease length");
-        return Status::BAD_VALUE;
-    }
-
-    Status status = Status::OK;
-    std::vector<uint8_t> input = toVector(ssRelease.opaqueData);
-
-    if (input.size() < kSecureStopIdSize + kFourBytesOffset) {
-        // A minimum-size SecureStopRelease must contain
-        // a 4-byte count and one secureStop id
-        ALOGE("Total size of secureStops is too short");
-        return Status::BAD_VALUE;
-    }
-
-    // The format of opaqueData is shared between the server
-    // and the drm service. The clearkey implementation consists of:
-    //    count - number of secure stops
-    //    list of fixed length secure stops
-    size_t countBufferSize = sizeof(uint32_t);
-    if (input.size() < countBufferSize) {
-        // SafetyNet logging
-        android_errorWriteLog(0x534e4554, "144766455");
-        return Status::BAD_VALUE;
-    }
-    uint32_t count = 0;
-    sscanf(reinterpret_cast<char*>(input.data()), "%04" PRIu32, &count);
-
-    // Avoid divide by 0 below.
-    if (count == 0) {
-        ALOGE("Invalid 0 secureStop count");
-        return Status::BAD_VALUE;
-    }
-
-    // Computes the fixed length secureStop size
-    size_t secureStopSize = (input.size() - kFourBytesOffset) / count;
-    if (secureStopSize < kSecureStopIdSize) {
-        // A valid secureStop contains the id plus data
-        ALOGE("Invalid secureStop size");
-        return Status::BAD_VALUE;
-    }
-    uint8_t* buffer = new uint8_t[secureStopSize];
-    size_t offset = kFourBytesOffset; // skip the count
-    for (size_t i = 0; i < count; ++i, offset += secureStopSize) {
-        memcpy(buffer, input.data() + offset, secureStopSize);
-
-        // A secureStop contains id+data, we only use the id for removal
-        std::vector<uint8_t> id(buffer, buffer + kSecureStopIdSize);
-        status = removeSecureStop(toHidlVec(id));
-        if (Status::OK != status) break;
-    }
-
-    delete[] buffer;
-    return status;
-}
-
-Return<Status> DrmPlugin::removeSecureStop(const hidl_vec<uint8_t>& secureStopId) {
-    Mutex::Autolock lock(mSecureStopLock);
-
-    if (1 != mSecureStops.erase(toVector(secureStopId))) {
-        return Status::BAD_VALUE;
-    }
-    return Status::OK;
-}
-
-Return<Status> DrmPlugin::removeAllSecureStops() {
-    Mutex::Autolock lock(mSecureStopLock);
-
-    mSecureStops.clear();
-    mNextSecureStopId = kSecureStopIdStart;
-    return Status::OK;
-}
-
-}  // namespace clearkey
-}  // namespace V1_4
-}  // namespace drm
-}  // namespace hardware
-}  // namespace android
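As a worked illustration of the opaqueData layout parsed by releaseSecureStops() above (a 4-character decimal count followed by fixed-length secure stops, each beginning with its id), here is a minimal standalone packer for that layout. The helper name and the 16-byte id size are assumptions for illustration, not part of the plugin.

```
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

// Illustrative constant; the plugin's real value is kSecureStopIdSize.
static constexpr size_t kIdSize = 16;

// Packs (id, data) pairs into a 4-character decimal count followed by
// fixed-length secure stops. Every stop must have the same total length
// so the parser can divide the payload evenly.
std::vector<uint8_t> packSecureStopRelease(
        const std::vector<std::pair<std::vector<uint8_t>,
                                    std::vector<uint8_t>>>& stops) {
    char count[5];
    std::snprintf(count, sizeof(count), "%04zu", stops.size());
    std::vector<uint8_t> out(count, count + 4);
    for (const auto& [id, data] : stops) {
        if (id.size() != kIdSize) return {};  // ids are fixed-size
        out.insert(out.end(), id.begin(), id.end());
        out.insert(out.end(), data.begin(), data.end());
    }
    return out;
}
```

The resulting buffer is what a SecureStopRelease::opaqueData field would carry; the fuzzer's makeSecureRelease() later in this change builds the same shape for a single stop.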
diff --git a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
deleted file mode 100644
index eccc843..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_InitDataParser"
-
-#include <algorithm>
-#include <utils/Log.h>
-
-#include "InitDataParser.h"
-
-#include "Base64.h"
-
-#include "ClearKeyUUID.h"
-#include "MimeType.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace {
-    const size_t kKeyIdSize = 16;
-    const size_t kSystemIdSize = 16;
-}
-
-std::vector<uint8_t> StrToVector(const std::string& str) {
-    std::vector<uint8_t> vec(str.begin(), str.end());
-    return vec;
-}
-
-Status InitDataParser::parse(const std::vector<uint8_t>& initData,
-        const std::string& mimeType,
-        V1_0::KeyType keyType,
-        std::vector<uint8_t>* licenseRequest) {
-    // Build a list of the key IDs
-    std::vector<const uint8_t*> keyIds;
-
-    if (mimeType == kIsoBmffVideoMimeType.c_str() ||
-        mimeType == kIsoBmffAudioMimeType.c_str() ||
-        mimeType == kCencInitDataFormat.c_str()) {
-        Status res = parsePssh(initData, &keyIds);
-        if (res != Status::OK) {
-            return res;
-        }
-    } else if (mimeType == kWebmVideoMimeType.c_str() ||
-        mimeType == kWebmAudioMimeType.c_str() ||
-        mimeType == kWebmInitDataFormat.c_str()) {
-        // WebM "init data" is just a single key ID
-        if (initData.size() != kKeyIdSize) {
-            return Status::ERROR_DRM_CANNOT_HANDLE;
-        }
-        keyIds.push_back(initData.data());
-    } else {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    if (keyType == V1_0::KeyType::RELEASE) {
-        // restore key
-    }
-
-    // Build the request
-    std::string requestJson = generateRequest(keyType, keyIds);
-    std::vector<uint8_t> requestJsonVec = StrToVector(requestJson);
-
-    licenseRequest->clear();
-    licenseRequest->insert(licenseRequest->end(), requestJsonVec.begin(), requestJsonVec.end());
-    return Status::OK;
-}
-
-Status InitDataParser::parsePssh(const std::vector<uint8_t>& initData,
-        std::vector<const uint8_t*>* keyIds) {
-    // Description of PSSH format:
-    // https://w3c.github.io/encrypted-media/format-registry/initdata/cenc.html
-    size_t readPosition = 0;
-
-    uint32_t expectedSize = initData.size();
-    const char psshIdentifier[4] = {'p', 's', 's', 'h'};
-    const uint8_t psshVersion1[4] = {1, 0, 0, 0};
-    uint32_t keyIdCount = 0;
-    size_t headerSize = sizeof(expectedSize) + sizeof(psshIdentifier) +
-                        sizeof(psshVersion1) + kSystemIdSize + sizeof(keyIdCount);
-    if (initData.size() < headerSize) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    // Validate size field
-    expectedSize = htonl(expectedSize);
-    if (memcmp(&initData[readPosition], &expectedSize,
-               sizeof(expectedSize)) != 0) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-    readPosition += sizeof(expectedSize);
-
-    // Validate PSSH box identifier
-    if (memcmp(&initData[readPosition], psshIdentifier,
-               sizeof(psshIdentifier)) != 0) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-    readPosition += sizeof(psshIdentifier);
-
-    // Validate EME version number
-    if (memcmp(&initData[readPosition], psshVersion1,
-               sizeof(psshVersion1)) != 0) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-    readPosition += sizeof(psshVersion1);
-
-    // Validate system ID
-    if (!clearkeydrm::isClearKeyUUID(&initData[readPosition])) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-    readPosition += kSystemIdSize;
-
-    // Read key ID count
-    memcpy(&keyIdCount, &initData[readPosition], sizeof(keyIdCount));
-    keyIdCount = ntohl(keyIdCount);
-    readPosition += sizeof(keyIdCount);
-
-    uint64_t psshSize = 0;
-    if (__builtin_mul_overflow(keyIdCount, kKeyIdSize, &psshSize) ||
-        __builtin_add_overflow(readPosition, psshSize, &psshSize) ||
-        psshSize != initData.size() - sizeof(uint32_t) /* DataSize(0) */) {
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    // Calculate the key ID offsets
-    for (uint32_t i = 0; i < keyIdCount; ++i) {
-        size_t keyIdPosition = readPosition + (i * kKeyIdSize);
-        keyIds->push_back(&initData[keyIdPosition]);
-    }
-    return Status::OK;
-}
-
-std::string InitDataParser::generateRequest(V1_0::KeyType keyType,
-        const std::vector<const uint8_t*>& keyIds) {
-    const std::string kRequestPrefix("{\"kids\":[");
-    const std::string kTemporarySession("],\"type\":\"temporary\"}");
-    const std::string kPersistentSession("],\"type\":\"persistent-license\"}");
-
-    std::string request(kRequestPrefix);
-    std::string encodedId;
-    for (size_t i = 0; i < keyIds.size(); ++i) {
-        encodedId.clear();
-        encodeBase64Url(keyIds[i], kKeyIdSize, &encodedId);
-        if (i != 0) {
-            request.append(",");
-        }
-        request.push_back('\"');
-        request.append(encodedId);
-        request.push_back('\"');
-    }
-    if (keyType == V1_0::KeyType::STREAMING) {
-        request.append(kTemporarySession);
-    } else if (keyType == V1_0::KeyType::OFFLINE ||
-                   keyType == V1_0::KeyType::RELEASE) {
-            request.append(kPersistentSession);
-    }
-
-    // Android's Base64 encoder produces padding. EME forbids padding.
-    const char kBase64Padding = '=';
-    request.erase(std::remove(request.begin(), request.end(), kBase64Padding), request.end());
-
-    return request;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
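As a concrete example of the request body built by generateRequest() above, a single 16-byte key id of all zeros produces the following JSON for a streaming key; key ids are base64url-encoded and the trailing '=' padding is stripped as required by EME. This sample is constructed here for illustration, not taken from the deleted file.

```
{"kids":["AAAAAAAAAAAAAAAAAAAAAA"],"type":"temporary"}
```

Offline and release requests use "persistent-license" as the type instead.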
diff --git a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
deleted file mode 100644
index 45cc775..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "hidl_JsonWebKey"
-
-#include <utils/Log.h>
-
-#include "JsonWebKey.h"
-
-#include "Base64.h"
-
-namespace {
-const std::string kBase64Padding("=");
-const std::string kKeysTag("keys");
-const std::string kKeyTypeTag("kty");
-const std::string kKeyTag("k");
-const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
-const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
-}
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-JsonWebKey::JsonWebKey() {
-}
-
-JsonWebKey::~JsonWebKey() {
-}
-
-/*
- * Parses a JSON Web Key Set string, initializes a KeyMap with key id:key
- * pairs from the JSON Web Key Set. Both key ids and keys are base64url
- * encoded. The KeyMap contains base64url decoded key id:key pairs.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet,
-        KeyMap* keys) {
-
-    keys->clear();
-
-    if (!parseJsonWebKeySet(jsonWebKeySet, &mJsonObjects)) {
-        return false;
-    }
-
-    // mJsonObjects[0] contains the entire JSON Web Key Set, including
-    // all the base64 encoded keys. Each key is also stored separately as
-    // a JSON object in mJsonObjects[1..n] where n is the total
-    // number of keys in the set.
-    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
-        return false;
-    }
-
-    std::string encodedKey, encodedKeyId;
-    std::vector<uint8_t> decodedKey, decodedKeyId;
-
-    // mJsonObjects[1] contains the first JSON Web Key in the set
-    for (size_t i = 1; i < mJsonObjects.size(); ++i) {
-        encodedKeyId.clear();
-        encodedKey.clear();
-
-        if (!parseJsonObject(mJsonObjects[i], &mTokens))
-            return false;
-
-        if (findKey(mJsonObjects[i], &encodedKeyId, &encodedKey)) {
-            if (encodedKeyId.empty() || encodedKey.empty()) {
-                ALOGE("Must have both key id and key in the JsonWebKey set.");
-                continue;
-            }
-
-            if (!decodeBase64String(encodedKeyId, &decodedKeyId)) {
-                ALOGE("Failed to decode key id(%s)", encodedKeyId.c_str());
-                continue;
-            }
-
-            if (!decodeBase64String(encodedKey, &decodedKey)) {
-                ALOGE("Failed to decode key(%s)", encodedKey.c_str());
-                continue;
-            }
-
-            keys->insert(std::pair<std::vector<uint8_t>,
-                    std::vector<uint8_t> >(decodedKeyId, decodedKey));
-        }
-    }
-    return true;
-}
-
-bool JsonWebKey::decodeBase64String(const std::string& encodedText,
-        std::vector<uint8_t>* decodedText) {
-
-    decodedText->clear();
-
-    // encodedText should not contain padding characters as per EME spec.
-    if (encodedText.find(kBase64Padding) != std::string::npos) {
-        return false;
-    }
-
-    // Since decodeBase64() requires padding characters,
-    // add them so the length of encodedText is exactly a multiple of 4.
-    int remainder = encodedText.length() % 4;
-    std::string paddedText(encodedText);
-    if (remainder > 0) {
-        for (int i = 0; i < 4 - remainder; ++i) {
-            paddedText.append(kBase64Padding);
-        }
-    }
-
-    sp<Buffer> buffer = decodeBase64(paddedText);
-    if (buffer == nullptr) {
-        ALOGE("Malformed base64 encoded content found.");
-        return false;
-    }
-
-    decodedText->insert(decodedText->end(), buffer->base(), buffer->base() + buffer->size());
-    return true;
-}
-
-bool JsonWebKey::findKey(const std::string& jsonObject, std::string* keyId,
-        std::string* encodedKey) {
-
-    std::string key, value;
-
-    // Only allow symmetric key, i.e. "kty":"oct" pair.
-    if (jsonObject.find(kKeyTypeTag) != std::string::npos) {
-        findValue(kKeyTypeTag, &value);
-        if (0 != value.compare(kSymmetricKeyValue))
-            return false;
-    }
-
-    if (jsonObject.find(kKeyIdTag) != std::string::npos) {
-        findValue(kKeyIdTag, keyId);
-    }
-
-    if (jsonObject.find(kKeyTag) != std::string::npos) {
-        findValue(kKeyTag, encodedKey);
-    }
-    return true;
-}
-
-void JsonWebKey::findValue(const std::string &key, std::string* value) {
-    value->clear();
-    const char* valueToken;
-    for (std::vector<std::string>::const_iterator nextToken = mTokens.begin();
-        nextToken != mTokens.end(); ++nextToken) {
-        if (0 == (*nextToken).compare(key)) {
-            if (nextToken + 1 == mTokens.end())
-                break;
-            valueToken = (*(nextToken + 1)).c_str();
-            value->assign(valueToken);
-            nextToken++;
-            break;
-        }
-    }
-}
-
-bool JsonWebKey::isJsonWebKeySet(const std::string& jsonObject) const {
-    if (jsonObject.find(kKeysTag) == std::string::npos) {
-        ALOGE("JSON Web Key does not contain keys.");
-        return false;
-    }
-    return true;
-}
-
-/*
- * Parses a JSON objects string and initializes a vector of tokens.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::parseJsonObject(const std::string& jsonObject,
-        std::vector<std::string>* tokens) {
-    jsmn_parser parser;
-
-    jsmn_init(&parser);
-    int numTokens = jsmn_parse(&parser,
-        jsonObject.c_str(), jsonObject.size(), nullptr, 0);
-    if (numTokens < 0) {
-        ALOGE("Parser returns error code=%d", numTokens);
-        return false;
-    }
-
-    unsigned int jsmnTokensSize = numTokens * sizeof(jsmntok_t);
-    mJsmnTokens.clear();
-    mJsmnTokens.resize(jsmnTokensSize);
-
-    jsmn_init(&parser);
-    int status = jsmn_parse(&parser, jsonObject.c_str(),
-        jsonObject.size(), mJsmnTokens.data(), numTokens);
-    if (status < 0) {
-        ALOGE("Parser returns error code=%d", status);
-        return false;
-    }
-
-    tokens->clear();
-    std::string token;
-    const char *pjs;
-    for (int j = 0; j < numTokens; ++j) {
-        pjs = jsonObject.c_str() + mJsmnTokens[j].start;
-        if (mJsmnTokens[j].type == JSMN_STRING ||
-                mJsmnTokens[j].type == JSMN_PRIMITIVE) {
-            token.assign(pjs, mJsmnTokens[j].end - mJsmnTokens[j].start);
-            tokens->push_back(token);
-        }
-    }
-    return true;
-}
-
-/*
- * Parses JSON Web Key Set string and initializes a vector of JSON objects.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::parseJsonWebKeySet(const std::string& jsonWebKeySet,
-        std::vector<std::string>* jsonObjects) {
-    if (jsonWebKeySet.empty()) {
-        ALOGE("Empty JSON Web Key");
-        return false;
-    }
-
-    // The jsmn parser only supports unicode encoding.
-    jsmn_parser parser;
-
-    // Computes the number of tokens. A token records the type and offset
-    // within the original string.
-    jsmn_init(&parser);
-    int numTokens = jsmn_parse(&parser,
-            jsonWebKeySet.c_str(), jsonWebKeySet.size(), nullptr, 0);
-    if (numTokens < 0) {
-        ALOGE("Parser returns error code=%d", numTokens);
-        return false;
-    }
-
-    unsigned int jsmnTokensSize = numTokens * sizeof(jsmntok_t);
-    mJsmnTokens.resize(jsmnTokensSize);
-
-    jsmn_init(&parser);
-    int status = jsmn_parse(&parser, jsonWebKeySet.c_str(),
-            jsonWebKeySet.size(), mJsmnTokens.data(), numTokens);
-    if (status < 0) {
-        ALOGE("Parser returns error code=%d", status);
-        return false;
-    }
-
-    std::string token;
-    const char *pjs;
-    for (int i = 0; i < numTokens; ++i) {
-        pjs = jsonWebKeySet.c_str() + mJsmnTokens[i].start;
-        if (mJsmnTokens[i].type == JSMN_OBJECT) {
-            token.assign(pjs, mJsmnTokens[i].end - mJsmnTokens[i].start);
-            jsonObjects->push_back(token);
-        }
-    }
-    return true;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
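For context, this is the shape of JSON Web Key Set that extractKeysFromJsonWebKeySet() above accepts: a "keys" array of symmetric ("oct") keys whose "kid" and "k" values are base64url-encoded without padding. The sample below is the decoded form of the validKeyResponse payload used by the fuzzer later in this change.

```
{"keys":[{"kty":"oct","kid":"YAYeAX5Hfod-V9ANHtANHg","k":"GoogleTestKeyBase64ggg"}]}
```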
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
deleted file mode 100644
index 56910be..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-
-#include <utils/Log.h>
-#include <string>
-
-#include "MemoryFileSystem.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-std::string MemoryFileSystem::GetFileName(const std::string& path) {
-    size_t index = path.find_last_of('/');
-    if (index != std::string::npos) {
-        return path.substr(index+1);
-    } else {
-        return path;
-    }
-}
-
-bool MemoryFileSystem::FileExists(const std::string& fileName) const {
-    auto result = mMemoryFileSystem.find(fileName);
-    return result != mMemoryFileSystem.end();
-}
-
-ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
-    auto result = mMemoryFileSystem.find(fileName);
-    if (result != mMemoryFileSystem.end()) {
-        return static_cast<ssize_t>(result->second.getFileSize());
-    } else {
-        ALOGE("Failed to get size for %s", fileName.c_str());
-        return -1;
-    }
-}
-
-std::vector<std::string> MemoryFileSystem::ListFiles() const {
-    std::vector<std::string> list;
-    for (const auto& filename : mMemoryFileSystem) {
-        list.push_back(filename.first);
-    }
-    return list;
-}
-
-size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
-    std::string key = GetFileName(path);
-    auto result = mMemoryFileSystem.find(key);
-    if (result != mMemoryFileSystem.end()) {
-        std::string serializedHashFile = result->second.getContent();
-        buffer->assign(serializedHashFile);
-        return buffer->size();
-    } else {
-        ALOGE("Failed to read from %s", path.c_str());
-        return -1;
-    }
-}
-
-size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
-    std::string key = GetFileName(path);
-    auto result = mMemoryFileSystem.find(key);
-    if (result != mMemoryFileSystem.end()) {
-        mMemoryFileSystem.erase(key);
-    }
-    mMemoryFileSystem.insert(std::pair<std::string, MemoryFile>(key, memoryFile));
-    return memoryFile.getFileSize();
-}
-
-bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
-    auto result = mMemoryFileSystem.find(fileName);
-    if (result != mMemoryFileSystem.end()) {
-        mMemoryFileSystem.erase(result);
-        return true;
-    } else {
-        ALOGE("Cannot find license to remove: %s", fileName.c_str());
-        return false;
-    }
-}
-
-bool MemoryFileSystem::RemoveAllFiles() {
-    mMemoryFileSystem.clear();
-    return mMemoryFileSystem.empty();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp b/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
deleted file mode 100644
index cf668d4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeySession"
-#include <utils/Log.h>
-
-#include "Session.h"
-#include "Utils.h"
-
-#include "AesCtrDecryptor.h"
-#include "InitDataParser.h"
-#include "JsonWebKey.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::KeyValue;
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_0::SubSample;
-using ::android::hardware::Return;
-using ::android::sp;
-
-using android::Mutex;
-
-Status Session::getKeyRequest(
-        const std::vector<uint8_t>& initData,
-        const std::string& mimeType,
-        V1_0::KeyType keyType,
-        std::vector<uint8_t>* keyRequest) const {
-    InitDataParser parser;
-    return parser.parse(initData, mimeType, keyType, keyRequest);
-}
-
-Status Session::provideKeyResponse(const std::vector<uint8_t>& response) {
-    std::string responseString(
-            reinterpret_cast<const char*>(response.data()), response.size());
-    KeyMap keys;
-
-    Mutex::Autolock lock(mMapLock);
-    JsonWebKey parser;
-    if (parser.extractKeysFromJsonWebKeySet(responseString, &keys)) {
-        for (auto &key : keys) {
-            std::string first(key.first.begin(), key.first.end());
-            std::string second(key.second.begin(), key.second.end());
-            mKeyMap.insert(std::pair<std::vector<uint8_t>,
-                    std::vector<uint8_t> >(key.first, key.second));
-        }
-        return Status::OK;
-    } else {
-        return Status::ERROR_DRM_UNKNOWN;
-    }
-}
-
-Status_V1_2 Session::decrypt(
-        const KeyId keyId, const Iv iv, const uint8_t* srcPtr,
-        uint8_t* destPtr, const std::vector<SubSample> subSamples,
-        size_t* bytesDecryptedOut) {
-    Mutex::Autolock lock(mMapLock);
-
-    if (getMockError() != Status_V1_2::OK) {
-        return getMockError();
-    }
-
-    std::vector<uint8_t> keyIdVector;
-    keyIdVector.clear();
-    keyIdVector.insert(keyIdVector.end(), keyId, keyId + kBlockSize);
-    std::map<std::vector<uint8_t>, std::vector<uint8_t> >::iterator itr;
-    itr = mKeyMap.find(keyIdVector);
-    if (itr == mKeyMap.end()) {
-        return Status_V1_2::ERROR_DRM_NO_LICENSE;
-    }
-
-    AesCtrDecryptor decryptor;
-    Status status = decryptor.decrypt(
-            itr->second /*key*/, iv, srcPtr, destPtr, subSamples,
-            subSamples.size(), bytesDecryptedOut);
-    return static_cast<Status_V1_2>(status);
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
deleted file mode 100644
index 88afcc4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeySessionLibrary"
-#include <utils/Log.h>
-
-#include "SessionLibrary.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::sp;
-
-Mutex SessionLibrary::sSingletonLock;
-SessionLibrary* SessionLibrary::sSingleton = NULL;
-
-SessionLibrary* SessionLibrary::get() {
-    Mutex::Autolock lock(sSingletonLock);
-
-    if (sSingleton == NULL) {
-        ALOGD("Instantiating Session Library Singleton.");
-        sSingleton = new SessionLibrary();
-    }
-
-    return sSingleton;
-}
-
-sp<Session> SessionLibrary::createSession() {
-    Mutex::Autolock lock(mSessionsLock);
-
-    char sessionIdRaw[16];
-    snprintf(sessionIdRaw, sizeof(sessionIdRaw), "%u", mNextSessionId);
-
-    mNextSessionId += 1;
-
-    std::vector<uint8_t> sessionId;
-    sessionId.insert(sessionId.end(), sessionIdRaw,
-            sessionIdRaw + sizeof(sessionIdRaw) / sizeof(uint8_t));
-
-    mSessions.insert(std::pair<std::vector<uint8_t>,
-            sp<Session> >(sessionId, new Session(sessionId)));
-    std::map<std::vector<uint8_t>, sp<Session> >::iterator itr =
-            mSessions.find(sessionId);
-    if (itr != mSessions.end()) {
-        return itr->second;
-    } else {
-        return nullptr;
-    }
-}
-
-sp<Session> SessionLibrary::findSession(
-        const std::vector<uint8_t>& sessionId) {
-    Mutex::Autolock lock(mSessionsLock);
-    std::map<std::vector<uint8_t>, sp<Session> >::iterator itr =
-            mSessions.find(sessionId);
-    if (itr != mSessions.end()) {
-        return itr->second;
-    } else {
-        return nullptr;
-    }
-}
-
-void SessionLibrary::destroySession(const sp<Session>& session) {
-    Mutex::Autolock lock(mSessionsLock);
-    mSessions.erase(session->sessionId());
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
deleted file mode 100644
index ec4517d..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,14 +0,0 @@
-service vendor.drm-clearkey-hal-1-2 /vendor/bin/hw/android.hardware.drm@1.2-service-lazy.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    disabled
-    oneshot
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
deleted file mode 100644
index 3b48cf2..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ /dev/null
@@ -1,13 +0,0 @@
-service vendor.drm-clearkey-hal-1-2 /vendor/bin/hw/android.hardware.drm@1.2-service.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    disabled
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
deleted file mode 100644
index 6e64978..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,16 +0,0 @@
-service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service-lazy.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
-    disabled
-    oneshot
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
deleted file mode 100644
index e302e1b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
+++ /dev/null
@@ -1,14 +0,0 @@
-service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
deleted file mode 100644
index 84a63a1..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,18 +0,0 @@
-service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service-lazy.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
-    interface android.hardware.drm@1.4::ICryptoFactory clearkey
-    interface android.hardware.drm@1.4::IDrmFactory clearkey
-    disabled
-    oneshot
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
deleted file mode 100644
index 649599e..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
+++ /dev/null
@@ -1,16 +0,0 @@
-service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service.clearkey
-    interface android.hardware.drm@1.0::ICryptoFactory clearkey
-    interface android.hardware.drm@1.0::IDrmFactory clearkey
-    interface android.hardware.drm@1.1::ICryptoFactory clearkey
-    interface android.hardware.drm@1.1::IDrmFactory clearkey
-    interface android.hardware.drm@1.2::ICryptoFactory clearkey
-    interface android.hardware.drm@1.2::IDrmFactory clearkey
-    interface android.hardware.drm@1.3::ICryptoFactory clearkey
-    interface android.hardware.drm@1.3::IDrmFactory clearkey
-    interface android.hardware.drm@1.4::ICryptoFactory clearkey
-    interface android.hardware.drm@1.4::IDrmFactory clearkey
-    class hal
-    user media
-    group media mediadrm
-    ioprio rt 4
-    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
deleted file mode 100644
index cb45460..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Fuzzer for android.hardware.drm@1.4-service.clearkey
-
-## Plugin Design Considerations
-The fuzzer plugin for android.hardware.drm@1.4-service.clearkey is designed based on the understanding of the
-source code and tries to achieve the following:
-
-##### Maximize code coverage
-The configuration parameters are not hardcoded, but instead selected based on
-incoming data. This ensures more code paths are reached by the fuzzer.
-
-android.hardware.drm@1.4-service.clearkey supports the following parameters:
-1. Security Level (parameter name: `securityLevel`)
-2. Mime Type (parameter name: `mimeType`)
-3. Key Type (parameter name: `keyType`)
-4. Crypto Mode (parameter name: `cryptoMode`)
-
-| Parameter| Valid Values| Configured Value|
-|------------- |-------------| ----- |
-| `securityLevel` | 0.`SecurityLevel::UNKNOWN` 1.`SecurityLevel::SW_SECURE_CRYPTO` 2.`SecurityLevel::SW_SECURE_DECODE` 3.`SecurityLevel::HW_SECURE_CRYPTO`  4.`SecurityLevel::HW_SECURE_DECODE` 5.`SecurityLevel::HW_SECURE_ALL`| Value obtained from FuzzedDataProvider in the range 0 to 5|
-| `mimeType` | 0.`video/mp4` 1.`video/mpeg` 2.`video/x-flv` 3.`video/mj2` 4.`video/3gp2` 5.`video/3gpp` 6.`video/3gpp2` 7.`audio/mp4` 8.`audio/mpeg` 9.`audio/aac` 10.`audio/3gp2` 11.`audio/3gpp` 12.`audio/3gpp2` 13.`audio/webm` 14.`video/webm` 15.`webm` 16.`cenc` 17.`video/unknown` 18.`audio/unknown`| Value obtained from FuzzedDataProvider in the range 0 to 18|
-| `keyType` | 0.`KeyType::OFFLINE` 1.`KeyType::STREAMING` 2.`KeyType::RELEASE` | Value obtained from FuzzedDataProvider in the range 0 to 2|
-| `cryptoMode` | 0.`Mode::UNENCRYPTED` 1.`Mode::AES_CTR` 2.`Mode::AES_CBC_CTS` 3.`Mode::AES_CBC` | Value obtained from FuzzedDataProvider in the range 0 to 3|
-
-This also ensures that the plugin is always deterministic for any given input.
-
-##### Maximize utilization of input data
-The plugin feeds the entire input data to the module.
-This ensures that the plugin tolerates any kind of input (empty, huge,
-malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
-chance of identifying vulnerabilities.
-
-## Build
-
-This describes the steps to build the clearkeyV1.4_fuzzer binary.
-
-### Android
-
-#### Steps to build
-Build the fuzzer
-```
-  $ mm -j$(nproc) clearkeyV1.4_fuzzer
-```
-#### Steps to run
-To run on device
-```
-  $ adb sync data
-  $ adb shell /data/fuzz/${TARGET_ARCH}/clearkeyV1.4_fuzzer/vendor/hw/clearkeyV1.4_fuzzer
-```
-
-## References:
- * http://llvm.org/docs/LibFuzzer.html
- * https://github.com/google/oss-fuzz
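A minimal sketch of the parameter-selection pattern described in the README above, using the same FuzzedDataProvider range-selection idiom the harness relies on. The helper name and table contents here are placeholders rather than the harness's own definitions.

```
#include <fuzzer/FuzzedDataProvider.h>

#include <cstddef>
#include <cstdint>

// Picks one entry of a fixed table based on the fuzz input, so every
// configured value is reachable and the choice is deterministic for a
// given input, as the README describes.
template <typename T, size_t N>
T pickFromTable(FuzzedDataProvider& fdp, const T (&table)[N]) {
    return table[fdp.ConsumeIntegralInRange<size_t>(0, N - 1)];
}

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    static const char* kMimeTypes[] = {"video/mp4", "audio/mp4", "cenc"};
    const char* mime = pickFromTable(fdp, kMimeTypes);
    (void)mime;  // A real harness would pass this into the plugin under test.
    return 0;
}
```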
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
deleted file mode 100644
index afe0e6c..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
+++ /dev/null
@@ -1,719 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#include <include/CreatePluginFactories.h>
-
-#include <android/hidl/allocator/1.0/IAllocator.h>
-#include <fuzzer/FuzzedDataProvider.h>
-#include <hidlmemory/mapping.h>
-#include <include/ClearKeyDrmProperties.h>
-#include <include/CryptoFactory.h>
-#include <include/CryptoPlugin.h>
-#include <include/DrmPlugin.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-namespace drm = ::android::hardware::drm;
-using namespace std;
-using namespace android;
-using ::android::sp;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hidl::allocator::V1_0::IAllocator;
-using ::android::hidl::memory::V1_0::IMemory;
-using drm::V1_0::BufferType;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::EventType;
-using drm::V1_0::ICryptoPlugin;
-using drm::V1_0::IDrmPlugin;
-using drm::V1_0::IDrmPluginListener;
-using drm::V1_0::KeyedVector;
-using drm::V1_0::KeyStatus;
-using drm::V1_0::KeyStatusType;
-using drm::V1_0::KeyType;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-using drm::V1_1::DrmMetricGroup;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::OfflineLicenseState;
-using drm::V1_4::clearkey::ICryptoFactory;
-using drm::V1_4::clearkey::IDrmFactory;
-using drm::V1_4::clearkey::kAlgorithmsKey;
-using drm::V1_4::clearkey::kClientIdKey;
-using drm::V1_4::clearkey::kDeviceIdKey;
-using drm::V1_4::clearkey::kDrmErrorTestKey;
-using drm::V1_4::clearkey::kListenerTestSupportKey;
-using drm::V1_4::clearkey::kMetricsKey;
-using drm::V1_4::clearkey::kPluginDescriptionKey;
-using drm::V1_4::clearkey::kVendorKey;
-using drm::V1_4::clearkey::kVersionKey;
-
-typedef ::android::hardware::hidl_vec<uint8_t> SessionId;
-typedef ::android::hardware::hidl_vec<uint8_t> SecureStopId;
-
-static const uint8_t kInvalidUUID[] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60,
-                                       0x70, 0x80, 0x10, 0x20, 0x30, 0x40,
-                                       0x50, 0x60, 0x70, 0x80};
-
-static const uint8_t kClearKeyUUID[] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85,
-                                        0xB3, 0xC9, 0x78, 0x1A, 0xB0, 0x30,
-                                        0xAF, 0x78, 0xD3, 0x0E};
-
-const SecurityLevel kSecurityLevel[] = {
-    SecurityLevel::UNKNOWN,          SecurityLevel::SW_SECURE_CRYPTO,
-    SecurityLevel::SW_SECURE_DECODE, SecurityLevel::HW_SECURE_CRYPTO,
-    SecurityLevel::HW_SECURE_DECODE, SecurityLevel::HW_SECURE_ALL};
-
-const char *kMimeType[] = {
-    "video/mp4",  "video/mpeg",  "video/x-flv",   "video/mj2",    "video/3gp2",
-    "video/3gpp", "video/3gpp2", "audio/mp4",     "audio/mpeg",   "audio/aac",
-    "audio/3gp2", "audio/3gpp",  "audio/3gpp2",   "audio/webm",   "video/webm",
-    "webm",       "cenc",        "video/unknown", "audio/unknown"};
-
-const char *kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
-
-const char *kMacAlgorithm[] = {"HmacSHA256", ""};
-
-const char *kRSAAlgorithm[] = {"RSASSA-PSS-SHA1", ""};
-
-const std::string kProperty[] = {kVendorKey,
-                                 kVersionKey,
-                                 kPluginDescriptionKey,
-                                 kAlgorithmsKey,
-                                 kListenerTestSupportKey,
-                                 kDrmErrorTestKey,
-                                 kDeviceIdKey,
-                                 kClientIdKey,
-                                 kMetricsKey,
-                                 "placeholder"};
-
-const KeyType kKeyType[] = {KeyType::OFFLINE, KeyType::STREAMING,
-                            KeyType::RELEASE};
-
-const Mode kCryptoMode[] = {Mode::UNENCRYPTED, Mode::AES_CTR, Mode::AES_CBC_CTS,
-                            Mode::AES_CBC};
-
-const hidl_vec<uint8_t> validInitData = {
-    // BMFF box header (4 bytes size + 'pssh')
-    0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
-    // full box header (version = 1 flags = 0)
-    0x01, 0x00, 0x00, 0x00,
-    // system id
-    0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c, 0x1e,
-    0x52, 0xe2, 0xfb, 0x4b,
-    // number of key ids
-    0x00, 0x00, 0x00, 0x01,
-    // key id
-    0x60, 0x06, 0x1e, 0x01, 0x7e, 0x47, 0x7e, 0x87, 0x7e, 0x57, 0xd0, 0x0d,
-    0x1e, 0xd0, 0x0d, 0x1e,
-    // size of data, must be zero
-    0x00, 0x00, 0x00, 0x00};
-
-const hidl_vec<uint8_t> validKeyResponse = {
-    0x7b, 0x22, 0x6b, 0x65, 0x79, 0x73, 0x22, 0x3a, 0x5b, 0x7b, 0x22,
-    0x6b, 0x74, 0x79, 0x22, 0x3a, 0x22, 0x6f, 0x63, 0x74, 0x22, 0x2c,
-    0x22, 0x6b, 0x69, 0x64, 0x22, 0x3a, 0x22, 0x59, 0x41, 0x59, 0x65,
-    0x41, 0x58, 0x35, 0x48, 0x66, 0x6f, 0x64, 0x2d, 0x56, 0x39, 0x41,
-    0x4e, 0x48, 0x74, 0x41, 0x4e, 0x48, 0x67, 0x22, 0x2c, 0x22, 0x6b,
-    0x22, 0x3a, 0x22, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x54, 0x65,
-    0x73, 0x74, 0x4b, 0x65, 0x79, 0x42, 0x61, 0x73, 0x65, 0x36, 0x34,
-    0x67, 0x67, 0x67, 0x22, 0x7d, 0x5d, 0x7d, 0x0a};
-
-const size_t kAESBlockSize = 16;
-const size_t kMaxStringLength = 100;
-const size_t kMaxSubSamples = 10;
-const size_t kMaxNumBytes = 1000;
-const size_t kSegmentIndex = 0;
-
-template <typename T, size_t size>
-T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
-  return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
-}
-
-class TestDrmPluginListener : public IDrmPluginListener {
-public:
-  TestDrmPluginListener() {}
-  virtual ~TestDrmPluginListener() {}
-
-  virtual Return<void> sendEvent(EventType /*eventType*/,
-                                 const hidl_vec<uint8_t> & /*sessionId*/,
-                                 const hidl_vec<uint8_t> & /*data*/) override {
-    return Return<void>();
-  }
-
-  virtual Return<void>
-  sendExpirationUpdate(const hidl_vec<uint8_t> & /*sessionId*/,
-                       int64_t /*expiryTimeInMS*/) override {
-    return Return<void>();
-  }
-
-  virtual Return<void>
-  sendKeysChange(const hidl_vec<uint8_t> & /*sessionId*/,
-                 const hidl_vec<KeyStatus> & /*keyStatusList*/,
-                 bool /*hasNewUsableKey*/) override {
-    return Return<void>();
-  }
-};
-
-class ClearKeyFuzzer {
-public:
-  ~ClearKeyFuzzer() { deInit(); }
-  bool init();
-  void process(const uint8_t *data, size_t size);
-
-private:
-  void deInit();
-  void invokeDrmPlugin(const uint8_t *data, size_t size);
-  void invokeCryptoPlugin(const uint8_t *data);
-  void invokeDrm(const uint8_t *data, size_t size);
-  void invokeCrypto(const uint8_t *data);
-  void invokeDrmDecryptEncryptAPI(const uint8_t *data, size_t size);
-  bool invokeDrmFactory();
-  bool invokeCryptoFactory();
-  void invokeDrmV1_4API();
-  void invokeDrmSetAlgorithmAPI();
-  void invokeDrmPropertyAPI();
-  void invokeDrmSecureStopAPI();
-  void invokeDrmOfflineLicenseAPI(const uint8_t *data, size_t size);
-  SessionId getSessionId();
-  SecureStopRelease makeSecureRelease(const SecureStop &stop);
-  sp<IDrmFactory> mDrmFactory = nullptr;
-  sp<ICryptoFactory> mCryptoFactory = nullptr;
-  sp<IDrmPlugin> mDrmPlugin = nullptr;
-  sp<drm::V1_1::IDrmPlugin> mDrmPluginV1_1 = nullptr;
-  sp<drm::V1_2::IDrmPlugin> mDrmPluginV1_2 = nullptr;
-  sp<drm::V1_4::IDrmPlugin> mDrmPluginV1_4 = nullptr;
-  sp<drm::V1_4::ICryptoPlugin> mCryptoPluginV1_4 = nullptr;
-  sp<ICryptoPlugin> mCryptoPlugin = nullptr;
-  FuzzedDataProvider *mFDP = nullptr;
-  SessionId mSessionId = {};
-  SessionId mSessionIdV1 = {};
-};
-
-void ClearKeyFuzzer::deInit() {
-  if (mDrmPluginV1_1) {
-    mDrmPluginV1_1->closeSession(mSessionIdV1);
-  }
-  if (mDrmPluginV1_2) {
-    mDrmPluginV1_2->closeSession(mSessionId);
-  }
-  mDrmFactory.clear();
-  mCryptoFactory.clear();
-  mDrmPlugin.clear();
-  mDrmPluginV1_1.clear();
-  mDrmPluginV1_2.clear();
-  mDrmPluginV1_4.clear();
-  mCryptoPlugin.clear();
-  mCryptoPluginV1_4.clear();
-  mSessionId = {};
-  mSessionIdV1 = {};
-}
-
-void ClearKeyFuzzer::invokeDrmV1_4API() {
-  mDrmPluginV1_4->requiresSecureDecoderDefault(
-      getValueFromArray(mFDP, kMimeType));
-  mDrmPluginV1_4->requiresSecureDecoder(
-      getValueFromArray(mFDP, kMimeType),
-      getValueFromArray(mFDP, kSecurityLevel));
-  mDrmPluginV1_4->setPlaybackId(
-      mSessionId, mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
-  drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
-      [&]([[maybe_unused]] drm::V1_4::Status status,
-          [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
-  mDrmPluginV1_4->getLogMessages(cb);
-}
-
-void ClearKeyFuzzer::invokeDrmSetAlgorithmAPI() {
-  const hidl_string cipherAlgo =
-      mFDP->ConsumeBool()
-          ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
-          : hidl_string(kCipherAlgorithm[mFDP->ConsumeBool()]);
-  mDrmPluginV1_2->setCipherAlgorithm(mSessionId, cipherAlgo);
-
-  const hidl_string macAlgo =
-      mFDP->ConsumeBool()
-          ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
-          : hidl_string(kMacAlgorithm[mFDP->ConsumeBool()]);
-  mDrmPluginV1_2->setMacAlgorithm(mSessionId, macAlgo);
-}
-
-void ClearKeyFuzzer::invokeDrmPropertyAPI() {
-  mDrmPluginV1_2->setPropertyString(
-      hidl_string(getValueFromArray(mFDP, kProperty)), hidl_string("value"));
-
-  hidl_string stringValue;
-  mDrmPluginV1_2->getPropertyString(
-      getValueFromArray(mFDP, kProperty),
-      [&](Status status, const hidl_string &hValue) {
-        if (status == Status::OK) {
-          stringValue = hValue;
-        }
-      });
-
-  hidl_vec<uint8_t> value = {};
-  mDrmPluginV1_2->setPropertyByteArray(
-      hidl_string(getValueFromArray(mFDP, kProperty)), value);
-
-  hidl_vec<uint8_t> byteValue;
-  mDrmPluginV1_2->getPropertyByteArray(
-      getValueFromArray(mFDP, kProperty),
-      [&](Status status, const hidl_vec<uint8_t> &hValue) {
-        if (status == Status::OK) {
-          byteValue = hValue;
-        }
-      });
-}
-
-SessionId ClearKeyFuzzer::getSessionId() {
-  SessionId emptySessionId = {};
-  return mFDP->ConsumeBool() ? mSessionId : emptySessionId;
-}
-
-void ClearKeyFuzzer::invokeDrmDecryptEncryptAPI(const uint8_t *data,
-                                                size_t size) {
-  uint32_t currSessions, maximumSessions;
-  mDrmPluginV1_2->getNumberOfSessions(
-      [&](Status status, uint32_t hCurrentSessions, uint32_t hMaxSessions) {
-        if (status == Status::OK) {
-          currSessions = hCurrentSessions;
-          maximumSessions = hMaxSessions;
-        }
-      });
-
-  HdcpLevel connected, maximum;
-  mDrmPluginV1_2->getHdcpLevels([&](Status status,
-                                    const HdcpLevel &hConnectedLevel,
-                                    const HdcpLevel &hMaxLevel) {
-    if (status == Status::OK) {
-      connected = hConnectedLevel;
-      maximum = hMaxLevel;
-    }
-  });
-
-  drm::V1_2::HdcpLevel connectedV1_2, maximumV1_2;
-  mDrmPluginV1_2->getHdcpLevels_1_2(
-      [&](drm::V1_2::Status status, const drm::V1_2::HdcpLevel &connectedLevel,
-          const drm::V1_2::HdcpLevel &maxLevel) {
-        if (status == drm::V1_2::Status::OK) {
-          connectedV1_2 = connectedLevel;
-          maximumV1_2 = maxLevel;
-        }
-      });
-
-  SecurityLevel securityLevel;
-  mDrmPluginV1_2->getSecurityLevel(mSessionId,
-                                   [&](Status status, SecurityLevel hLevel) {
-                                     if (status == Status::OK) {
-                                       securityLevel = hLevel;
-                                     }
-                                   });
-
-  hidl_vec<DrmMetricGroup> metrics;
-  mDrmPluginV1_2->getMetrics(
-      [&](Status status, hidl_vec<DrmMetricGroup> hMetricGroups) {
-        if (status == Status::OK) {
-          metrics = hMetricGroups;
-        }
-      });
-
-  hidl_string certificateType;
-  hidl_string certificateAuthority;
-  mDrmPluginV1_2->getProvisionRequest(certificateType, certificateAuthority,
-                                      [&]([[maybe_unused]] Status status,
-                                          const hidl_vec<uint8_t> &,
-                                          const hidl_string &) {});
-
-  mDrmPluginV1_2->getProvisionRequest_1_2(
-      certificateType, certificateAuthority,
-      [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
-          const hidl_string &) {});
-
-  hidl_vec<uint8_t> response;
-  mDrmPluginV1_2->provideProvisionResponse(
-      response, [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &,
-                    const hidl_vec<uint8_t> &) {});
-
-  hidl_vec<uint8_t> initData = {};
-  if (mFDP->ConsumeBool()) {
-    initData = validInitData;
-  } else {
-    initData.setToExternal(const_cast<uint8_t *>(data), kAESBlockSize);
-  }
-  hidl_string mimeType = getValueFromArray(mFDP, kMimeType);
-  KeyType keyType = mFDP->ConsumeBool()
-                        ? static_cast<KeyType>(mFDP->ConsumeIntegral<size_t>())
-                        : getValueFromArray(mFDP, kKeyType);
-  KeyedVector optionalParameters;
-  mDrmPluginV1_2->getKeyRequest_1_2(
-      mSessionId, initData, mimeType, keyType, optionalParameters,
-      [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
-          drm::V1_1::KeyRequestType, const hidl_string &) {});
-  mDrmPluginV1_1->getKeyRequest_1_1(
-      mSessionIdV1, initData, mimeType, keyType, optionalParameters,
-      [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
-          drm::V1_1::KeyRequestType, const hidl_string &) {});
-  hidl_vec<uint8_t> emptyInitData = {};
-  mDrmPlugin->getKeyRequest(
-      mSessionId, mFDP->ConsumeBool() ? initData : emptyInitData, mimeType,
-      keyType, optionalParameters,
-      [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
-          drm::V1_0::KeyRequestType, const hidl_string &) {});
-
-  hidl_vec<uint8_t> keyResponse = {};
-  if (mFDP->ConsumeBool()) {
-    keyResponse = validKeyResponse;
-  } else {
-    keyResponse.setToExternal(const_cast<uint8_t *>(data), size);
-  }
-  hidl_vec<uint8_t> keySetId;
-  hidl_vec<uint8_t> emptyKeyResponse = {};
-  mDrmPluginV1_2->provideKeyResponse(
-      getSessionId(), mFDP->ConsumeBool() ? keyResponse : emptyKeyResponse,
-      [&](Status status, const hidl_vec<uint8_t> &hKeySetId) {
-        if (status == Status::OK) {
-          keySetId = hKeySetId;
-        }
-      });
-
-  mDrmPluginV1_2->restoreKeys(getSessionId(), keySetId);
-
-  mDrmPluginV1_2->queryKeyStatus(
-      getSessionId(),
-      [&]([[maybe_unused]] Status status, KeyedVector /* info */) {});
-
-  hidl_vec<uint8_t> keyId, input, iv;
-  keyId.setToExternal(const_cast<uint8_t *>(data), size);
-  input.setToExternal(const_cast<uint8_t *>(data), size);
-  iv.setToExternal(const_cast<uint8_t *>(data), size);
-  mDrmPluginV1_2->encrypt(
-      getSessionId(), keyId, input, iv,
-      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
-  mDrmPluginV1_2->decrypt(
-      getSessionId(), keyId, input, iv,
-      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
-  hidl_vec<uint8_t> message;
-  message.setToExternal(const_cast<uint8_t *>(data), size);
-  mDrmPluginV1_2->sign(
-      getSessionId(), keyId, message,
-      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
-  hidl_vec<uint8_t> signature;
-  signature.setToExternal(const_cast<uint8_t *>(data), size);
-  mDrmPluginV1_2->verify(getSessionId(), keyId, message, signature,
-                         [&]([[maybe_unused]] Status status, bool) {});
-
-  hidl_vec<uint8_t> wrappedKey;
-  signature.setToExternal(const_cast<uint8_t *>(data), size);
-  mDrmPluginV1_2->signRSA(
-      getSessionId(), kRSAAlgorithm[mFDP->ConsumeBool()], message, wrappedKey,
-      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
-  mDrmPluginV1_2->removeKeys(getSessionId());
-}
-
-/**
- * Helper function to create a secure release message for
- * a secure stop. The clearkey secure stop release format
- * is just a count followed by the secure stop opaque data.
- */
-SecureStopRelease ClearKeyFuzzer::makeSecureRelease(const SecureStop &stop) {
-  std::vector<uint8_t> stopData = stop.opaqueData;
-  std::vector<uint8_t> buffer;
-  std::string count = "0001";
-
-  auto it = buffer.insert(buffer.begin(), count.begin(), count.end());
-  buffer.insert(it + count.size(), stopData.begin(), stopData.end());
-  SecureStopRelease release = {.opaqueData = hidl_vec<uint8_t>(buffer)};
-  return release;
-}
-
-void ClearKeyFuzzer::invokeDrmSecureStopAPI() {
-  SecureStopId ssid;
-  mDrmPluginV1_2->getSecureStop(
-      ssid, [&]([[maybe_unused]] Status status, const SecureStop &) {});
-
-  mDrmPluginV1_2->getSecureStopIds(
-      [&]([[maybe_unused]] Status status,
-          [[maybe_unused]] const hidl_vec<SecureStopId> &secureStopIds) {});
-
-  SecureStopRelease release;
-  mDrmPluginV1_2->getSecureStops(
-      [&]([[maybe_unused]] Status status, const hidl_vec<SecureStop> &stops) {
-        if (stops.size() > 0) {
-          release = makeSecureRelease(
-              stops[mFDP->ConsumeIntegralInRange<size_t>(0, stops.size() - 1)]);
-        }
-      });
-
-  mDrmPluginV1_2->releaseSecureStops(release);
-
-  mDrmPluginV1_2->removeSecureStop(ssid);
-
-  mDrmPluginV1_2->removeAllSecureStops();
-
-  mDrmPluginV1_2->releaseSecureStop(ssid);
-
-  mDrmPluginV1_2->releaseAllSecureStops();
-}
-
-void ClearKeyFuzzer::invokeDrmOfflineLicenseAPI(const uint8_t *data,
-                                                size_t size) {
-  hidl_vec<KeySetId> keySetIds = {};
-  mDrmPluginV1_2->getOfflineLicenseKeySetIds(
-      [&](Status status, const hidl_vec<KeySetId> &hKeySetIds) {
-        if (status == Status::OK) {
-          keySetIds = hKeySetIds;
-        }
-      });
-
-  OfflineLicenseState licenseState;
-  KeySetId keySetId = {};
-  if (keySetIds.size() > 0) {
-    keySetId = keySetIds[mFDP->ConsumeIntegralInRange<size_t>(
-        0, keySetIds.size() - 1)];
-  } else {
-    keySetId.setToExternal(const_cast<uint8_t *>(data), size);
-  }
-  mDrmPluginV1_2->getOfflineLicenseState(
-      keySetId, [&](Status status, OfflineLicenseState hLicenseState) {
-        if (status == Status::OK) {
-          licenseState = hLicenseState;
-        }
-      });
-
-  mDrmPluginV1_2->removeOfflineLicense(keySetId);
-}
-
-void ClearKeyFuzzer::invokeDrmPlugin(const uint8_t *data, size_t size) {
-  SecurityLevel secLevel =
-      mFDP->ConsumeBool()
-          ? getValueFromArray(mFDP, kSecurityLevel)
-          : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
-  mDrmPluginV1_1->openSession_1_1(
-      secLevel, [&]([[maybe_unused]] Status status, const SessionId &id) {
-        mSessionIdV1 = id;
-      });
-  mDrmPluginV1_2->openSession([&]([[maybe_unused]] Status status,
-                                  const SessionId &id) { mSessionId = id; });
-
-  sp<TestDrmPluginListener> listener = new TestDrmPluginListener();
-  mDrmPluginV1_2->setListener(listener);
-  const hidl_vec<KeyStatus> keyStatusList = {
-      {{1}, KeyStatusType::USABLE},
-      {{2}, KeyStatusType::EXPIRED},
-      {{3}, KeyStatusType::OUTPUTNOTALLOWED},
-      {{4}, KeyStatusType::STATUSPENDING},
-      {{5}, KeyStatusType::INTERNALERROR},
-  };
-  mDrmPluginV1_2->sendKeysChange(mSessionId, keyStatusList, true);
-
-  invokeDrmV1_4API();
-  invokeDrmSetAlgorithmAPI();
-  invokeDrmPropertyAPI();
-  invokeDrmDecryptEncryptAPI(data, size);
-  invokeDrmSecureStopAPI();
-  invokeDrmOfflineLicenseAPI(data, size);
-}
-
-void ClearKeyFuzzer::invokeCryptoPlugin(const uint8_t *data) {
-  mCryptoPlugin->requiresSecureDecoderComponent(
-      getValueFromArray(mFDP, kMimeType));
-
-  const uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
-  const uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
-  mCryptoPlugin->notifyResolution(width, height);
-
-  mCryptoPlugin->setMediaDrmSession(mSessionId);
-
-  size_t totalSize = 0;
-  const size_t numSubSamples =
-      mFDP->ConsumeIntegralInRange<size_t>(1, kMaxSubSamples);
-
-  const Pattern pattern = {0, 0};
-  hidl_vec<SubSample> subSamples;
-  subSamples.resize(numSubSamples);
-
-  for (size_t i = 0; i < numSubSamples; ++i) {
-    const uint32_t clearBytes =
-        mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
-    const uint32_t encryptedBytes =
-        mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
-    subSamples[i].numBytesOfClearData = clearBytes;
-    subSamples[i].numBytesOfEncryptedData = encryptedBytes;
-    totalSize += subSamples[i].numBytesOfClearData;
-    totalSize += subSamples[i].numBytesOfEncryptedData;
-  }
-
-  // The first totalSize bytes of shared memory is the encrypted
-  // input, the second totalSize bytes is the decrypted output.
-  size_t memoryBytes = totalSize * 2;
-
-  sp<IAllocator> ashmemAllocator = IAllocator::getService("ashmem");
-  if (!ashmemAllocator.get()) {
-    return;
-  }
-
-  hidl_memory hidlMemory;
-  ashmemAllocator->allocate(memoryBytes, [&]([[maybe_unused]] bool success,
-                                             const hidl_memory &memory) {
-    mCryptoPlugin->setSharedBufferBase(memory, kSegmentIndex);
-    hidlMemory = memory;
-  });
-
-  sp<IMemory> mappedMemory = mapMemory(hidlMemory);
-  if (!mappedMemory.get()) {
-    return;
-  }
-  mCryptoPlugin->setSharedBufferBase(hidlMemory, kSegmentIndex);
-
-  uint32_t srcBufferId =
-      mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
-  const SharedBuffer sourceBuffer = {
-      .bufferId = srcBufferId, .offset = 0, .size = totalSize};
-
-  BufferType type = mFDP->ConsumeBool() ? BufferType::SHARED_MEMORY
-                                        : BufferType::NATIVE_HANDLE;
-  uint32_t destBufferId =
-      mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
-  const DestinationBuffer destBuffer = {
-      .type = type,
-      {.bufferId = destBufferId, .offset = totalSize, .size = totalSize},
-      .secureMemory = nullptr};
-
-  const uint64_t offset = 0;
-  uint32_t bytesWritten = 0;
-  hidl_array<uint8_t, kAESBlockSize> keyId =
-      hidl_array<uint8_t, kAESBlockSize>(data);
-  hidl_array<uint8_t, kAESBlockSize> iv =
-      hidl_array<uint8_t, kAESBlockSize>(data);
-  Mode mode = getValueFromArray(mFDP, kCryptoMode);
-  mCryptoPlugin->decrypt(
-      mFDP->ConsumeBool(), keyId, iv, mode, pattern, subSamples, sourceBuffer,
-      offset, destBuffer,
-      [&]([[maybe_unused]] Status status, uint32_t count,
-          [[maybe_unused]] string detailedError) { bytesWritten = count; });
-  drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
-      [&]([[maybe_unused]] drm::V1_4::Status status,
-          [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
-  mCryptoPluginV1_4->getLogMessages(cb);
-}
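
invokeCryptoPlugin above carves one ashmem allocation of 2 * totalSize bytes into two equal regions: the encrypted input at offset 0 and the decrypted output at offset totalSize. A minimal sketch of that layout arithmetic with plain stand-in structs (Region and layoutSharedBuffer are illustrative names; the real types are the drm@1.0 SharedBuffer/DestinationBuffer):

    #include <cstddef>
    #include <cstdint>

    // Plain stand-in for the SharedBuffer fields used above.
    struct Region {
        uint32_t bufferId;
        uint64_t offset;
        uint64_t size;
    };

    // One shared allocation of 2 * totalSize bytes: the first half holds the
    // encrypted input, the second half receives the decrypted output.
    void layoutSharedBuffer(uint32_t bufferId, size_t totalSize,
                            Region* source, Region* destination) {
        *source      = {bufferId, 0, totalSize};
        *destination = {bufferId, totalSize, totalSize};
    }
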
-
-bool ClearKeyFuzzer::invokeDrmFactory() {
-  hidl_string packageName(
-      mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
-  hidl_string mimeType(getValueFromArray(mFDP, kMimeType));
-  SecurityLevel securityLevel =
-      mFDP->ConsumeBool()
-          ? getValueFromArray(mFDP, kSecurityLevel)
-          : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
-  const hidl_array<uint8_t, 16> uuid =
-      mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
-  mDrmFactory->isCryptoSchemeSupported_1_2(uuid, mimeType, securityLevel);
-  mDrmFactory->createPlugin(
-      uuid, packageName, [&](Status status, const sp<IDrmPlugin> &plugin) {
-        if (status == Status::OK) {
-          mDrmPlugin = plugin.get();
-          mDrmPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mDrmPlugin);
-          mDrmPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mDrmPlugin);
-          mDrmPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mDrmPlugin);
-        }
-      });
-
-  std::vector<hidl_array<uint8_t, 16>> supportedSchemes;
-  mDrmFactory->getSupportedCryptoSchemes(
-      [&](const hidl_vec<hidl_array<uint8_t, 16>> &schemes) {
-        for (const auto &scheme : schemes) {
-          supportedSchemes.push_back(scheme);
-        }
-      });
-
-  if (!(mDrmPlugin && mDrmPluginV1_1 && mDrmPluginV1_2 && mDrmPluginV1_4)) {
-    return false;
-  }
-  return true;
-}
-
-bool ClearKeyFuzzer::invokeCryptoFactory() {
-  const hidl_array<uint8_t, 16> uuid =
-      mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
-  mCryptoFactory->createPlugin(
-      uuid, mSessionId, [this](Status status, const sp<ICryptoPlugin> &plugin) {
-        if (status == Status::OK) {
-          mCryptoPlugin = plugin;
-          mCryptoPluginV1_4 = drm::V1_4::ICryptoPlugin::castFrom(mCryptoPlugin);
-        }
-      });
-
-  if (!mCryptoPlugin && !mCryptoPluginV1_4) {
-    return false;
-  }
-  return true;
-}
-
-void ClearKeyFuzzer::invokeDrm(const uint8_t *data, size_t size) {
-  if (!invokeDrmFactory()) {
-    return;
-  }
-  invokeDrmPlugin(data, size);
-}
-
-void ClearKeyFuzzer::invokeCrypto(const uint8_t *data) {
-  if (!invokeCryptoFactory()) {
-    return;
-  }
-  invokeCryptoPlugin(data);
-}
-
-void ClearKeyFuzzer::process(const uint8_t *data, size_t size) {
-  mFDP = new FuzzedDataProvider(data, size);
-  invokeDrm(data, size);
-  invokeCrypto(data);
-  delete mFDP;
-}
-
-bool ClearKeyFuzzer::init() {
-  mCryptoFactory =
-      android::hardware::drm::V1_4::clearkey::createCryptoFactory();
-  mDrmFactory = android::hardware::drm::V1_4::clearkey::createDrmFactory();
-  if (!mDrmFactory && !mCryptoFactory) {
-    return false;
-  }
-  return true;
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  if (size < kAESBlockSize) {
-    return 0;
-  }
-  ClearKeyFuzzer clearKeyFuzzer;
-  if (clearKeyFuzzer.init()) {
-    clearKeyFuzzer.process(data, size);
-  }
-  return 0;
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h b/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
deleted file mode 100644
index 97794f7..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_AES_CTR_DECRYPTOR_H_
-#define CLEARKEY_AES_CTR_DECRYPTOR_H_
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_0::SubSample;
-
-class AesCtrDecryptor {
-public:
-    AesCtrDecryptor() {}
-
-    Status decrypt(const std::vector<uint8_t>& key, const Iv iv,
-            const uint8_t* source, uint8_t* destination,
-            const std::vector<SubSample> subSamples, size_t numSubSamples,
-            size_t* bytesDecryptedOut);
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(AesCtrDecryptor);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_AES_CTR_DECRYPTOR_H_
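
AesCtrDecryptor::decrypt consumes a SubSample table in which every entry is a clear run followed by an encrypted run. A hedged sketch of that walk over the buffer, with plain structs and a caller-supplied transform standing in for the real AES-CTR step (walkSubSamples is an illustrative name, not the plugin's code):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct SubSample {                  // stand-in for drm@1.0 SubSample
        uint32_t numBytesOfClearData;
        uint32_t numBytesOfEncryptedData;
    };

    // Copy clear runs through untouched and hand encrypted runs to the
    // transform; the real decryptor would run AES-CTR here.
    size_t walkSubSamples(const std::vector<SubSample>& subSamples,
                          const uint8_t* src, uint8_t* dst,
                          void (*transform)(const uint8_t*, uint8_t*, size_t)) {
        size_t offset = 0;
        for (const SubSample& ss : subSamples) {
            std::memcpy(dst + offset, src + offset, ss.numBytesOfClearData);
            offset += ss.numBytesOfClearData;
            transform(src + offset, dst + offset, ss.numBytesOfEncryptedData);
            offset += ss.numBytesOfEncryptedData;
        }
        return offset;  // total bytes consumed from src and written to dst
    }
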
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h b/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
deleted file mode 100644
index 2349f23..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef BASE_64_H_
-
-#define BASE_64_H_
-
-#include <android/hardware/drm/1.0/types.h>
-
-#include "Buffer.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-struct Buffer;
-
-sp<Buffer> decodeBase64(const std::string &s);
-void encodeBase64(const void *data, size_t size, std::string *out);
-
-void encodeBase64Url(const void *data, size_t size, std::string *out);
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif  // BASE_64_H_
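
Base64.h declares both a standard and a URL-safe encoder. The URL-safe alphabet only swaps '+' and '/' for '-' and '_' and usually drops the '=' padding; a small standalone illustration of that mapping (toBase64Url is an illustrative helper, not the header's implementation):

    #include <string>

    // Rewrite a standard base64 string in URL-safe form:
    // '+' -> '-', '/' -> '_', trailing '=' padding removed.
    std::string toBase64Url(std::string s) {
        for (char& c : s) {
            if (c == '+') c = '-';
            else if (c == '/') c = '_';
        }
        while (!s.empty() && s.back() == '=') s.pop_back();
        return s;
    }
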
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h b/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
deleted file mode 100644
index 66aaa73..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef BUFFER_H_
-#define BUFFER_H_
-
-#include <android/hardware/drm/1.0/types.h>
-#include <utils/RefBase.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-struct Buffer : public RefBase {
-    explicit Buffer(size_t capacity);
-
-    uint8_t *base() { return reinterpret_cast<uint8_t *>(mData); }
-    uint8_t *data() { return reinterpret_cast<uint8_t *>(mData) + mRangeOffset; }
-    size_t capacity() const { return mCapacity; }
-    size_t size() const { return mRangeLength; }
-    size_t offset() const { return mRangeOffset; }
-
-protected:
-    virtual ~Buffer();
-
-private:
-    void *mData;
-    size_t mCapacity;
-    size_t mRangeOffset;
-    size_t mRangeLength;
-
-    bool mOwnsData;
-
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Buffer);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif  // BUFFER_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
deleted file mode 100644
index 8e47c45..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_PROPERTIES_H_
-#define CLEARKEY_DRM_PROPERTIES_H_
-
-#include <string.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-static const std::string kVendorKey("vendor");
-static const std::string kVendorValue("Google");
-static const std::string kVersionKey("version");
-static const std::string kVersionValue("1.2");
-static const std::string kPluginDescriptionKey("description");
-static const std::string kPluginDescriptionValue("ClearKey CDM");
-static const std::string kAlgorithmsKey("algorithms");
-static const std::string kAlgorithmsValue("");
-static const std::string kListenerTestSupportKey("listenerTestSupport");
-static const std::string kListenerTestSupportValue("true");
-static const std::string kDrmErrorTestKey("drmErrorTest");
-static const std::string kDrmErrorTestValue("");
-static const std::string kResourceContentionValue("resourceContention");
-static const std::string kLostStateValue("lostState");
-static const std::string kFrameTooLargeValue("frameTooLarge");
-static const std::string kInvalidStateValue("invalidState");
-
-static const std::string kDeviceIdKey("deviceId");
-static const uint8_t kTestDeviceIdData[] =
-        {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
-         0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf};
-
-// settable byte array property
-static const std::string kClientIdKey("clientId");
-
-// TODO stub out metrics for nw
-static const std::string kMetricsKey("metrics");
-static const uint8_t kMetricsData[] = { 0 };
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_PROPERTIES_H_
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
deleted file mode 100644
index cd18029..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_MACROS_H_
-#define CLEARKEY_MACROS_H_
-
-#include <android/hardware/drm/1.2/types.h>
-
-#include <map>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::KeyValue;
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::hidl_vec;
-
-const uint8_t kBlockSize = 16; //AES_BLOCK_SIZE;
-typedef uint8_t KeyId[kBlockSize];
-typedef uint8_t Iv[kBlockSize];
-
-typedef ::android::hardware::drm::V1_0::SubSample SubSample;
-typedef std::map<std::vector<uint8_t>, std::vector<uint8_t> > KeyMap;
-
-#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN(TypeName) \
-  TypeName(const TypeName&) = delete;      \
-  void operator=(const TypeName&) = delete;
-
-#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(TypeName) \
-  TypeName() = delete;                     \
-  TypeName(const TypeName&) = delete;      \
-  void operator=(const TypeName&) = delete;
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_MACROS_H_
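
The CLEARKEY_DISALLOW_COPY_AND_ASSIGN macros above delete a class's copy operations (the _AND_NEW variant also deletes the default constructor); the clearkey headers in this change invoke them from a private section. A quick illustration of what the expansion amounts to (Widget is a made-up class):

    // Roughly what CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Widget) leaves behind:
    class Widget {
    public:
        Widget() = default;
    private:
        Widget(const Widget&) = delete;          // copy construction is a compile error
        void operator=(const Widget&) = delete;  // so is copy assignment
    };
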
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h b/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
deleted file mode 100644
index d4a8a17..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
-#define CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
-
-#include <android/hardware/drm/1.4/ICryptoFactory.h>
-#include <android/hardware/drm/1.4/IDrmFactory.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_4::ICryptoFactory;
-using ::android::hardware::drm::V1_4::IDrmFactory;
-
-extern "C" {
-    IDrmFactory* createDrmFactory();
-    ICryptoFactory* createCryptoFactory();
-}
-
-}  // namespace clearkey
-}  // namespace V1_4
-}  // namespace drm
-}  // namespace hardware
-}  // namespace android
-#endif // CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
deleted file mode 100644
index e6b541f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CRYPTO_FACTORY_H_
-#define CLEARKEY_CRYPTO_FACTORY_H_
-
-#include <android/hardware/drm/1.0/ICryptoPlugin.h>
-#include <android/hardware/drm/1.4/ICryptoFactory.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_4::ICryptoFactory;
-using ::android::hardware::drm::V1_0::ICryptoPlugin;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::Return;
-
-struct CryptoFactory : public ICryptoFactory {
-    CryptoFactory() {}
-    virtual ~CryptoFactory() {}
-
-    Return<bool> isCryptoSchemeSupported(const hidl_array<uint8_t, 16>& uuid)
-            override;
-
-    Return<void> createPlugin(
-            const hidl_array<uint8_t, 16>& uuid,
-            const hidl_vec<uint8_t>& initData,
-            createPlugin_cb _hidl_cb) override;
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoFactory);
-
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_CRYPTO_FACTORY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
deleted file mode 100644
index b272a83..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CRYPTO_PLUGIN_H_
-#define CLEARKEY_CRYPTO_PLUGIN_H_
-
-#include <android/hardware/drm/1.4/ICryptoPlugin.h>
-#include <android/hidl/memory/1.0/IMemory.h>
-
-#include <mutex>
-
-#include "ClearKeyTypes.h"
-#include "Session.h"
-#include "Utils.h"
-
-namespace {
-    static const size_t KEY_ID_SIZE = 16;
-    static const size_t KEY_IV_SIZE = 16;
-}
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hidl::memory::V1_0::IMemory;
-using ::android::sp;
-
-typedef drm::V1_2::Status Status_V1_2;
-
-struct CryptoPlugin : public drm::V1_4::ICryptoPlugin {
-    explicit CryptoPlugin(const hidl_vec<uint8_t>& sessionId) {
-        mInitStatus = setMediaDrmSession(sessionId);
-    }
-    virtual ~CryptoPlugin() {}
-
-    Return<bool> requiresSecureDecoderComponent(const hidl_string& mime) {
-        UNUSED(mime);
-        return false;
-    }
-
-    Return<void> notifyResolution(uint32_t width, uint32_t height) {
-        UNUSED(width);
-        UNUSED(height);
-        return Void();
-    }
-
-    Return<void> decrypt(
-            bool secure,
-            const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
-            const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
-            Mode mode,
-            const Pattern& pattern,
-            const hidl_vec<SubSample>& subSamples,
-            const SharedBuffer& source,
-            uint64_t offset,
-            const DestinationBuffer& destination,
-            decrypt_cb _hidl_cb);
-
-    Return<void> decrypt_1_2(
-            bool secure,
-            const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
-            const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
-            Mode mode,
-            const Pattern& pattern,
-            const hidl_vec<SubSample>& subSamples,
-            const SharedBuffer& source,
-            uint64_t offset,
-            const DestinationBuffer& destination,
-            decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // use unique_lock
-
-    Return<void> setSharedBufferBase(const hidl_memory& base,
-            uint32_t bufferId);
-
-    Return<Status> setMediaDrmSession(const hidl_vec<uint8_t>& sessionId);
-
-    Return<Status> getInitStatus() const { return mInitStatus; }
-
-    Return<void> getLogMessages(
-            getLogMessages_cb _hidl_cb);
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
-
-    std::mutex mSharedBufferLock;
-    std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
-    sp<Session> mSession;
-    Status mInitStatus;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_CRYPTO_PLUGIN_H_
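
CryptoPlugin keys its shared-buffer map by bufferId and marks it GUARDED_BY(mSharedBufferLock); decrypt_1_2 opts out of thread-safety analysis because it takes that lock through a unique_lock. A minimal standalone sketch of the locking pattern (BufferRegistry and the std::vector payload are stand-ins; the real map holds sp<IMemory>):

    #include <cstdint>
    #include <map>
    #include <mutex>
    #include <vector>

    // Every reader and writer of the registry goes through the same mutex,
    // which is what the GUARDED_BY annotation documents.
    class BufferRegistry {
    public:
        void setBase(uint32_t bufferId, std::vector<uint8_t> base) {
            std::unique_lock<std::mutex> lock(mLock);
            mBuffers[bufferId] = std::move(base);
        }
        bool hasBuffer(uint32_t bufferId) {
            std::unique_lock<std::mutex> lock(mLock);
            return mBuffers.count(bufferId) != 0;
        }
    private:
        std::mutex mLock;
        std::map<uint32_t, std::vector<uint8_t>> mBuffers;
    };
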
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h b/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
deleted file mode 100644
index 6466ac3..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-#ifndef CLEARKEY_DEVICE_FILES_H_
-#define CLEARKEY_DEVICE_FILES_H_
-
-#include <errno.h>
-#include <stdio.h>
-#include <unistd.h>
-
-#include <set>
-#include <string>
-#include <vector>
-
-#include "protos/DeviceFiles.pb.h"
-#include "ClearKeyTypes.h"
-#include "MemoryFileSystem.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_2::clearkey::OfflineFile;
-
-class DeviceFiles {
- public:
-    typedef enum {
-        kLicenseStateUnknown,
-        kLicenseStateActive,
-        kLicenseStateReleasing,
-    } LicenseState;
-
-    DeviceFiles() {};
-    virtual ~DeviceFiles() {};
-
-    virtual bool StoreLicense(const std::string& keySetId, LicenseState state,
-            const std::string& keyResponse);
-
-    virtual bool RetrieveLicense(
-            const std::string& key_set_id, LicenseState* state, std::string* offlineLicense);
-
-    virtual bool LicenseExists(const std::string& keySetId);
-
-    virtual std::vector<std::string> ListLicenses() const;
-
-    virtual bool DeleteLicense(const std::string& keySetId);
-
-    virtual bool DeleteAllLicenses();
-
- private:
-    bool FileExists(const std::string& path) const;
-    ssize_t GetFileSize(const std::string& fileName) const;
-    bool RemoveFile(const std::string& fileName);
-
-    bool RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile);
-    bool StoreFileRaw(const std::string& fileName, const std::string& serializedFile);
-    bool StoreFileWithHash(const std::string& fileName, const std::string& serializedFile);
-
-    MemoryFileSystem mFileHandle;
-
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DeviceFiles);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif  // CLEARKEY_DEVICE_FILES_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
deleted file mode 100644
index fea1ec8..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_FACTORY_H_
-#define CLEARKEY_DRM_FACTORY_H_
-
-#include <android/hardware/drm/1.4/IDrmPlugin.h>
-#include <android/hardware/drm/1.4/IDrmFactory.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::hidl_string;
-using ::android::hardware::Return;
-
-struct DrmFactory : public IDrmFactory {
-    DrmFactory() {}
-    virtual ~DrmFactory() {}
-
-    Return<bool> isCryptoSchemeSupported(const hidl_array<uint8_t, 16>& uuid)
-            override;
-
-    Return<bool> isCryptoSchemeSupported_1_2(const hidl_array<uint8_t, 16>& uuid,
-                                             const hidl_string& mimeType,
-                                             SecurityLevel level) override;
-
-    Return<bool> isContentTypeSupported(const hidl_string &mimeType)
-            override;
-
-    Return<void> createPlugin(
-            const hidl_array<uint8_t, 16>& uuid,
-            const hidl_string& appPackageName,
-            createPlugin_cb _hidl_cb) override;
-
-    Return<void> getSupportedCryptoSchemes(
-            getSupportedCryptoSchemes_cb _hidl_cb) override;
-
-    Return<void> debug(const hidl_handle& fd, const hidl_vec<hidl_string>& args);
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DrmFactory);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_FACTORY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
deleted file mode 100644
index cb5c9fe..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ /dev/null
@@ -1,447 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_PLUGIN_H_
-#define CLEARKEY_DRM_PLUGIN_H_
-
-#include <android/hardware/drm/1.4/IDrmPlugin.h>
-#include <android/hardware/drm/1.2/IDrmPluginListener.h>
-
-#include <map>
-#include <stdio.h>
-
-#include <utils/List.h>
-
-#include "DeviceFiles.h"
-#include "SessionLibrary.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::EventType;
-using drm::V1_0::IDrmPluginListener;
-using drm::V1_0::KeyRequestType;
-using drm::V1_0::KeyStatus;
-using drm::V1_0::KeyType;
-using drm::V1_0::KeyValue;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SecureStopId;
-using drm::V1_0::SessionId;
-using drm::V1_0::Status;
-using drm::V1_1::DrmMetricGroup;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::OfflineLicenseState;
-using drm::V1_4::clearkey::DeviceFiles;
-using drm::V1_4::clearkey::Session;
-using drm::V1_4::clearkey::SessionLibrary;
-using drm::V1_4::IDrmPlugin;
-
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
-typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
-typedef drm::V1_2::KeyStatus KeyStatus_V1_2;
-typedef drm::V1_2::Status Status_V1_2;
-typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
-
-struct DrmPlugin : public IDrmPlugin {
-    explicit DrmPlugin(SessionLibrary* sessionLibrary);
-
-    virtual ~DrmPlugin() { mFileHandle.DeleteAllLicenses(); }
-
-    Return<void> openSession(openSession_cb _hidl_cb) override;
-    Return<void> openSession_1_1(SecurityLevel securityLevel,
-            openSession_cb _hidl_cb) override;
-
-    Return<Status> closeSession(const hidl_vec<uint8_t>& sessionId) override;
-
-    Return<void> getKeyRequest(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_cb _hidl_cb) override;
-
-    Return<void> getKeyRequest_1_1(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_1_1_cb _hidl_cb) override;
-
-    Return<void> getKeyRequest_1_2(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& initData,
-        const hidl_string& mimeType,
-        KeyType keyType,
-        const hidl_vec<KeyValue>& optionalParameters,
-        getKeyRequest_1_2_cb _hidl_cb) override;
-
-    Return<void> provideKeyResponse(
-        const hidl_vec<uint8_t>& scope,
-        const hidl_vec<uint8_t>& response,
-        provideKeyResponse_cb _hidl_cb) override;
-
-    Return<Status> removeKeys(const hidl_vec<uint8_t>& sessionId) {
-        if (sessionId.size() == 0) {
-            return Status::BAD_VALUE;
-        }
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    Return<Status> restoreKeys(
-        const hidl_vec<uint8_t>& sessionId,
-        const hidl_vec<uint8_t>& keySetId) override;
-
-    Return<void> queryKeyStatus(
-        const hidl_vec<uint8_t>& sessionId,
-        queryKeyStatus_cb _hidl_cb) override;
-
-    Return<void> getProvisionRequest(
-        const hidl_string& certificateType,
-        const hidl_string& certificateAuthority,
-        getProvisionRequest_cb _hidl_cb) {
-        UNUSED(certificateType);
-        UNUSED(certificateAuthority);
-
-        hidl_string defaultUrl;
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), defaultUrl);
-        return Void();
-    }
-
-    Return<void> getProvisionRequest_1_2(
-        const hidl_string& certificateType,
-        const hidl_string& certificateAuthority,
-        getProvisionRequest_1_2_cb _hidl_cb) {
-        UNUSED(certificateType);
-        UNUSED(certificateAuthority);
-
-        hidl_string defaultUrl;
-        _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), defaultUrl);
-        return Void();
-    }
-
-    Return<void> provideProvisionResponse(
-        const hidl_vec<uint8_t>& response,
-        provideProvisionResponse_cb _hidl_cb) {
-
-        if (response.size() == 0) {
-            _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>(), hidl_vec<uint8_t>());
-            return Void();
-        }
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), hidl_vec<uint8_t>());
-        return Void();
-    }
-
-    Return<void> getHdcpLevels(getHdcpLevels_cb _hidl_cb) {
-        HdcpLevel connectedLevel = HdcpLevel::HDCP_NONE;
-        HdcpLevel maxLevel = HdcpLevel::HDCP_NO_OUTPUT;
-        _hidl_cb(Status::OK, connectedLevel, maxLevel);
-        return Void();
-    }
-
-    Return<void> getHdcpLevels_1_2(getHdcpLevels_1_2_cb _hidl_cb) {
-        HdcpLevel_V1_2 connectedLevel = HdcpLevel_V1_2::HDCP_NONE;
-        HdcpLevel_V1_2 maxLevel = HdcpLevel_V1_2::HDCP_NO_OUTPUT;
-        _hidl_cb(Status_V1_2::OK, connectedLevel, maxLevel);
-        return Void();
-    }
-
-    Return<void> getNumberOfSessions(getNumberOfSessions_cb _hidl_cb) override;
-
-    Return<void> getSecurityLevel(const hidl_vec<uint8_t>& sessionId,
-            getSecurityLevel_cb _hidl_cb) override;
-
-    Return<void> getMetrics(getMetrics_cb _hidl_cb) override;
-
-    Return<void> getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) override;
-
-    Return<Status> removeOfflineLicense(const KeySetId &keySetId) override;
-
-    Return<void> getOfflineLicenseState(const KeySetId &keySetId,
-            getOfflineLicenseState_cb _hidl_cb) override;
-
-    Return<void> getPropertyString(
-        const hidl_string& name,
-        getPropertyString_cb _hidl_cb) override;
-
-    Return<void> getPropertyByteArray(
-        const hidl_string& name,
-        getPropertyByteArray_cb _hidl_cb) override;
-
-    Return<Status> setPropertyString(
-            const hidl_string& name, const hidl_string& value) override;
-
-    Return<Status> setPropertyByteArray(
-            const hidl_string& name, const hidl_vec<uint8_t>& value) override;
-
-    Return<void> getLogMessages(
-        getLogMessages_cb _hidl_cb) override;
-
-    Return<Status> setPlaybackId(
-        const hidl_vec<uint8_t>& sessionId,
-        const hidl_string& playbackId) override;
-
-    Return<bool> requiresSecureDecoder(
-            const hidl_string& mime, SecurityLevel level) override;
-
-    Return<bool> requiresSecureDecoderDefault(const hidl_string& mime) override;
-
-    Return<Status> setCipherAlgorithm(
-            const hidl_vec<uint8_t>& sessionId, const hidl_string& algorithm) {
-        if (sessionId.size() == 0 || algorithm.size() == 0) {
-            return Status::BAD_VALUE;
-        }
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    Return<Status> setMacAlgorithm(
-            const hidl_vec<uint8_t>& sessionId, const hidl_string& algorithm) {
-        if (sessionId.size() == 0 || algorithm.size() == 0) {
-            return Status::BAD_VALUE;
-        }
-        return Status::ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    Return<void> encrypt(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<uint8_t>& keyId,
-            const hidl_vec<uint8_t>& input,
-            const hidl_vec<uint8_t>& iv,
-            encrypt_cb _hidl_cb) {
-        if (sessionId.size() == 0 || keyId.size() == 0 ||
-                input.size() == 0 || iv.size() == 0) {
-            _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
-            return Void();
-        }
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
-        return Void();
-    }
-
-    Return<void> decrypt(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<uint8_t>& keyId,
-            const hidl_vec<uint8_t>& input,
-            const hidl_vec<uint8_t>& iv,
-            decrypt_cb _hidl_cb) {
-        if (sessionId.size() == 0 || keyId.size() == 0 ||
-                input.size() == 0 || iv.size() == 0) {
-            _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
-            return Void();
-        }
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
-        return Void();
-    }
-
-    Return<void> sign(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<uint8_t>& keyId,
-            const hidl_vec<uint8_t>& message,
-            sign_cb _hidl_cb) {
-        if (sessionId.size() == 0 || keyId.size() == 0 ||
-                message.size() == 0) {
-            _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
-            return Void();
-        }
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
-        return Void();
-    }
-
-    Return<void> verify(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<uint8_t>& keyId,
-            const hidl_vec<uint8_t>& message,
-            const hidl_vec<uint8_t>& signature,
-            verify_cb _hidl_cb) {
-
-        if (sessionId.size() == 0 || keyId.size() == 0 ||
-                message.size() == 0 || signature.size() == 0) {
-            _hidl_cb(Status::BAD_VALUE, false);
-            return Void();
-        }
-        _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, false);
-        return Void();
-    }
-
-    Return<void> signRSA(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_string& algorithm,
-            const hidl_vec<uint8_t>& message,
-            const hidl_vec<uint8_t>& wrappedKey,
-            signRSA_cb _hidl_cb) {
-        if (sessionId.size() == 0 || algorithm.size() == 0 ||
-                message.size() == 0 || wrappedKey.size() == 0) {
-             _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
-             return Void();
-         }
-         _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
-         return Void();
-    }
-
-    Return<void> setListener(const sp<IDrmPluginListener>& listener) {
-        mListener = listener;
-        mListenerV1_2 = IDrmPluginListener_V1_2::castFrom(listener);
-        return Void();
-    };
-
-    Return<void> sendEvent(
-            EventType eventType,
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<uint8_t>& data) {
-        if (mListenerV1_2 != NULL) {
-            mListenerV1_2->sendEvent(eventType, sessionId, data);
-        } else if (mListener != NULL) {
-            mListener->sendEvent(eventType, sessionId, data);
-        } else {
-            ALOGE("Null event listener, event not sent");
-        }
-        return Void();
-    }
-
-    Return<void> sendExpirationUpdate(
-            const hidl_vec<uint8_t>& sessionId,
-            int64_t expiryTimeInMS) {
-        if (mListenerV1_2 != NULL) {
-            mListenerV1_2->sendExpirationUpdate(sessionId, expiryTimeInMS);
-        } else if (mListener != NULL) {
-            mListener->sendExpirationUpdate(sessionId, expiryTimeInMS);
-        } else {
-            ALOGE("Null event listener, event not sent");
-        }
-        return Void();
-    }
-
-    Return<void> sendKeysChange(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey) {
-        if (mListenerV1_2 != NULL) {
-            mListenerV1_2->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
-        } else if (mListener != NULL) {
-            mListener->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
-        } else {
-            ALOGE("Null event listener, event not sent");
-        }
-        return Void();
-    }
-
-    Return<void> sendKeysChange_1_2(
-            const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<KeyStatus_V1_2>& keyStatusList, bool hasNewUsableKey) {
-        if (mListenerV1_2 != NULL) {
-            mListenerV1_2->sendKeysChange_1_2(sessionId, keyStatusList, hasNewUsableKey);
-        }
-        return Void();
-    }
-
-    Return<void> sendSessionLostState(
-            const hidl_vec<uint8_t>& sessionId) {
-        if (mListenerV1_2 != NULL) {
-            mListenerV1_2->sendSessionLostState(sessionId);
-        }
-        return Void();
-    }
-
-    Return<void> getSecureStops(getSecureStops_cb _hidl_cb);
-
-    Return<void> getSecureStop(const hidl_vec<uint8_t>& secureStopId,
-            getSecureStop_cb _hidl_cb);
-
-    Return<Status> releaseSecureStop(const hidl_vec<uint8_t>& ssRelease);
-
-    Return<Status> releaseAllSecureStops();
-
-    Return<void> getSecureStopIds(getSecureStopIds_cb _hidl_cb);
-
-    Return<Status> releaseSecureStops(const SecureStopRelease& ssRelease);
-
-    Return<Status> removeSecureStop(const hidl_vec<uint8_t>& secureStopId);
-
-    Return<Status> removeAllSecureStops();
-
-private:
-    void initProperties();
-    void installSecureStop(const hidl_vec<uint8_t>& sessionId);
-    bool makeKeySetId(std::string* keySetId);
-    void setPlayPolicy();
-
-    Return<Status> setSecurityLevel(const hidl_vec<uint8_t>& sessionId,
-            SecurityLevel level);
-
-    Status_V1_2 getKeyRequestCommon(const hidl_vec<uint8_t>& scope,
-            const hidl_vec<uint8_t>& initData,
-            const hidl_string& mimeType,
-            KeyType keyType,
-            const hidl_vec<KeyValue>& optionalParameters,
-            std::vector<uint8_t> *request,
-            KeyRequestType_V1_1 *getKeyRequestType,
-            std::string *defaultUrl);
-
-    struct ClearkeySecureStop {
-        std::vector<uint8_t> id;
-        std::vector<uint8_t> data;
-    };
-
-    std::map<std::vector<uint8_t>, ClearkeySecureStop> mSecureStops;
-    std::vector<KeyValue> mPlayPolicy;
-    std::map<std::string, std::string> mStringProperties;
-    std::map<std::string, std::vector<uint8_t> > mByteArrayProperties;
-    std::map<std::string, std::vector<uint8_t> > mReleaseKeysMap;
-    std::map<std::vector<uint8_t>, std::string> mPlaybackId;
-    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel;
-    sp<IDrmPluginListener> mListener;
-    sp<IDrmPluginListener_V1_2> mListenerV1_2;
-    SessionLibrary *mSessionLibrary;
-    int64_t mOpenSessionOkCount;
-    int64_t mCloseSessionOkCount;
-    int64_t mCloseSessionNotOpenedCount;
-    uint32_t mNextSecureStopId;
-    android::Mutex mPlayPolicyLock;
-
-    // set by property to mock error scenarios
-    Status_V1_2 mMockError;
-
-    void processMockError(const sp<Session> &session) {
-        session->setMockError(mMockError);
-        mMockError = Status_V1_2::OK;
-    }
-
-    DeviceFiles mFileHandle;
-    Mutex mSecureStopLock;
-
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_PLUGIN_H_
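
DrmPlugin holds both a 1.0 and a 1.2 listener handle and prefers the newer one when delivering events; sendEvent, sendExpirationUpdate and sendKeysChange all share that fallback shape. A condensed standalone sketch of the dispatch (ListenerV1_0, ListenerV1_2 and dispatchEvent are illustrative names, not the HIDL interfaces):

    #include <cstdio>
    #include <memory>

    struct ListenerV1_0 {
        virtual ~ListenerV1_0() = default;
        virtual void sendEvent(int eventType) = 0;
    };
    struct ListenerV1_2 : ListenerV1_0 { /* adds the 1.2-only callbacks */ };

    // Prefer the 1.2 listener when the registered listener supports it,
    // fall back to 1.0, and log when neither is set.
    void dispatchEvent(const std::shared_ptr<ListenerV1_2>& l12,
                       const std::shared_ptr<ListenerV1_0>& l10, int eventType) {
        if (l12) {
            l12->sendEvent(eventType);
        } else if (l10) {
            l10->sendEvent(eventType);
        } else {
            std::fprintf(stderr, "Null event listener, event not sent\n");
        }
    }
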
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h b/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
deleted file mode 100644
index 59338c9..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *            http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_INIT_DATA_PARSER_H_
-#define CLEARKEY_INIT_DATA_PARSER_H_
-
-#include <android/hardware/drm/1.0/types.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-
-class InitDataParser {
-public:
-    InitDataParser() {}
-
-    Status parse(const std::vector<uint8_t>& initData,
-            const std::string& mimeType,
-            V1_0::KeyType keyType,
-            std::vector<uint8_t>* licenseRequest);
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(InitDataParser);
-
-    Status parsePssh(const std::vector<uint8_t>& initData,
-            std::vector<const uint8_t*>* keyIds);
-
-    std::string generateRequest(V1_0::KeyType keyType,
-            const std::vector<const uint8_t*>& keyIds);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_INIT_DATA_PARSER_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h b/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
deleted file mode 100644
index 40a2d74..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef CLEARKEY_JSON_WEB_KEY_H_
-#define CLEARKEY_JSON_WEB_KEY_H_
-
-#include "jsmn.h"
-#include "Utils.h"
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-class JsonWebKey {
- public:
-    JsonWebKey();
-    virtual ~JsonWebKey();
-
-    bool extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet,
-            KeyMap* keys);
-
- private:
-    std::vector<jsmntok_t> mJsmnTokens;
-    std::vector<std::string> mJsonObjects;
-    std::vector<std::string> mTokens;
-
-    bool decodeBase64String(const std::string& encodedText,
-            std::vector<uint8_t>* decodedText);
-    bool findKey(const std::string& jsonObject, std::string* keyId,
-            std::string* encodedKey);
-    void findValue(const std::string &key, std::string* value);
-    bool isJsonWebKeySet(const std::string& jsonObject) const;
-    bool parseJsonObject(const std::string& jsonObject,
-            std::vector<std::string>* tokens);
-    bool parseJsonWebKeySet(const std::string& jsonWebKeySet,
-            std::vector<std::string>* jsonObjects);
-
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(JsonWebKey);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif  // CLEARKEY_JSON_WEB_KEY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
deleted file mode 100644
index 1d98860..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-#ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
-#define CLEARKEY_MEMORY_FILE_SYSTEM_H_
-
-#include <map>
-#include <string>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-// Using android file system requires clearkey plugin to update
-// its sepolicy. However, we are unable to update sepolicy for
-// older vendor partitions. To provide backward compatibility,
-// clearkey plugin implements a very simple file system in memory.
-// This memory file system does not support directory structure.
-class MemoryFileSystem {
- public:
-    struct MemoryFile {
-        std::string fileName;  // excludes path
-        std::string content;
-        size_t fileSize;
-
-        std::string getContent() const { return content; }
-        size_t getFileSize() const { return fileSize; }
-        void setContent(const std::string& file) { content = file; }
-        void setFileName(const std::string& name) { fileName = name; }
-        void setFileSize(size_t size) {
-            content.resize(size); fileSize = size;
-        }
-    };
-
-    MemoryFileSystem() {};
-    virtual ~MemoryFileSystem() {};
-
-    bool FileExists(const std::string& fileName) const;
-    ssize_t GetFileSize(const std::string& fileName) const;
-    std::vector<std::string> ListFiles() const;
-    size_t Read(const std::string& pathName, std::string* buffer);
-    bool RemoveAllFiles();
-    bool RemoveFile(const std::string& fileName);
-    size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
-
- private:
-    // License file name is made up of a unique keySetId, therefore,
-    // the filename can be used as the key to locate licenses in the
-    // memory file system.
-    std::map<std::string, MemoryFile> mMemoryFileSystem;
-
-    std::string GetFileName(const std::string& path);
-
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(MemoryFileSystem);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif  // CLEARKEY_MEMORY_FILE_SYSTEM_H_
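
The comments in MemoryFileSystem.h explain why licenses live in a flat, in-memory map keyed by file name (which is derived from the keySetId) instead of on disk. A minimal standalone sketch of that store (MemoryStore is an illustrative name; the plugin's MemoryFile additionally tracks an explicit fileSize):

    #include <map>
    #include <string>

    // Directory-less store: the license file name is the lookup key.
    class MemoryStore {
    public:
        void write(const std::string& name, const std::string& content) {
            mFiles[name] = content;
        }
        bool read(const std::string& name, std::string* out) const {
            auto it = mFiles.find(name);
            if (it == mFiles.end()) return false;
            *out = it->second;
            return true;
        }
        bool remove(const std::string& name) { return mFiles.erase(name) != 0; }
    private:
        std::map<std::string, std::string> mFiles;
    };
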
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h b/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
deleted file mode 100644
index 05cb8c8..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_SESSION_H_
-#define CLEARKEY_SESSION_H_
-
-#include <utils/Mutex.h>
-#include <utils/RefBase.h>
-#include <vector>
-
-#include "ClearKeyTypes.h"
-
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-typedef drm::V1_2::Status Status_V1_2;
-
-class Session : public RefBase {
-public:
-    explicit Session(const std::vector<uint8_t>& sessionId)
-        : mSessionId(sessionId), mMockError(Status_V1_2::OK) {}
-    virtual ~Session() {}
-
-    const std::vector<uint8_t>& sessionId() const { return mSessionId; }
-
-    Status getKeyRequest(
-            const std::vector<uint8_t>& initDataType,
-            const std::string& mimeType,
-            V1_0::KeyType keyType,
-            std::vector<uint8_t>* keyRequest) const;
-
-    Status provideKeyResponse(
-            const std::vector<uint8_t>& response);
-
-    Status_V1_2 decrypt(
-            const KeyId keyId, const Iv iv, const uint8_t* srcPtr,
-            uint8_t* dstPtr, const std::vector<SubSample> subSamples,
-            size_t* bytesDecryptedOut);
-
-    void setMockError(Status_V1_2 error) {mMockError = error;}
-    Status_V1_2 getMockError() const {return mMockError;}
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Session);
-
-    const std::vector<uint8_t> mSessionId;
-    KeyMap mKeyMap;
-    Mutex mMapLock;
-
-    // For mocking error return scenarios
-    Status_V1_2 mMockError;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_SESSION_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
deleted file mode 100644
index 5e77438..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_SESSION_LIBRARY_H_
-#define CLEARKEY_SESSION_LIBRARY_H_
-
-#include <utils/RefBase.h>
-#include <utils/Mutex.h>
-
-#include "ClearKeyTypes.h"
-#include "Session.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-class SessionLibrary : public RefBase {
-public:
-    static SessionLibrary* get();
-
-    sp<Session> createSession();
-
-    sp<Session> findSession(
-            const std::vector<uint8_t>& sessionId);
-
-    void destroySession(const sp<Session>& session);
-
-    size_t numOpenSessions() const { return mSessions.size(); }
-
-private:
-    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(SessionLibrary);
-
-    SessionLibrary() : mNextSessionId(1) {}
-
-    static Mutex sSingletonLock;
-    static SessionLibrary* sSingleton;
-
-    Mutex mSessionsLock;
-    uint32_t mNextSessionId;
-    std::map<std::vector<uint8_t>, sp<Session> > mSessions;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_SESSION_LIBRARY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h b/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
deleted file mode 100644
index 22eeccd..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
-#define CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
-
-#include <vector>
-
-#include <android/hardware/drm/1.0/types.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_vec;
-
-template<typename T> const hidl_vec<T> toHidlVec(const std::vector<T> &vec) {
-    hidl_vec<T> hVec;
-    hVec.setToExternal(const_cast<T *>(vec.data()), vec.size());
-    return hVec;
-}
-
-template<typename T> hidl_vec<T> toHidlVec(std::vector<T> &vec) {
-    hidl_vec<T> hVec;
-    hVec.setToExternal(vec.data(), vec.size());
-    return hVec;
-}
-
-template<typename T> const std::vector<T> toVector(const hidl_vec<T> &hVec) {
-    std::vector<T> vec;
-    vec.assign(hVec.data(), hVec.data() + hVec.size());
-    return *const_cast<const std::vector<T> *>(&vec);
-}
-
-template<typename T> std::vector<T> toVector(hidl_vec<T> &hVec) {
-    std::vector<T> vec;
-    vec.assign(hVec.data(), hVec.data() + hVec.size());
-    return vec;
-}
-
-template<typename T, size_t SIZE> const std::vector<T> toVector(
-        const hidl_array<T, SIZE> &hArray) {
-    std::vector<T> vec;
-    vec.assign(hArray.data(), hArray.data() + hArray.size());
-    return vec;
-}
-
-template<typename T, size_t SIZE> std::vector<T> toVector(
-        hidl_array<T, SIZE> &hArray) {
-    std::vector<T> vec;
-    vec.assign(hArray.data(), hArray.data() + hArray.size());
-    return vec;
-}
-
-inline Status toStatus_1_0(Status_V1_2 status) {
-  switch (status) {
-    case Status_V1_2::ERROR_DRM_INSUFFICIENT_SECURITY:
-    case Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE:
-    case Status_V1_2::ERROR_DRM_SESSION_LOST_STATE:
-      return Status::ERROR_DRM_UNKNOWN;
-    default:
-      return static_cast<Status>(status);
-  }
-}
-
-}  // namespace clearkey
-}  // namespace V1_4
-}  // namespace drm
-}  // namespace hardware
-}  // namespace android
-
-#endif // CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
deleted file mode 100644
index 16cba11..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.drm</name>
-        <transport>hwbinder</transport>
-        <fqname>@1.2::ICryptoFactory/clearkey</fqname>
-        <fqname>@1.2::IDrmFactory/clearkey</fqname>
-    </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml
deleted file mode 100644
index 229ee96..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.drm</name>
-        <transport>hwbinder</transport>
-        <fqname>@1.3::ICryptoFactory/clearkey</fqname>
-        <fqname>@1.3::IDrmFactory/clearkey</fqname>
-    </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
deleted file mode 100644
index 31ddb5f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2021 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<manifest version="1.0" type="device">
-    <hal format="hidl">
-        <name>android.hardware.drm</name>
-        <transport>hwbinder</transport>
-        <fqname>@1.4::ICryptoFactory/clearkey</fqname>
-        <fqname>@1.4::IDrmFactory/clearkey</fqname>
-    </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto b/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
deleted file mode 100644
index 3e11f0b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
+++ /dev/null
@@ -1,47 +0,0 @@
-// ----------------------------------------------------------------------------
-// device_files.proto
-// ----------------------------------------------------------------------------
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-// Description:
-//   Format of various files stored at the device.
-//
-syntax = "proto2";
-
-package android.hardware.drm.V1_2.clearkey;
-
-// need this if we are using libprotobuf-cpp-2.3.0-lite
-option optimize_for = LITE_RUNTIME;
-
-message License {
-  enum LicenseState {
-    ACTIVE = 1;
-    RELEASING = 2;
-  }
-
-  optional LicenseState state = 1;
-  optional bytes license = 2;
-}
-
-message OfflineFile {
-  enum FileType {
-    LICENSE = 1;
-  }
-
-  enum FileVersion {
-    VERSION_1 = 1;
-  }
-
-  optional FileType type = 1;
-  optional FileVersion version = 2 [default = VERSION_1];
-  optional License license = 3;
-
-}
-
-message HashedFile {
-  optional bytes file = 1;
-  // A raw (not hex-encoded) SHA256, taken over the bytes of 'file'.
-  optional bytes hash = 2;
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/service.cpp b/drm/mediadrm/plugins/clearkey/hidl/service.cpp
deleted file mode 100644
index d3d6905..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/service.cpp
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include <CryptoFactory.h>
-#include <DrmFactory.h>
-
-#include <android-base/logging.h>
-#include <binder/ProcessState.h>
-#include <hidl/HidlLazyUtils.h>
-#include <hidl/HidlTransportSupport.h>
-
-using ::android::hardware::configureRpcThreadpool;
-using ::android::hardware::joinRpcThreadpool;
-using ::android::sp;
-
-using android::hardware::drm::V1_4::ICryptoFactory;
-using android::hardware::drm::V1_4::IDrmFactory;
-using android::hardware::drm::V1_4::clearkey::CryptoFactory;
-using android::hardware::drm::V1_4::clearkey::DrmFactory;
-
-int main(int /* argc */, char** /* argv */) {
-    sp<IDrmFactory> drmFactory = new DrmFactory;
-    sp<ICryptoFactory> cryptoFactory = new CryptoFactory;
-
-    configureRpcThreadpool(8, true /* callerWillJoin */);
-
-    // Setup hwbinder service
-    CHECK_EQ(drmFactory->registerAsService("clearkey"), android::NO_ERROR)
-        << "Failed to register Clearkey Factory HAL";
-    CHECK_EQ(cryptoFactory->registerAsService("clearkey"), android::NO_ERROR)
-        << "Failed to register Clearkey Crypto  HAL";
-
-    joinRpcThreadpool();
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp b/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
deleted file mode 100644
index 358b5cc..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include <CryptoFactory.h>
-#include <DrmFactory.h>
-
-#include <android-base/logging.h>
-#include <binder/ProcessState.h>
-#include <hidl/HidlLazyUtils.h>
-#include <hidl/HidlTransportSupport.h>
-
-using ::android::hardware::configureRpcThreadpool;
-using ::android::hardware::joinRpcThreadpool;
-using ::android::sp;
-
-using android::hardware::drm::V1_4::ICryptoFactory;
-using android::hardware::drm::V1_4::IDrmFactory;
-using android::hardware::drm::V1_4::clearkey::CryptoFactory;
-using android::hardware::drm::V1_4::clearkey::DrmFactory;
-using android::hardware::LazyServiceRegistrar;
-
-int main(int /* argc */, char** /* argv */) {
-    sp<IDrmFactory> drmFactory = new DrmFactory;
-    sp<ICryptoFactory> cryptoFactory = new CryptoFactory;
-
-    configureRpcThreadpool(8, true /* callerWillJoin */);
-
-    // Setup hwbinder service
-    auto serviceRegistrar = LazyServiceRegistrar::getInstance();
-
-    // Setup hwbinder service
-    CHECK_EQ(serviceRegistrar.registerService(drmFactory, "clearkey"), android::NO_ERROR)
-        << "Failed to register Clearkey Factory HAL";
-    CHECK_EQ(serviceRegistrar.registerService(cryptoFactory, "clearkey"), android::NO_ERROR)
-        << "Failed to register Clearkey Crypto  HAL";
-
-    joinRpcThreadpool();
-}
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 74fa50d..8595f12 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -1,16 +1,16 @@
 {
   "presubmit": [
     {
-      "name": "GtsMediaTestCases",
+      "name": "WvtsDeviceTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+          "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
         },
         {
-          "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+          "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
         }
       ]
     }
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 71e7604..0ee8779 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -122,7 +122,7 @@
             // monotonic computation.
             // we use lazy computation here - if we precompute in
             // a single pass, duplicate secant computations may be avoided.
-            S sec, sec0, sec1;
+            S sec{}, sec0{}, sec1{};  // initialization not needed, used for clang-tidy
             if (!catmullRom || monotonic) {
                 sec = (high->second - low->second) / interval;
                 sec0 = low2 != this->end()
@@ -269,7 +269,7 @@
 
         // Note: We don't need to check size is within some bounds as
         // the Parcel read will fail if size is incorrectly specified too large.
-        float lastx;
+        float lastx = 0.f; // initialization not needed, used for clang-tidy
         for (uint32_t i = 0; i < size; ++i) {
             float x = config.xy[i * 2];
             float y = config.xy[i * 2 + 1];
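
The two Interpolator.h hunks above only value-initialize locals to keep clang-tidy quiet; per the added comments the initialization is not otherwise needed, so behavior is unchanged. A minimal standalone C++ sketch of the same pattern (the float type and variable values are illustrative, not the template's actual types):

#include <cstdio>

int main() {
    // "T x{};" value-initializes scalars to zero, which silences
    // uninitialized-variable diagnostics without changing the guarded logic.
    float sec{}, sec0{}, sec1{};
    bool haveNeighbors = false;  // stand-in for the spline's boundary checks
    if (haveNeighbors) {
        sec = 1.f;
        sec0 = 2.f;
        sec1 = 3.f;
    }
    std::printf("%f %f %f\n", sec, sec0, sec1);
    return 0;
}
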
diff --git a/include/media/MicrophoneInfo.h b/include/media/MicrophoneInfo.h
deleted file mode 100644
index a5045b9..0000000
--- a/include/media/MicrophoneInfo.h
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_MICROPHONE_INFO_H
-#define ANDROID_MICROPHONE_INFO_H
-
-#include <android/media/MicrophoneInfoData.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
-#include <media/AidlConversionUtil.h>
-#include <system/audio.h>
-
-namespace android {
-namespace media {
-
-class MicrophoneInfo : public Parcelable {
-public:
-    MicrophoneInfo() = default;
-    MicrophoneInfo(const MicrophoneInfo& microphoneInfo) = default;
-    MicrophoneInfo(audio_microphone_characteristic_t& characteristic) {
-        mDeviceId = std::string(&characteristic.device_id[0]);
-        mPortId = characteristic.id;
-        mType = characteristic.device;
-        mAddress = std::string(&characteristic.address[0]);
-        mDeviceLocation = characteristic.location;
-        mDeviceGroup = characteristic.group;
-        mIndexInTheGroup = characteristic.index_in_the_group;
-        mGeometricLocation.push_back(characteristic.geometric_location.x);
-        mGeometricLocation.push_back(characteristic.geometric_location.y);
-        mGeometricLocation.push_back(characteristic.geometric_location.z);
-        mOrientation.push_back(characteristic.orientation.x);
-        mOrientation.push_back(characteristic.orientation.y);
-        mOrientation.push_back(characteristic.orientation.z);
-        std::vector<float> frequencies;
-        std::vector<float> responses;
-        for (size_t i = 0; i < characteristic.num_frequency_responses; i++) {
-            frequencies.push_back(characteristic.frequency_responses[0][i]);
-            responses.push_back(characteristic.frequency_responses[1][i]);
-        }
-        mFrequencyResponses.push_back(frequencies);
-        mFrequencyResponses.push_back(responses);
-        for (size_t i = 0; i < AUDIO_CHANNEL_COUNT_MAX; i++) {
-            mChannelMapping.push_back(characteristic.channel_mapping[i]);
-        }
-        mSensitivity = characteristic.sensitivity;
-        mMaxSpl = characteristic.max_spl;
-        mMinSpl = characteristic.min_spl;
-        mDirectionality = characteristic.directionality;
-    }
-
-    virtual ~MicrophoneInfo() = default;
-
-    virtual status_t writeToParcel(Parcel* parcel) const {
-        MicrophoneInfoData parcelable;
-        return writeToParcelable(&parcelable)
-               ?: parcelable.writeToParcel(parcel);
-    }
-
-    virtual status_t writeToParcelable(MicrophoneInfoData* parcelable) const {
-        parcelable->deviceId = mDeviceId;
-        parcelable->portId = mPortId;
-        parcelable->type = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(mType));
-        parcelable->address = mAddress;
-        parcelable->deviceGroup = mDeviceGroup;
-        parcelable->indexInTheGroup = mIndexInTheGroup;
-        parcelable->geometricLocation = mGeometricLocation;
-        parcelable->orientation = mOrientation;
-        if (mFrequencyResponses.size() != 2) {
-            return BAD_VALUE;
-        }
-        parcelable->frequencies = mFrequencyResponses[0];
-        parcelable->frequencyResponses = mFrequencyResponses[1];
-        parcelable->channelMapping = mChannelMapping;
-        parcelable->sensitivity = mSensitivity;
-        parcelable->maxSpl = mMaxSpl;
-        parcelable->minSpl = mMinSpl;
-        parcelable->directionality = mDirectionality;
-        return OK;
-    }
-
-    virtual status_t readFromParcel(const Parcel* parcel) {
-        MicrophoneInfoData data;
-        return data.readFromParcel(parcel)
-            ?: readFromParcelable(data);
-    }
-
-    virtual status_t readFromParcelable(const MicrophoneInfoData& parcelable) {
-        mDeviceId = parcelable.deviceId;
-        mPortId = parcelable.portId;
-        mType = VALUE_OR_RETURN_STATUS(convertReinterpret<uint32_t>(parcelable.type));
-        mAddress = parcelable.address;
-        mDeviceLocation = parcelable.deviceLocation;
-        mDeviceGroup = parcelable.deviceGroup;
-        mIndexInTheGroup = parcelable.indexInTheGroup;
-        if (parcelable.geometricLocation.size() != 3) {
-            return BAD_VALUE;
-        }
-        mGeometricLocation = parcelable.geometricLocation;
-        if (parcelable.orientation.size() != 3) {
-            return BAD_VALUE;
-        }
-        mOrientation = parcelable.orientation;
-        if (parcelable.frequencies.size() != parcelable.frequencyResponses.size()) {
-            return BAD_VALUE;
-        }
-
-        mFrequencyResponses.push_back(parcelable.frequencies);
-        mFrequencyResponses.push_back(parcelable.frequencyResponses);
-        if (parcelable.channelMapping.size() != AUDIO_CHANNEL_COUNT_MAX) {
-            return BAD_VALUE;
-        }
-        mChannelMapping = parcelable.channelMapping;
-        mSensitivity = parcelable.sensitivity;
-        mMaxSpl = parcelable.maxSpl;
-        mMinSpl = parcelable.minSpl;
-        mDirectionality = parcelable.directionality;
-        return OK;
-    }
-
-    std::string getDeviceId() const {
-        return mDeviceId;
-    }
-
-    int getPortId() const {
-        return mPortId;
-    }
-
-    unsigned int getType() const {
-        return mType;
-    }
-
-    std::string getAddress() const {
-        return mAddress;
-    }
-
-    int getDeviceLocation() const {
-        return mDeviceLocation;
-    }
-
-    int getDeviceGroup() const {
-        return mDeviceGroup;
-    }
-
-    int getIndexInTheGroup() const {
-        return mIndexInTheGroup;
-    }
-
-    const std::vector<float>& getGeometricLocation() const {
-        return mGeometricLocation;
-    }
-
-    const std::vector<float>& getOrientation() const {
-        return mOrientation;
-    }
-
-    const std::vector<std::vector<float>>& getFrequencyResponses() const {
-        return mFrequencyResponses;
-    }
-
-    const std::vector<int>& getChannelMapping() const {
-        return mChannelMapping;
-    }
-
-    float getSensitivity() const {
-        return mSensitivity;
-    }
-
-    float getMaxSpl() const {
-        return mMaxSpl;
-    }
-
-    float getMinSpl() const {
-        return mMinSpl;
-    }
-
-    int getDirectionality() const {
-        return mDirectionality;
-    }
-
-private:
-    std::string mDeviceId;
-    int32_t mPortId;
-    uint32_t mType;
-    std::string mAddress;
-    int32_t mDeviceLocation;
-    int32_t mDeviceGroup;
-    int32_t mIndexInTheGroup;
-    std::vector<float> mGeometricLocation;
-    std::vector<float> mOrientation;
-    std::vector<std::vector<float>> mFrequencyResponses;
-    std::vector<int> mChannelMapping;
-    float mSensitivity;
-    float mMaxSpl;
-    float mMinSpl;
-    int32_t mDirectionality;
-};
-
-// Conversion routines, according to AidlConversion.h conventions.
-inline ConversionResult<MicrophoneInfo>
-aidl2legacy_MicrophoneInfo(const media::MicrophoneInfoData& aidl) {
-    MicrophoneInfo legacy;
-    RETURN_IF_ERROR(legacy.readFromParcelable(aidl));
-    return legacy;
-}
-
-inline ConversionResult<media::MicrophoneInfoData>
-legacy2aidl_MicrophoneInfo(const MicrophoneInfo& legacy) {
-    media::MicrophoneInfoData aidl;
-    RETURN_IF_ERROR(legacy.writeToParcelable(&aidl));
-    return aidl;
-}
-
-} // namespace media
-} // namespace android
-
-#endif
diff --git a/include/media/MmapStreamCallback.h b/include/media/MmapStreamCallback.h
index 31b8eb5..76ee6d7 100644
--- a/include/media/MmapStreamCallback.h
+++ b/include/media/MmapStreamCallback.h
@@ -37,12 +37,9 @@
 
     /**
      * The volume to be applied to the use case specified when opening the stream has changed
-     * \param[in] channels a channel mask containing all channels the volume should be applied to.
-     * \param[in] values the volume values to be applied to each channel. The size of the vector
-     *                   should correspond to the channel count retrieved with
-     *                   audio_channel_count_from_in_mask() or audio_channel_count_from_out_mask()
+     * \param[in] volume the new target volume
      */
-    virtual void onVolumeChanged(audio_channel_mask_t channels, Vector<float> values) = 0;
+    virtual void onVolumeChanged(float volume) = 0;
 
     /**
      * The device the stream is routed to/from has changed
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index 61de987..7725175 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -155,6 +155,18 @@
      */
     virtual status_t standby() = 0;
 
+    /**
+     * Report when data is written to a playback buffer. Currently, this is used by the mmap
+     * playback thread for sound dose computation.
+     *
+     * \param[in] buffer a pointer to the audio data
+     * \param[in] frameCount the number of frames written by the CPU
+     * \return OK in case of success.
+     *         NO_INIT in case of initialization error
+     *         INVALID_OPERATION in case of wrong thread type
+     */
+    virtual status_t reportData(const void* buffer, size_t frameCount) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     MmapStreamInterface() {}
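
The reportData() method added above gives mmap clients a way to hand freshly written frames to the implementation for sound dose accounting. A self-contained sketch that mirrors the new signature with a mock class (the mock and its frame counter are illustrative only, not part of this interface):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Mirrors the new pure-virtual entry point: status_t reportData(const void*, size_t).
struct MockMmapStream {
    int32_t reportData(const void* /*buffer*/, size_t frameCount) {
        // A real implementation would forward this to its sound dose computation;
        // here we simply accumulate the reported frame count.
        totalFrames += frameCount;
        return 0;  // OK
    }
    size_t totalFrames = 0;
};

int main() {
    MockMmapStream stream;
    uint8_t buffer[256] = {};
    stream.reportData(buffer, 64);   // report 64 frames written by the CPU
    stream.reportData(buffer, 64);
    std::printf("frames reported: %zu\n", stream.totalFrames);
    return 0;
}
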
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index d4025e5..11e1704 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -42,9 +42,15 @@
         mWidth(width), mHeight(height),
         mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
         mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
-        mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
-        mSize(hasData ? (bpp * width * height) : 0),
-        mIccSize(iccSize), mBitDepth(bitDepth) {
+        mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
+        mBitDepth(bitDepth) {
+            uint32_t multVal;
+            mRowBytes = __builtin_mul_overflow(bpp, width, &multVal) ? 0 : multVal;
+            mSize = __builtin_mul_overflow(multVal, height, &multVal) ? 0 : multVal;
+            if (hasData && (mRowBytes == 0 || mSize == 0)) {
+                ALOGE("Frame rowBytes/ size overflow %dx%d bpp %d", width, height, bpp);
+                android_errorWriteLog(0x534e4554, "233006499");
+            }
     }
 
     void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
@@ -85,8 +91,6 @@
     uint32_t mSize;            // Number of bytes of frame data
     uint32_t mIccSize;         // Number of bytes of ICC data
     uint32_t mBitDepth;        // number of bits per R / G / B channel
-
-    // Adding new items must be 64-bit aligned.
 };
 
 }; // namespace android
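
The VideoFrame constructor change above replaces unchecked bpp * width * height arithmetic with __builtin_mul_overflow so oversized dimensions yield 0 (and a logged error) instead of a wrapped size. A standalone sketch of the same overflow check (function and variable names are illustrative, not the class's members):

#include <cstdint>
#include <cstdio>

// Returns 0 when bpp * width or (bpp * width) * height does not fit in uint32_t,
// mirroring how the patched constructor zeroes mRowBytes/mSize on overflow.
static uint32_t checkedFrameSize(uint32_t bpp, uint32_t width, uint32_t height) {
    uint32_t rowBytes = 0;
    if (__builtin_mul_overflow(bpp, width, &rowBytes)) return 0;
    uint32_t size = 0;
    if (__builtin_mul_overflow(rowBytes, height, &size)) return 0;
    return size;
}

int main() {
    std::printf("%u\n", checkedFrameSize(4, 1920, 1080));      // 8294400
    std::printf("%u\n", checkedFrameSize(4, 0x40000000u, 2));  // 0 (overflow)
    return 0;
}
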
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index a4c03ba..cd5d354 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,30 +1,48 @@
 // for frameworks/av/media
 {
-    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
-    // presubmit-large once issues in cuttlefish are fixed
+    "presubmit-large": [
+        // runs whenever we change something in this tree
+        {
+            "name": "CtsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "CtsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
+                }
+            ]
+        }
+    ],
     "presubmit": [
         {
-            "name": "GtsMediaTestCases",
+            "name": "WvtsDeviceTestCases",
             "options" : [
                 {
                     "include-annotation": "android.platform.test.annotations.Presubmit"
                 },
                 {
-                    "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+                    "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
                 },
                 {
-                    "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+                    "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
                 }
-            ]
-        }
-    ],
-
-    "imports": [
+            ],
+            "file_patterns": ["(?i)drm|crypto"]
+        },
         {
-            "path": "frameworks/av/drm/mediadrm/plugins"
+            "name": "CtsMediaDrmFrameworkTestCases",
+            "options" : [
+                {
+                    "include-annotation": "android.platform.test.annotations.Presubmit"
+                }
+            ],
+            "file_patterns": ["(?i)drm|crypto"]
         }
     ]
-
-    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
-    // platinum-postsubmit once issues in cuttlefish are fixed
 }
diff --git a/media/audioaidlconversion/AidlConversionCore.cpp b/media/audioaidlconversion/AidlConversionCore.cpp
new file mode 100644
index 0000000..948e35d
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionCore.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlConversionCore"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/AidlConversionCore.h>
+#include <media/AidlConversionCppNdk.h>
+
+namespace aidl {
+namespace android {
+
+using MicrophoneDirection = hardware::audio::core::IStreamIn::MicrophoneDirection;
+using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::status_t;
+using ::android::base::unexpected;
+
+ConversionResult<audio_microphone_direction_t>
+aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(MicrophoneDirection aidl) {
+    switch (aidl) {
+        case MicrophoneDirection::UNSPECIFIED:
+            return MIC_DIRECTION_UNSPECIFIED;
+        case MicrophoneDirection::FRONT:
+            return MIC_DIRECTION_FRONT;
+        case MicrophoneDirection::BACK:
+            return MIC_DIRECTION_BACK;
+        case MicrophoneDirection::EXTERNAL:
+            return MIC_DIRECTION_EXTERNAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<MicrophoneDirection>
+legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(audio_microphone_direction_t legacy) {
+    switch (legacy) {
+        case MIC_DIRECTION_UNSPECIFIED:
+            return MicrophoneDirection::UNSPECIFIED;
+        case MIC_DIRECTION_FRONT:
+            return MicrophoneDirection::FRONT;
+        case MIC_DIRECTION_BACK:
+            return MicrophoneDirection::BACK;
+        case MIC_DIRECTION_EXTERNAL:
+            return MicrophoneDirection::EXTERNAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+}  // namespace android
+}  // aidl
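
The new AidlConversionCore.cpp above converts the microphone direction enum in both directions with exhaustive switches that fall back to unexpected(BAD_VALUE). A self-contained sketch of that round-trip pattern using stand-in enums (std::optional plays the role of the real ConversionResult type; none of these names come from the patch):

#include <cstdio>
#include <optional>

enum class AidlDir { UNSPECIFIED, FRONT, BACK, EXTERNAL };              // stand-in AIDL enum
enum LegacyDir { DIR_UNSPECIFIED, DIR_FRONT, DIR_BACK, DIR_EXTERNAL };  // stand-in legacy enum

// Exhaustive switch; an empty optional stands in for unexpected(BAD_VALUE).
static std::optional<LegacyDir> aidl2legacy(AidlDir aidl) {
    switch (aidl) {
        case AidlDir::UNSPECIFIED: return DIR_UNSPECIFIED;
        case AidlDir::FRONT:       return DIR_FRONT;
        case AidlDir::BACK:        return DIR_BACK;
        case AidlDir::EXTERNAL:    return DIR_EXTERNAL;
    }
    return std::nullopt;
}

static std::optional<AidlDir> legacy2aidl(LegacyDir legacy) {
    switch (legacy) {
        case DIR_UNSPECIFIED: return AidlDir::UNSPECIFIED;
        case DIR_FRONT:       return AidlDir::FRONT;
        case DIR_BACK:        return AidlDir::BACK;
        case DIR_EXTERNAL:    return AidlDir::EXTERNAL;
    }
    return std::nullopt;
}

int main() {
    // Every legacy value should survive a legacy -> AIDL -> legacy round trip.
    for (LegacyDir d : {DIR_UNSPECIFIED, DIR_FRONT, DIR_BACK, DIR_EXTERNAL}) {
        std::optional<AidlDir> aidl = legacy2aidl(d);
        std::optional<LegacyDir> back = aidl ? aidl2legacy(*aidl) : std::nullopt;
        std::printf("%d round-trips: %s\n", static_cast<int>(d),
                    (back && *back == d) ? "yes" : "no");
    }
    return 0;
}
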
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
new file mode 100644
index 0000000..3b06245
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -0,0 +1,3139 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <map>
+#include <utility>
+#include <vector>
+
+#define LOG_TAG "AidlConversionCppNdk"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "media/AidlConversionCppNdk.h"
+
+#include <media/ShmemCompat.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AIDL CPP/NDK backend to legacy audio data structure conversion utilities.
+
+#if defined(BACKEND_NDK)
+/* AIDL String generated in NDK is different from CPP */
+#define GET_DEVICE_DESC_CONNECTION(x)  AudioDeviceDescription::CONNECTION_##x
+namespace aidl {
+#else
+#define GET_DEVICE_DESC_CONNECTION(x)  AudioDeviceDescription::CONNECTION_##x()
+#endif
+
+namespace android {
+
+using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::String16;
+using ::android::String8;
+using ::android::status_t;
+using ::android::base::unexpected;
+
+using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioConfig;
+using media::audio::common::AudioConfigBase;
+using media::audio::common::AudioContentType;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioDeviceType;
+using media::audio::common::AudioDualMonoMode;
+using media::audio::common::AudioEncapsulationMetadataType;
+using media::audio::common::AudioEncapsulationMode;
+using media::audio::common::AudioEncapsulationType;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioFormatType;
+using media::audio::common::AudioGain;
+using media::audio::common::AudioGainConfig;
+using media::audio::common::AudioGainMode;
+using media::audio::common::AudioInputFlags;
+using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioLatencyMode;
+using media::audio::common::AudioMode;
+using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioOutputFlags;
+using media::audio::common::AudioPlaybackRate;
+using media::audio::common::AudioPort;
+using media::audio::common::AudioPortConfig;
+using media::audio::common::AudioPortDeviceExt;
+using media::audio::common::AudioPortExt;
+using media::audio::common::AudioPortMixExt;
+using media::audio::common::AudioPortMixExtUseCase;
+using media::audio::common::AudioProfile;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStandard;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUsage;
+using media::audio::common::AudioUuid;
+using media::audio::common::ExtraAudioDescriptor;
+using media::audio::common::Int;
+using media::audio::common::MicrophoneDynamicInfo;
+using media::audio::common::MicrophoneInfo;
+using media::audio::common::PcmType;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
+    if (aidl.size() > maxSize - 1) {
+        return BAD_VALUE;
+    }
+    aidl.copy(dest, aidl.size());
+    dest[aidl.size()] = '\0';
+    return OK;
+}
+
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize) {
+    if (legacy == nullptr) {
+        return unexpected(BAD_VALUE);
+    }
+    if (strnlen(legacy, maxSize) == maxSize) {
+        // No null-terminator.
+        return unexpected(BAD_VALUE);
+    }
+    return std::string(legacy);
+}
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_module_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_io_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_port_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl) {
+    return convertReinterpret<audio_patch_handle_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl) {
+    return convertReinterpret<audio_unique_id_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl) {
+    return convertReinterpret<audio_hw_sync_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl) {
+    return convertReinterpret<pid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl) {
+    return convertReinterpret<uid_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl) {
+    return String16(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy) {
+    return std::string(String8(legacy).c_str());
+}
+
+// TODO b/182392769: create an optional -> optional util
+ConversionResult<std::optional<String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl) {
+    if (!aidl.has_value()) {
+        return std::nullopt;
+    }
+    ConversionResult<String16> conversion =
+        VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.value()));
+    return conversion.value();
+}
+
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy) {
+  if (!legacy.has_value()) {
+    return std::nullopt;
+  }
+  ConversionResult<std::string> conversion =
+      VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.value()));
+  return conversion.value();
+}
+
+ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl) {
+    return String8(aidl.data(), aidl.size());
+}
+
+ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy) {
+    return std::string(legacy.c_str());
+}
+
+namespace {
+
+namespace detail {
+using AudioChannelBitPair = std::pair<audio_channel_mask_t, int>;
+using AudioChannelBitPairs = std::vector<AudioChannelBitPair>;
+using AudioChannelPair = std::pair<audio_channel_mask_t, AudioChannelLayout>;
+using AudioChannelPairs = std::vector<AudioChannelPair>;
+using AudioDevicePair = std::pair<audio_devices_t, AudioDeviceDescription>;
+using AudioDevicePairs = std::vector<AudioDevicePair>;
+using AudioFormatPair = std::pair<audio_format_t, AudioFormatDescription>;
+using AudioFormatPairs = std::vector<AudioFormatPair>;
+}
+
+const detail::AudioChannelBitPairs& getInAudioChannelBits() {
+    static const detail::AudioChannelBitPairs pairs = {
+        { AUDIO_CHANNEL_IN_LEFT, AudioChannelLayout::CHANNEL_FRONT_LEFT },
+        { AUDIO_CHANNEL_IN_RIGHT, AudioChannelLayout::CHANNEL_FRONT_RIGHT },
+        // AUDIO_CHANNEL_IN_FRONT is at the end
+        { AUDIO_CHANNEL_IN_BACK, AudioChannelLayout::CHANNEL_BACK_CENTER },
+        // AUDIO_CHANNEL_IN_*_PROCESSED not supported
+        // AUDIO_CHANNEL_IN_PRESSURE not supported
+        // AUDIO_CHANNEL_IN_*_AXIS not supported
+        // AUDIO_CHANNEL_IN_VOICE_* not supported
+        { AUDIO_CHANNEL_IN_BACK_LEFT, AudioChannelLayout::CHANNEL_BACK_LEFT },
+        { AUDIO_CHANNEL_IN_BACK_RIGHT, AudioChannelLayout::CHANNEL_BACK_RIGHT },
+        { AUDIO_CHANNEL_IN_CENTER, AudioChannelLayout::CHANNEL_FRONT_CENTER },
+        { AUDIO_CHANNEL_IN_LOW_FREQUENCY, AudioChannelLayout::CHANNEL_LOW_FREQUENCY },
+        { AUDIO_CHANNEL_IN_TOP_LEFT, AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT },
+        { AUDIO_CHANNEL_IN_TOP_RIGHT, AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT },
+        // When going from aidl to legacy, IN_CENTER is used
+        { AUDIO_CHANNEL_IN_FRONT, AudioChannelLayout::CHANNEL_FRONT_CENTER }
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getInAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_INPUT_LAYOUT(n)                                                 \
+            {                                                                  \
+                AUDIO_CHANNEL_IN_##n,                                          \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
+                        AudioChannelLayout::LAYOUT_##n)                        \
+            }
+
+        DEFINE_INPUT_LAYOUT(MONO),
+        DEFINE_INPUT_LAYOUT(STEREO),
+        DEFINE_INPUT_LAYOUT(FRONT_BACK),
+        // AUDIO_CHANNEL_IN_6 not supported
+        DEFINE_INPUT_LAYOUT(2POINT0POINT2),
+        DEFINE_INPUT_LAYOUT(2POINT1POINT2),
+        DEFINE_INPUT_LAYOUT(3POINT0POINT2),
+        DEFINE_INPUT_LAYOUT(3POINT1POINT2),
+        DEFINE_INPUT_LAYOUT(5POINT1)
+#undef DEFINE_INPUT_LAYOUT
+    };
+    return pairs;
+}
+
+const detail::AudioChannelBitPairs& getOutAudioChannelBits() {
+    static const detail::AudioChannelBitPairs pairs = {
+#define DEFINE_OUTPUT_BITS(n)                                                  \
+            { AUDIO_CHANNEL_OUT_##n, AudioChannelLayout::CHANNEL_##n }
+
+        DEFINE_OUTPUT_BITS(FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(LOW_FREQUENCY),
+        DEFINE_OUTPUT_BITS(BACK_LEFT),
+        DEFINE_OUTPUT_BITS(BACK_RIGHT),
+        DEFINE_OUTPUT_BITS(FRONT_LEFT_OF_CENTER),
+        DEFINE_OUTPUT_BITS(FRONT_RIGHT_OF_CENTER),
+        DEFINE_OUTPUT_BITS(BACK_CENTER),
+        DEFINE_OUTPUT_BITS(SIDE_LEFT),
+        DEFINE_OUTPUT_BITS(SIDE_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_BACK_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_BACK_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_BACK_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_SIDE_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_SIDE_RIGHT),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(LOW_FREQUENCY_2),
+        DEFINE_OUTPUT_BITS(FRONT_WIDE_LEFT),
+        DEFINE_OUTPUT_BITS(FRONT_WIDE_RIGHT),
+#undef DEFINE_OUTPUT_BITS
+        { AUDIO_CHANNEL_OUT_HAPTIC_A, AudioChannelLayout::CHANNEL_HAPTIC_A },
+        { AUDIO_CHANNEL_OUT_HAPTIC_B, AudioChannelLayout::CHANNEL_HAPTIC_B }
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getOutAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_OUTPUT_LAYOUT(n)                                                \
+            {                                                                  \
+                AUDIO_CHANNEL_OUT_##n,                                         \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
+                        AudioChannelLayout::LAYOUT_##n)                        \
+            }
+
+        DEFINE_OUTPUT_LAYOUT(MONO),
+        DEFINE_OUTPUT_LAYOUT(STEREO),
+        DEFINE_OUTPUT_LAYOUT(2POINT1),
+        DEFINE_OUTPUT_LAYOUT(TRI),
+        DEFINE_OUTPUT_LAYOUT(TRI_BACK),
+        DEFINE_OUTPUT_LAYOUT(3POINT1),
+        DEFINE_OUTPUT_LAYOUT(2POINT0POINT2),
+        DEFINE_OUTPUT_LAYOUT(2POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(3POINT0POINT2),
+        DEFINE_OUTPUT_LAYOUT(3POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(QUAD),
+        DEFINE_OUTPUT_LAYOUT(QUAD_SIDE),
+        DEFINE_OUTPUT_LAYOUT(SURROUND),
+        DEFINE_OUTPUT_LAYOUT(PENTA),
+        DEFINE_OUTPUT_LAYOUT(5POINT1),
+        DEFINE_OUTPUT_LAYOUT(5POINT1_SIDE),
+        DEFINE_OUTPUT_LAYOUT(5POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(5POINT1POINT4),
+        DEFINE_OUTPUT_LAYOUT(6POINT1),
+        DEFINE_OUTPUT_LAYOUT(7POINT1),
+        DEFINE_OUTPUT_LAYOUT(7POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(7POINT1POINT4),
+        DEFINE_OUTPUT_LAYOUT(13POINT_360RA),
+        DEFINE_OUTPUT_LAYOUT(22POINT2),
+        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_A),
+        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_A),
+        DEFINE_OUTPUT_LAYOUT(HAPTIC_AB),
+        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_AB),
+        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_AB)
+#undef DEFINE_OUTPUT_LAYOUT
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getVoiceAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_VOICE_LAYOUT(n)                                                 \
+            {                                                                  \
+                AUDIO_CHANNEL_IN_VOICE_##n,                                    \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::voiceMask>(  \
+                        AudioChannelLayout::VOICE_##n)                         \
+            }
+        DEFINE_VOICE_LAYOUT(UPLINK_MONO),
+        DEFINE_VOICE_LAYOUT(DNLINK_MONO),
+        DEFINE_VOICE_LAYOUT(CALL_MONO)
+#undef DEFINE_VOICE_LAYOUT
+    };
+    return pairs;
+}
+
+AudioDeviceDescription make_AudioDeviceDescription(AudioDeviceType type,
+        const std::string& connection = "") {
+    AudioDeviceDescription result;
+    result.type = type;
+    result.connection = connection;
+    return result;
+}
+
+void append_AudioDeviceDescription(detail::AudioDevicePairs& pairs,
+        audio_devices_t inputType, audio_devices_t outputType,
+        AudioDeviceType inType, AudioDeviceType outType,
+        const std::string& connection = "") {
+    pairs.push_back(std::make_pair(inputType, make_AudioDeviceDescription(inType, connection)));
+    pairs.push_back(std::make_pair(outputType, make_AudioDeviceDescription(outType, connection)));
+}
+
+const detail::AudioDevicePairs& getAudioDevicePairs() {
+    static const detail::AudioDevicePairs pairs = []() {
+        detail::AudioDevicePairs pairs = {{
+            {
+                AUDIO_DEVICE_NONE, AudioDeviceDescription{}
+            },
+            {
+                AUDIO_DEVICE_OUT_EARPIECE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER_EARPIECE)
+            },
+            {
+                AUDIO_DEVICE_OUT_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER)
+            },
+            {
+                AUDIO_DEVICE_OUT_WIRED_HEADPHONE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEADPHONE,
+                        GET_DEVICE_DESC_CONNECTION(ANALOG))
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_SCO, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_DEVICE,
+                        GET_DEVICE_DESC_CONNECTION(BT_SCO))
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_CARKIT,
+                        GET_DEVICE_DESC_CONNECTION(BT_SCO))
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEADPHONE,
+                        GET_DEVICE_DESC_CONNECTION(BT_A2DP))
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER,
+                        GET_DEVICE_DESC_CONNECTION(BT_A2DP))
+            },
+            {
+                AUDIO_DEVICE_OUT_TELEPHONY_TX, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_TELEPHONY_TX)
+            },
+            {
+                AUDIO_DEVICE_OUT_AUX_LINE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_LINE_AUX)
+            },
+            {
+                AUDIO_DEVICE_OUT_SPEAKER_SAFE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER_SAFE)
+            },
+            {
+                AUDIO_DEVICE_OUT_HEARING_AID, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEARING_AID,
+                        GET_DEVICE_DESC_CONNECTION(WIRELESS))
+            },
+            {
+                AUDIO_DEVICE_OUT_ECHO_CANCELLER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_ECHO_CANCELLER)
+            },
+            {
+                AUDIO_DEVICE_OUT_BLE_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER,
+                        GET_DEVICE_DESC_CONNECTION(BT_LE))
+            },
+            {
+                AUDIO_DEVICE_OUT_BLE_BROADCAST, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_BROADCAST,
+                        GET_DEVICE_DESC_CONNECTION(BT_LE))
+            },
+            // AUDIO_DEVICE_IN_AMBIENT and IN_COMMUNICATION are removed since they were deprecated.
+            {
+                AUDIO_DEVICE_IN_BUILTIN_MIC, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_MICROPHONE)
+            },
+            {
+                AUDIO_DEVICE_IN_BACK_MIC, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_MICROPHONE_BACK)
+            },
+            {
+                AUDIO_DEVICE_IN_TELEPHONY_RX, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_TELEPHONY_RX)
+            },
+            {
+                AUDIO_DEVICE_IN_TV_TUNER, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_TV_TUNER)
+            },
+            {
+                AUDIO_DEVICE_IN_LOOPBACK, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_LOOPBACK)
+            },
+            {
+                AUDIO_DEVICE_IN_BLUETOOTH_BLE, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_DEVICE,
+                        GET_DEVICE_DESC_CONNECTION(BT_LE))
+            },
+            {
+                AUDIO_DEVICE_IN_ECHO_REFERENCE, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_ECHO_REFERENCE)
+            },
+            {
+                AUDIO_DEVICE_IN_REMOTE_SUBMIX, make_AudioDeviceDescription(
+                         AudioDeviceType::IN_SUBMIX)
+            },
+            {
+                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SUBMIX,
+                        GET_DEVICE_DESC_CONNECTION(VIRTUAL))
+            }
+        }};
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_DEFAULT, AUDIO_DEVICE_OUT_DEFAULT,
+                AudioDeviceType::IN_DEFAULT, AudioDeviceType::OUT_DEFAULT);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                GET_DEVICE_DESC_CONNECTION(ANALOG));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                GET_DEVICE_DESC_CONNECTION(BT_SCO));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI, AUDIO_DEVICE_OUT_HDMI,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(HDMI));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,
+                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
+                GET_DEVICE_DESC_CONNECTION(ANALOG));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
+                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
+                GET_DEVICE_DESC_CONNECTION(USB));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_ACCESSORY, AUDIO_DEVICE_OUT_USB_ACCESSORY,
+                AudioDeviceType::IN_ACCESSORY, AudioDeviceType::OUT_ACCESSORY,
+                GET_DEVICE_DESC_CONNECTION(USB));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_OUT_USB_DEVICE,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(USB));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_FM_TUNER, AUDIO_DEVICE_OUT_FM,
+                AudioDeviceType::IN_FM_TUNER, AudioDeviceType::OUT_FM);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_LINE, AUDIO_DEVICE_OUT_LINE,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(ANALOG));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_SPDIF, AUDIO_DEVICE_OUT_SPDIF,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(SPDIF));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(BT_A2DP));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_IP, AUDIO_DEVICE_OUT_IP,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(IP_V4));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BUS, AUDIO_DEVICE_OUT_BUS,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(BUS));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_PROXY, AUDIO_DEVICE_OUT_PROXY,
+                AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_OUT_USB_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                GET_DEVICE_DESC_CONNECTION(USB));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_ARC,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(HDMI_ARC));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI_EARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                GET_DEVICE_DESC_CONNECTION(HDMI_EARC));
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                GET_DEVICE_DESC_CONNECTION(BT_LE));
+        return pairs;
+    }();
+    return pairs;
+}
+
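+// Small builders for the AudioFormatDescription entries in the table below: a bare format
+// type, a PCM type, an encoded format identified by a MIME-style string, and a
+// PCM-encapsulated bitstream (e.g. IEC61937) that carries both.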
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+    AudioFormatDescription result;
+    result.type = type;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+    result.pcm = pcm;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+    AudioFormatDescription result;
+    result.encoding = encoding;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport,
+        const std::string& encoding) {
+    auto result = make_AudioFormatDescription(encoding);
+    result.pcm = transport;
+    return result;
+}
+
+const detail::AudioFormatPairs& getAudioFormatPairs() {
+    static const detail::AudioFormatPairs pairs = {{
+            {AUDIO_FORMAT_INVALID,
+             make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID)},
+            {AUDIO_FORMAT_DEFAULT, AudioFormatDescription{}},
+            {AUDIO_FORMAT_PCM_16_BIT, make_AudioFormatDescription(PcmType::INT_16_BIT)},
+            {AUDIO_FORMAT_PCM_8_BIT, make_AudioFormatDescription(PcmType::UINT_8_BIT)},
+            {AUDIO_FORMAT_PCM_32_BIT, make_AudioFormatDescription(PcmType::INT_32_BIT)},
+            {AUDIO_FORMAT_PCM_8_24_BIT, make_AudioFormatDescription(PcmType::FIXED_Q_8_24)},
+            {AUDIO_FORMAT_PCM_FLOAT, make_AudioFormatDescription(PcmType::FLOAT_32_BIT)},
+            {AUDIO_FORMAT_PCM_24_BIT_PACKED, make_AudioFormatDescription(PcmType::INT_24_BIT)},
+            {AUDIO_FORMAT_MP3, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEG)},
+            {AUDIO_FORMAT_AMR_NB,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AMR_NB)},
+            {AUDIO_FORMAT_AMR_WB,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AMR_WB)},
+            {AUDIO_FORMAT_AAC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_MP4)},
+            {AUDIO_FORMAT_AAC_MAIN,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_MAIN)},
+            {AUDIO_FORMAT_AAC_LC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LC)},
+            {AUDIO_FORMAT_AAC_SSR,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_SSR)},
+            {AUDIO_FORMAT_AAC_LTP,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LTP)},
+            {AUDIO_FORMAT_AAC_HE_V1,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_HE_V1)},
+            {AUDIO_FORMAT_AAC_SCALABLE,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE)},
+            {AUDIO_FORMAT_AAC_ERLC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ERLC)},
+            {AUDIO_FORMAT_AAC_LD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LD)},
+            {AUDIO_FORMAT_AAC_HE_V2,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_HE_V2)},
+            {AUDIO_FORMAT_AAC_ELD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ELD)},
+            {AUDIO_FORMAT_AAC_XHE,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_XHE)},
+            // AUDIO_FORMAT_HE_AAC_V1 and HE_AAC_V2 are not mapped since they were deprecated a
+            // long time ago.
+            {AUDIO_FORMAT_VORBIS,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_VORBIS)},
+            {AUDIO_FORMAT_OPUS, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_OPUS)},
+            {AUDIO_FORMAT_AC3, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AC3)},
+            {AUDIO_FORMAT_E_AC3, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EAC3)},
+            {AUDIO_FORMAT_E_AC3_JOC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EAC3_JOC)},
+            {AUDIO_FORMAT_DTS, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DTS)},
+            {AUDIO_FORMAT_DTS_HD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DTS_HD)},
+            {AUDIO_FORMAT_DTS_HD_MA,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DTS_HD_MA)},
+            {AUDIO_FORMAT_DTS_UHD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1)},
+            {AUDIO_FORMAT_DTS_UHD_P2,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2)},
+            // In the future, we would like to represent encapsulated bitstreams as
+            // nested AudioFormatDescriptions. The legacy 'AUDIO_FORMAT_IEC61937' type doesn't
+            // specify the format of the encapsulated bitstream.
+            {AUDIO_FORMAT_IEC61937,
+             make_AudioFormatDescription(PcmType::INT_16_BIT,
+                                         ::android::MEDIA_MIMETYPE_AUDIO_IEC61937)},
+            {AUDIO_FORMAT_DOLBY_TRUEHD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD)},
+            {AUDIO_FORMAT_EVRC, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EVRC)},
+            {AUDIO_FORMAT_EVRCB,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EVRCB)},
+            {AUDIO_FORMAT_EVRCWB,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EVRCWB)},
+            {AUDIO_FORMAT_EVRCNW,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_EVRCNW)},
+            {AUDIO_FORMAT_AAC_ADIF,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADIF)},
+            {AUDIO_FORMAT_WMA, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_WMA)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_WMA_PRO, make_AudioFormatDescription("audio/x-ms-wma.pro")},
+            {AUDIO_FORMAT_AMR_WB_PLUS,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS)},
+            {AUDIO_FORMAT_MP2,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)},
+            {AUDIO_FORMAT_QCELP,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_QCELP)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_DSD, make_AudioFormatDescription("audio/vnd.sony.dsd")},
+            {AUDIO_FORMAT_FLAC, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_FLAC)},
+            {AUDIO_FORMAT_ALAC, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_ALAC)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APE, make_AudioFormatDescription("audio/x-ape")},
+            {AUDIO_FORMAT_AAC_ADTS,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS)},
+            {AUDIO_FORMAT_AAC_ADTS_MAIN,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN)},
+            {AUDIO_FORMAT_AAC_ADTS_LC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC)},
+            {AUDIO_FORMAT_AAC_ADTS_SSR,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR)},
+            {AUDIO_FORMAT_AAC_ADTS_LTP,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP)},
+            {AUDIO_FORMAT_AAC_ADTS_HE_V1,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1)},
+            {AUDIO_FORMAT_AAC_ADTS_SCALABLE,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE)},
+            {AUDIO_FORMAT_AAC_ADTS_ERLC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC)},
+            {AUDIO_FORMAT_AAC_ADTS_LD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD)},
+            {AUDIO_FORMAT_AAC_ADTS_HE_V2,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2)},
+            {AUDIO_FORMAT_AAC_ADTS_ELD,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD)},
+            {AUDIO_FORMAT_AAC_ADTS_XHE,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE)},
+            {// Note: not in the IANA registry. "vnd.octel.sbc" is not BT SBC.
+             AUDIO_FORMAT_SBC, make_AudioFormatDescription("audio/x-sbc")},
+            {AUDIO_FORMAT_APTX, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_APTX)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APTX_HD, make_AudioFormatDescription("audio/vnd.qcom.aptx.hd")},
+            {AUDIO_FORMAT_AC4, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AC4)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_LDAC, make_AudioFormatDescription("audio/vnd.sony.ldac")},
+            {AUDIO_FORMAT_MAT,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DOLBY_MAT)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_MAT_1_0,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DOLBY_MAT +
+                                         std::string(".1.0"))},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_MAT_2_0,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DOLBY_MAT +
+                                         std::string(".2.0"))},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_MAT_2_1,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DOLBY_MAT +
+                                         std::string(".2.1"))},
+            {AUDIO_FORMAT_AAC_LATM,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC)},
+            {AUDIO_FORMAT_AAC_LATM_LC,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC)},
+            {AUDIO_FORMAT_AAC_LATM_HE_V1,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1)},
+            {AUDIO_FORMAT_AAC_LATM_HE_V2,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_CELT, make_AudioFormatDescription("audio/x-celt")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APTX_ADAPTIVE,
+             make_AudioFormatDescription("audio/vnd.qcom.aptx.adaptive")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_LHDC, make_AudioFormatDescription("audio/vnd.savitech.lhdc")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_LHDC_LL, make_AudioFormatDescription("audio/vnd.savitech.lhdc.ll")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APTX_TWSP, make_AudioFormatDescription("audio/vnd.qcom.aptx.twsp")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_LC3, make_AudioFormatDescription("audio/x-lc3")},
+            {AUDIO_FORMAT_MPEGH,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1)},
+            {AUDIO_FORMAT_MPEGH_BL_L3,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3)},
+            {AUDIO_FORMAT_MPEGH_BL_L4,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4)},
+            {AUDIO_FORMAT_MPEGH_LC_L3,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3)},
+            {AUDIO_FORMAT_MPEGH_LC_L4,
+             make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4)},
+            {AUDIO_FORMAT_IEC60958,
+             make_AudioFormatDescription(PcmType::INT_24_BIT,
+                                         ::android::MEDIA_MIMETYPE_AUDIO_IEC60958)},
+            {AUDIO_FORMAT_DRA, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_DRA)},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APTX_ADAPTIVE_QLEA,
+             make_AudioFormatDescription("audio/vnd.qcom.aptx.adaptive.r3")},
+            {// Note: not in the IANA registry.
+             AUDIO_FORMAT_APTX_ADAPTIVE_R4,
+             make_AudioFormatDescription("audio/vnd.qcom.aptx.adaptive.r4")},
+    }};
+    return pairs;
+}
+
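+// Helpers that turn the (legacy, AIDL) pair tables above into lookup maps. The size checks
+// abort via LOG_ALWAYS_FATAL_IF when a duplicate key would otherwise silently drop an entry.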
+template<typename S, typename T>
+std::map<S, T> make_DirectMap(const std::vector<std::pair<S, T>>& v) {
+    std::map<S, T> result(v.begin(), v.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
+    return result;
+}
+
+template<typename S, typename T>
+std::map<S, T> make_DirectMap(
+        const std::vector<std::pair<S, T>>& v1, const std::vector<std::pair<S, T>>& v2) {
+    std::map<S, T> result(v1.begin(), v1.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v1.size(), "Duplicate key elements detected in v1");
+    result.insert(v2.begin(), v2.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v1.size() + v2.size(),
+            "Duplicate key elements detected in v1+v2");
+    return result;
+}
+
+template<typename S, typename T>
+std::map<T, S> make_ReverseMap(const std::vector<std::pair<S, T>>& v) {
+    std::map<T, S> result;
+    std::transform(v.begin(), v.end(), std::inserter(result, result.begin()),
+            [](const std::pair<S, T>& p) {
+                return std::make_pair(p.second, p.first);
+            });
+    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
+    return result;
+}
+
+}  // namespace
+
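+// Translates an AIDL layout bit field into legacy channel mask bits one mapped pair at a
+// time. Any leftover AIDL bits mean there is no legacy equivalent, so the unmatched bits are
+// logged and AUDIO_CHANNEL_NONE is returned.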
+audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+        int aidlLayout, bool isInput) {
+    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
+    const int aidlLayoutInitial = aidlLayout; // for error message
+    audio_channel_mask_t legacy = AUDIO_CHANNEL_NONE;
+    for (const auto& bitPair : bitMapping) {
+        if ((aidlLayout & bitPair.second) == bitPair.second) {
+            legacy = static_cast<audio_channel_mask_t>(legacy | bitPair.first);
+            aidlLayout &= ~bitPair.second;
+            if (aidlLayout == 0) {
+                return legacy;
+            }
+        }
+    }
+    ALOGE("%s: aidl layout 0x%x contains bits 0x%x that have no match to legacy %s bits",
+            __func__, aidlLayoutInitial, aidlLayout, isInput ? "input" : "output");
+    return AUDIO_CHANNEL_NONE;
+}
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+        const AudioChannelLayout& aidl, bool isInput) {
+    using ReverseMap = std::map<AudioChannelLayout, audio_channel_mask_t>;
+    using Tag = AudioChannelLayout::Tag;
+    static const ReverseMap mIn = make_ReverseMap(getInAudioChannelPairs());
+    static const ReverseMap mOut = make_ReverseMap(getOutAudioChannelPairs());
+    static const ReverseMap mVoice = make_ReverseMap(getVoiceAudioChannelPairs());
+
+    auto convert = [](const AudioChannelLayout& aidl, const ReverseMap& m,
+            const char* func, const char* type) -> ConversionResult<audio_channel_mask_t> {
+        if (auto it = m.find(aidl); it != m.end()) {
+            return it->second;
+        } else {
+            ALOGW("%s: no legacy %s audio_channel_mask_t found for %s", func, type,
+                    aidl.toString().c_str());
+            return unexpected(BAD_VALUE);
+        }
+    };
+
+    switch (aidl.getTag()) {
+        case Tag::none:
+            return AUDIO_CHANNEL_NONE;
+        case Tag::invalid:
+            return AUDIO_CHANNEL_INVALID;
+        case Tag::indexMask:
+            // Index masks do not have pre-defined values.
+            if (const int bits = aidl.get<Tag::indexMask>();
+                __builtin_popcount(bits) != 0 &&
+                __builtin_popcount(bits) <= (int)AUDIO_CHANNEL_COUNT_MAX) {
+                return audio_channel_mask_from_representation_and_bits(
+                        AUDIO_CHANNEL_REPRESENTATION_INDEX, bits);
+            } else {
+                ALOGE("%s: invalid indexMask value 0x%x in %s",
+                        __func__, bits, aidl.toString().c_str());
+                return unexpected(BAD_VALUE);
+            }
+        case Tag::layoutMask:
+            // The fast path is to find a direct match for some known layout mask.
+            if (const auto layoutMatch = convert(aidl, isInput ? mIn : mOut, __func__,
+                    isInput ? "input" : "output");
+                    layoutMatch.ok()) {
+                return layoutMatch;
+            }
+            // If a match for a predefined layout wasn't found, make a custom one from bits.
+            if (audio_channel_mask_t bitMask =
+                    aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+                            aidl.get<Tag::layoutMask>(), isInput);
+                    bitMask != AUDIO_CHANNEL_NONE) {
+                return bitMask;
+            }
+            return unexpected(BAD_VALUE);
+        case Tag::voiceMask:
+            return convert(aidl, mVoice, __func__, "voice");
+    }
+    ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
+    return unexpected(BAD_VALUE);
+}
+
+int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+        audio_channel_mask_t legacy, bool isInput) {
+    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
+    const int legacyInitial = legacy; // for error message
+    int aidlLayout = 0;
+    for (const auto& bitPair : bitMapping) {
+        if ((legacy & bitPair.first) == bitPair.first) {
+            aidlLayout |= bitPair.second;
+            legacy = static_cast<audio_channel_mask_t>(legacy & ~bitPair.first);
+            if (legacy == 0) {
+                return aidlLayout;
+            }
+        }
+    }
+    ALOGE("%s: legacy %s audio_channel_mask_t 0x%x contains unrecognized bits 0x%x",
+            __func__, isInput ? "input" : "output", legacyInitial, legacy);
+    return 0;
+}
+
+ConversionResult<AudioChannelLayout> legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+        audio_channel_mask_t legacy, bool isInput) {
+    using DirectMap = std::map<audio_channel_mask_t, AudioChannelLayout>;
+    using Tag = AudioChannelLayout::Tag;
+    static const DirectMap mInAndVoice = make_DirectMap(
+            getInAudioChannelPairs(), getVoiceAudioChannelPairs());
+    static const DirectMap mOut = make_DirectMap(getOutAudioChannelPairs());
+
+    auto convert = [](const audio_channel_mask_t legacy, const DirectMap& m,
+            const char* func, const char* type) -> ConversionResult<AudioChannelLayout> {
+        if (auto it = m.find(legacy); it != m.end()) {
+            return it->second;
+        } else {
+            ALOGW("%s: no AudioChannelLayout found for legacy %s audio_channel_mask_t value 0x%x",
+                    func, type, legacy);
+            return unexpected(BAD_VALUE);
+        }
+    };
+
+    if (legacy == AUDIO_CHANNEL_NONE) {
+        return AudioChannelLayout{};
+    } else if (legacy == AUDIO_CHANNEL_INVALID) {
+        return AudioChannelLayout::make<Tag::invalid>(0);
+    }
+
+    const audio_channel_representation_t repr = audio_channel_mask_get_representation(legacy);
+    if (repr == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        if (audio_channel_mask_is_valid(legacy)) {
+            const int indexMask = VALUE_OR_RETURN(
+                    convertIntegral<int>(audio_channel_mask_get_bits(legacy)));
+            return AudioChannelLayout::make<Tag::indexMask>(indexMask);
+        } else {
+            ALOGE("%s: legacy audio_channel_mask_t value 0x%x is invalid", __func__, legacy);
+            return unexpected(BAD_VALUE);
+        }
+    } else if (repr == AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+        // The fast path is to find a direct match for some known layout mask.
+        if (const auto layoutMatch = convert(legacy, isInput ? mInAndVoice : mOut, __func__,
+                isInput ? "input / voice" : "output");
+                layoutMatch.ok()) {
+            return layoutMatch;
+        }
+        // If a match for a predefined layout wasn't found, make a custom one from bits,
+        // rejecting those with voice channel bits.
+        if (!isInput ||
+                (legacy & (AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_VOICE_DNLINK)) == 0) {
+            if (int bitMaskLayout =
+                    legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+                            legacy, isInput);
+                    bitMaskLayout != 0) {
+                return AudioChannelLayout::make<Tag::layoutMask>(bitMaskLayout);
+            }
+        } else {
+            ALOGE("%s: legacy audio_channel_mask_t value 0x%x contains voice bits",
+                    __func__, legacy);
+        }
+        return unexpected(BAD_VALUE);
+    }
+
+    ALOGE("%s: unknown representation %d in audio_channel_mask_t value 0x%x",
+            __func__, repr, legacy);
+    return unexpected(BAD_VALUE);
+}
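+
+// Illustrative round trip (not part of the conversion library itself): a legacy stereo
+// output mask is expected to convert to a layoutMask-tagged AudioChannelLayout, e.g.
+//   auto aidl = legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+//           AUDIO_CHANNEL_OUT_STEREO, false /*isInput*/);
+//   // on success, aidl->getTag() == AudioChannelLayout::Tag::layoutMask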
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+        const AudioDeviceDescription& aidl) {
+    static const std::map<AudioDeviceDescription, audio_devices_t> m =
+            make_ReverseMap(getAudioDevicePairs());
+    if (auto it = m.find(aidl); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no legacy audio_devices_t found for %s", __func__, aidl.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<AudioDeviceDescription> legacy2aidl_audio_devices_t_AudioDeviceDescription(
+        audio_devices_t legacy) {
+    static const std::map<audio_devices_t, AudioDeviceDescription> m =
+            make_DirectMap(getAudioDevicePairs());
+    if (auto it = m.find(legacy); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no AudioDeviceDescription found for legacy audio_devices_t value 0x%x",
+                __func__, legacy);
+        return unexpected(BAD_VALUE);
+    }
+}
+
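+// Chooses the most natural AudioDeviceAddress representation for a device description:
+// MAC for Bluetooth (except LE broadcast) and wireless connections, IPv4 for IP, an ALSA
+// card/device pair for USB, and a plain string id otherwise.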
+AudioDeviceAddress::Tag suggestDeviceAddressTag(const AudioDeviceDescription& description) {
+    using Tag = AudioDeviceAddress::Tag;
+    if (std::string connection = description.connection;
+            connection == GET_DEVICE_DESC_CONNECTION(BT_A2DP) ||
+            // Note: BT LE Broadcast uses a "group id".
+            (description.type != AudioDeviceType::OUT_BROADCAST &&
+                    connection == GET_DEVICE_DESC_CONNECTION(BT_LE)) ||
+            connection == GET_DEVICE_DESC_CONNECTION(BT_SCO) ||
+            connection == GET_DEVICE_DESC_CONNECTION(WIRELESS)) {
+        return Tag::mac;
+    } else if (connection == GET_DEVICE_DESC_CONNECTION(IP_V4)) {
+        return Tag::ipv4;
+    } else if (connection == GET_DEVICE_DESC_CONNECTION(USB)) {
+        return Tag::alsa;
+    }
+    return Tag::id;
+}
+
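+// The char* and String8 overloads of aidl2legacy_AudioDevice_audio_device delegate to the
+// std::string variant below, which formats the address according to its tag.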
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, char* legacyAddress) {
+    std::string stringAddress;
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl, legacyType, &stringAddress));
+    return aidl2legacy_string(stringAddress, legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+}
+
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, String8* legacyAddress) {
+    std::string stringAddress;
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl, legacyType, &stringAddress));
+    *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(stringAddress));
+    return OK;
+}
+
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, std::string* legacyAddress) {
+    using Tag = AudioDeviceAddress::Tag;
+    *legacyType = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
+    char addressBuffer[AUDIO_DEVICE_MAX_ADDRESS_LEN]{};
+    // 'aidl.address' can be empty even when the connection type is not empty.
+    // This happens for device ports that act as "blueprints". In this case
+    // we pass an empty string using the 'id' variant.
+    switch (aidl.address.getTag()) {
+        case Tag::mac: {
+            const std::vector<uint8_t>& mac = aidl.address.get<AudioDeviceAddress::mac>();
+            if (mac.size() != 6) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
+                    mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+        } break;
+        case Tag::ipv4: {
+            const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
+            if (ipv4.size() != 4) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%u.%u.%u.%u",
+                    ipv4[0], ipv4[1], ipv4[2], ipv4[3]);
+        } break;
+        case Tag::ipv6: {
+            const std::vector<int32_t>& ipv6 = aidl.address.get<AudioDeviceAddress::ipv6>();
+            if (ipv6.size() != 8) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+                    "%04X:%04X:%04X:%04X:%04X:%04X:%04X:%04X",
+                    ipv6[0], ipv6[1], ipv6[2], ipv6[3], ipv6[4], ipv6[5], ipv6[6], ipv6[7]);
+        } break;
+        case Tag::alsa: {
+            const std::vector<int32_t>& alsa = aidl.address.get<AudioDeviceAddress::alsa>();
+            if (alsa.size() != 2) return BAD_VALUE;
+            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "card=%d;device=%d",
+                    alsa[0], alsa[1]);
+        } break;
+        case Tag::id: {
+            RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidl.address.get<AudioDeviceAddress::id>(),
+                            addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+        } break;
+    }
+    *legacyAddress = addressBuffer;
+    return OK;
+}
+
+ConversionResult<AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const char* legacyAddress) {
+    const std::string stringAddress = VALUE_OR_RETURN(
+            legacy2aidl_string(legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
+}
+
+ConversionResult<AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const String8& legacyAddress) {
+    const std::string stringAddress = VALUE_OR_RETURN(legacy2aidl_String8_string(legacyAddress));
+    return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
+}
+
+ConversionResult<AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const std::string& legacyAddress) {
+    using Tag = AudioDeviceAddress::Tag;
+    AudioDevice aidl;
+    aidl.type = VALUE_OR_RETURN(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
+    // 'legacyAddress' can be empty even when the connection type is not empty.
+    // This happens for device ports that act as "blueprints". In this case
+    // we pass an empty string using the 'id' variant.
+    if (!legacyAddress.empty()) {
+        switch (suggestDeviceAddressTag(aidl.type)) {
+            case Tag::mac: {
+                std::vector<uint8_t> mac(6);
+                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                        &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+                if (status != mac.size()) {
+                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
+            } break;
+            case Tag::ipv4: {
+                std::vector<uint8_t> ipv4(4);
+                int status = sscanf(legacyAddress.c_str(), "%hhu.%hhu.%hhu.%hhu",
+                        &ipv4[0], &ipv4[1], &ipv4[2], &ipv4[3]);
+                if (status != ipv4.size()) {
+                    ALOGE("%s: malformed IPv4 address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv4>(std::move(ipv4));
+            } break;
+            case Tag::ipv6: {
+                std::vector<int32_t> ipv6(8);
+                int status = sscanf(legacyAddress.c_str(), "%X:%X:%X:%X:%X:%X:%X:%X",
+                        &ipv6[0], &ipv6[1], &ipv6[2], &ipv6[3], &ipv6[4], &ipv6[5], &ipv6[6],
+                        &ipv6[7]);
+                if (status != ipv6.size()) {
+                    ALOGE("%s: malformed IPv6 address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv6>(std::move(ipv6));
+            } break;
+            case Tag::alsa: {
+                std::vector<int32_t> alsa(2);
+                int status = sscanf(legacyAddress.c_str(), "card=%d;device=%d", &alsa[0], &alsa[1]);
+                if (status != alsa.size()) {
+                    ALOGE("%s: malformed ALSA address: \"%s\"", __func__, legacyAddress.c_str());
+                    return unexpected(BAD_VALUE);
+                }
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::alsa>(std::move(alsa));
+            } break;
+            case Tag::id: {
+                aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+            } break;
+        }
+    } else {
+        aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+    }
+    return aidl;
+}
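+
+// Illustrative example (not part of the library): converting a legacy A2DP device with a
+// MAC-style address is expected to yield an AudioDeviceAddress::mac payload, e.g.
+//   auto aidl = legacy2aidl_audio_device_AudioDevice(
+//           AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, std::string("00:11:22:33:44:55"));
+//   // on success, aidl->address.getTag() == AudioDeviceAddress::Tag::mac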
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+        const AudioFormatDescription& aidl) {
+    static const std::map<AudioFormatDescription, audio_format_t> m =
+            make_ReverseMap(getAudioFormatPairs());
+    if (auto it = m.find(aidl); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no legacy audio_format_t found for %s", __func__, aidl.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<AudioFormatDescription> legacy2aidl_audio_format_t_AudioFormatDescription(
+        audio_format_t legacy) {
+    static const std::map<audio_format_t, AudioFormatDescription> m =
+            make_DirectMap(getAudioFormatPairs());
+    if (auto it = m.find(legacy); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no AudioFormatDescription found for legacy audio_format_t value 0x%x",
+                __func__, legacy);
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(
+        AudioGainMode aidl) {
+    switch (aidl) {
+        case AudioGainMode::JOINT:
+            return AUDIO_GAIN_MODE_JOINT;
+        case AudioGainMode::CHANNELS:
+            return AUDIO_GAIN_MODE_CHANNELS;
+        case AudioGainMode::RAMP:
+            return AUDIO_GAIN_MODE_RAMP;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(
+        audio_gain_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_GAIN_MODE_JOINT:
+            return AudioGainMode::JOINT;
+        case AUDIO_GAIN_MODE_CHANNELS:
+            return AudioGainMode::CHANNELS;
+        case AUDIO_GAIN_MODE_RAMP:
+            return AudioGainMode::RAMP;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl) {
+    return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, AudioGainMode>(
+            aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
+            // AudioGainMode is index-based.
+            indexToEnum_index<AudioGainMode>,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
+    return convertBitmask<int32_t, audio_gain_mode_t, AudioGainMode, audio_gain_mode_t>(
+            legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
+            // AUDIO_GAIN_MODE_* constants are mask-based.
+            indexToEnum_bitmask<audio_gain_mode_t>,
+            // AudioGainMode is index-based.
+            enumToMask_index<int32_t, AudioGainMode>);
+}
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const AudioGainConfig& aidl, bool isInput) {
+    audio_gain_config legacy;
+    legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+    const bool isJoint = bitmaskIsSet(aidl.mode, AudioGainMode::JOINT);
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
+        return unexpected(BAD_VALUE);
+    }
+    for (size_t i = 0; i < numValues; ++i) {
+        legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
+    }
+    legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<int>(aidl.rampDurationMs));
+    return legacy;
+}
+
+ConversionResult<AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, bool isInput) {
+    AudioGainConfig aidl;
+    aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
+    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+    const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
+    size_t numValues = isJoint ? 1
+                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
+    aidl.values.resize(numValues);
+    for (size_t i = 0; i < numValues; ++i) {
+        aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
+    }
+    aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
+    return aidl;
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+        AudioInputFlags aidl) {
+    switch (aidl) {
+        case AudioInputFlags::FAST:
+            return AUDIO_INPUT_FLAG_FAST;
+        case AudioInputFlags::HW_HOTWORD:
+            return AUDIO_INPUT_FLAG_HW_HOTWORD;
+        case AudioInputFlags::RAW:
+            return AUDIO_INPUT_FLAG_RAW;
+        case AudioInputFlags::SYNC:
+            return AUDIO_INPUT_FLAG_SYNC;
+        case AudioInputFlags::MMAP_NOIRQ:
+            return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+        case AudioInputFlags::VOIP_TX:
+            return AUDIO_INPUT_FLAG_VOIP_TX;
+        case AudioInputFlags::HW_AV_SYNC:
+            return AUDIO_INPUT_FLAG_HW_AV_SYNC;
+        case AudioInputFlags::DIRECT:
+            return AUDIO_INPUT_FLAG_DIRECT;
+        case AudioInputFlags::ULTRASOUND:
+            return AUDIO_INPUT_FLAG_ULTRASOUND;
+        case AudioInputFlags::HOTWORD_TAP:
+            return AUDIO_INPUT_FLAG_HOTWORD_TAP;
+        case AudioInputFlags::HW_LOOKBACK:
+            return AUDIO_INPUT_FLAG_HW_LOOKBACK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+        audio_input_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_INPUT_FLAG_NONE:
+            break; // shouldn't get here; listed only to satisfy -Werror,-Wswitch
+        case AUDIO_INPUT_FLAG_FAST:
+            return AudioInputFlags::FAST;
+        case AUDIO_INPUT_FLAG_HW_HOTWORD:
+            return AudioInputFlags::HW_HOTWORD;
+        case AUDIO_INPUT_FLAG_RAW:
+            return AudioInputFlags::RAW;
+        case AUDIO_INPUT_FLAG_SYNC:
+            return AudioInputFlags::SYNC;
+        case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
+            return AudioInputFlags::MMAP_NOIRQ;
+        case AUDIO_INPUT_FLAG_VOIP_TX:
+            return AudioInputFlags::VOIP_TX;
+        case AUDIO_INPUT_FLAG_HW_AV_SYNC:
+            return AudioInputFlags::HW_AV_SYNC;
+        case AUDIO_INPUT_FLAG_DIRECT:
+            return AudioInputFlags::DIRECT;
+        case AUDIO_INPUT_FLAG_ULTRASOUND:
+            return AudioInputFlags::ULTRASOUND;
+        case AUDIO_INPUT_FLAG_HOTWORD_TAP:
+            return AudioInputFlags::HOTWORD_TAP;
+        case AUDIO_INPUT_FLAG_HW_LOOKBACK:
+            return AudioInputFlags::HW_LOOKBACK;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+        AudioOutputFlags aidl) {
+    switch (aidl) {
+        case AudioOutputFlags::DIRECT:
+            return AUDIO_OUTPUT_FLAG_DIRECT;
+        case AudioOutputFlags::PRIMARY:
+            return AUDIO_OUTPUT_FLAG_PRIMARY;
+        case AudioOutputFlags::FAST:
+            return AUDIO_OUTPUT_FLAG_FAST;
+        case AudioOutputFlags::DEEP_BUFFER:
+            return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+        case AudioOutputFlags::COMPRESS_OFFLOAD:
+            return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+        case AudioOutputFlags::NON_BLOCKING:
+            return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
+        case AudioOutputFlags::HW_AV_SYNC:
+            return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+        case AudioOutputFlags::TTS:
+            return AUDIO_OUTPUT_FLAG_TTS;
+        case AudioOutputFlags::RAW:
+            return AUDIO_OUTPUT_FLAG_RAW;
+        case AudioOutputFlags::SYNC:
+            return AUDIO_OUTPUT_FLAG_SYNC;
+        case AudioOutputFlags::IEC958_NONAUDIO:
+            return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+        case AudioOutputFlags::DIRECT_PCM:
+            return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
+        case AudioOutputFlags::MMAP_NOIRQ:
+            return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+        case AudioOutputFlags::VOIP_RX:
+            return AUDIO_OUTPUT_FLAG_VOIP_RX;
+        case AudioOutputFlags::INCALL_MUSIC:
+            return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
+        case AudioOutputFlags::GAPLESS_OFFLOAD:
+            return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+        case AudioOutputFlags::ULTRASOUND:
+            return AUDIO_OUTPUT_FLAG_ULTRASOUND;
+        case AudioOutputFlags::SPATIALIZER:
+            return AUDIO_OUTPUT_FLAG_SPATIALIZER;
+        case AudioOutputFlags::BIT_PERFECT:
+            return AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+        audio_output_flags_t legacy) {
+    switch (legacy) {
+        case AUDIO_OUTPUT_FLAG_NONE:
+            break; // shouldn't get here; listed only to satisfy -Werror,-Wswitch
+        case AUDIO_OUTPUT_FLAG_DIRECT:
+            return AudioOutputFlags::DIRECT;
+        case AUDIO_OUTPUT_FLAG_PRIMARY:
+            return AudioOutputFlags::PRIMARY;
+        case AUDIO_OUTPUT_FLAG_FAST:
+            return AudioOutputFlags::FAST;
+        case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
+            return AudioOutputFlags::DEEP_BUFFER;
+        case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
+            return AudioOutputFlags::COMPRESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
+            return AudioOutputFlags::NON_BLOCKING;
+        case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
+            return AudioOutputFlags::HW_AV_SYNC;
+        case AUDIO_OUTPUT_FLAG_TTS:
+            return AudioOutputFlags::TTS;
+        case AUDIO_OUTPUT_FLAG_RAW:
+            return AudioOutputFlags::RAW;
+        case AUDIO_OUTPUT_FLAG_SYNC:
+            return AudioOutputFlags::SYNC;
+        case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
+            return AudioOutputFlags::IEC958_NONAUDIO;
+        case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
+            return AudioOutputFlags::DIRECT_PCM;
+        case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
+            return AudioOutputFlags::MMAP_NOIRQ;
+        case AUDIO_OUTPUT_FLAG_VOIP_RX:
+            return AudioOutputFlags::VOIP_RX;
+        case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
+            return AudioOutputFlags::INCALL_MUSIC;
+        case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
+            return AudioOutputFlags::GAPLESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_ULTRASOUND:
+            return AudioOutputFlags::ULTRASOUND;
+        case AUDIO_OUTPUT_FLAG_SPATIALIZER:
+            return AudioOutputFlags::SPATIALIZER;
+        case AUDIO_OUTPUT_FLAG_BIT_PERFECT:
+            return AudioOutputFlags::BIT_PERFECT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
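+// The *_mask converters below translate between the AIDL flag representation (a bitmask of
+// enum indices) and the legacy AUDIO_*_FLAG_* bitmask, converting one flag at a time via
+// convertBitmask.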
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, AudioInputFlags>(
+                    aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
+                    indexToEnum_index<AudioInputFlags>,
+                    enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
+    return static_cast<audio_input_flags_t>(converted);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, AudioInputFlags, audio_input_flags_t>(
+            legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
+            indexToEnum_bitmask<audio_input_flags_t>,
+            enumToMask_index<int32_t, AudioInputFlags>);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl) {
+    return convertBitmask<audio_output_flags_t,
+            int32_t,
+            audio_output_flags_t,
+            AudioOutputFlags>(
+            aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
+            indexToEnum_index<AudioOutputFlags>,
+            enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, AudioOutputFlags, audio_output_flags_t>(
+            legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
+            indexToEnum_bitmask<audio_output_flags_t>,
+            enumToMask_index<int32_t, AudioOutputFlags>);
+}
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const AudioIoFlags& aidl, bool isInput) {
+    audio_io_flags legacy;
+    if (isInput) {
+        legacy.input = VALUE_OR_RETURN(
+                aidl2legacy_int32_t_audio_input_flags_t_mask(
+                        VALUE_OR_RETURN(UNION_GET(aidl, input))));
+    } else {
+        legacy.output = VALUE_OR_RETURN(
+                aidl2legacy_int32_t_audio_output_flags_t_mask(
+                        VALUE_OR_RETURN(UNION_GET(aidl, output))));
+    }
+    return legacy;
+}
+
+ConversionResult<AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, bool isInput) {
+    AudioIoFlags aidl;
+    if (isInput) {
+        UNION_SET(aidl, input,
+                VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(legacy.input)));
+    } else {
+        UNION_SET(aidl, output,
+                VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(legacy.output)));
+    }
+    return aidl;
+}
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+        AudioStreamType aidl) {
+    switch (aidl) {
+        case AudioStreamType::INVALID:
+            break;  // return error
+        case AudioStreamType::SYS_RESERVED_DEFAULT:
+            return AUDIO_STREAM_DEFAULT;
+        case AudioStreamType::VOICE_CALL:
+            return AUDIO_STREAM_VOICE_CALL;
+        case AudioStreamType::SYSTEM:
+            return AUDIO_STREAM_SYSTEM;
+        case AudioStreamType::RING:
+            return AUDIO_STREAM_RING;
+        case AudioStreamType::MUSIC:
+            return AUDIO_STREAM_MUSIC;
+        case AudioStreamType::ALARM:
+            return AUDIO_STREAM_ALARM;
+        case AudioStreamType::NOTIFICATION:
+            return AUDIO_STREAM_NOTIFICATION;
+        case AudioStreamType::BLUETOOTH_SCO:
+            return AUDIO_STREAM_BLUETOOTH_SCO;
+        case AudioStreamType::ENFORCED_AUDIBLE:
+            return AUDIO_STREAM_ENFORCED_AUDIBLE;
+        case AudioStreamType::DTMF:
+            return AUDIO_STREAM_DTMF;
+        case AudioStreamType::TTS:
+            return AUDIO_STREAM_TTS;
+        case AudioStreamType::ACCESSIBILITY:
+            return AUDIO_STREAM_ACCESSIBILITY;
+        case AudioStreamType::ASSISTANT:
+            return AUDIO_STREAM_ASSISTANT;
+        case AudioStreamType::SYS_RESERVED_REROUTING:
+            return AUDIO_STREAM_REROUTING;
+        case AudioStreamType::SYS_RESERVED_PATCH:
+            return AUDIO_STREAM_PATCH;
+        case AudioStreamType::CALL_ASSISTANT:
+            return AUDIO_STREAM_CALL_ASSISTANT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+        audio_stream_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_STREAM_DEFAULT:
+            return AudioStreamType::SYS_RESERVED_DEFAULT;
+        case AUDIO_STREAM_VOICE_CALL:
+            return AudioStreamType::VOICE_CALL;
+        case AUDIO_STREAM_SYSTEM:
+            return AudioStreamType::SYSTEM;
+        case AUDIO_STREAM_RING:
+            return AudioStreamType::RING;
+        case AUDIO_STREAM_MUSIC:
+            return AudioStreamType::MUSIC;
+        case AUDIO_STREAM_ALARM:
+            return AudioStreamType::ALARM;
+        case AUDIO_STREAM_NOTIFICATION:
+            return AudioStreamType::NOTIFICATION;
+        case AUDIO_STREAM_BLUETOOTH_SCO:
+            return AudioStreamType::BLUETOOTH_SCO;
+        case AUDIO_STREAM_ENFORCED_AUDIBLE:
+            return AudioStreamType::ENFORCED_AUDIBLE;
+        case AUDIO_STREAM_DTMF:
+            return AudioStreamType::DTMF;
+        case AUDIO_STREAM_TTS:
+            return AudioStreamType::TTS;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            return AudioStreamType::ACCESSIBILITY;
+        case AUDIO_STREAM_ASSISTANT:
+            return AudioStreamType::ASSISTANT;
+        case AUDIO_STREAM_REROUTING:
+            return AudioStreamType::SYS_RESERVED_REROUTING;
+        case AUDIO_STREAM_PATCH:
+            return AudioStreamType::SYS_RESERVED_PATCH;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            return AudioStreamType::CALL_ASSISTANT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+        AudioSource aidl) {
+    switch (aidl) {
+        case AudioSource::SYS_RESERVED_INVALID:
+            return AUDIO_SOURCE_INVALID;
+        case AudioSource::DEFAULT:
+            return AUDIO_SOURCE_DEFAULT;
+        case AudioSource::MIC:
+            return AUDIO_SOURCE_MIC;
+        case AudioSource::VOICE_UPLINK:
+            return AUDIO_SOURCE_VOICE_UPLINK;
+        case AudioSource::VOICE_DOWNLINK:
+            return AUDIO_SOURCE_VOICE_DOWNLINK;
+        case AudioSource::VOICE_CALL:
+            return AUDIO_SOURCE_VOICE_CALL;
+        case AudioSource::CAMCORDER:
+            return AUDIO_SOURCE_CAMCORDER;
+        case AudioSource::VOICE_RECOGNITION:
+            return AUDIO_SOURCE_VOICE_RECOGNITION;
+        case AudioSource::VOICE_COMMUNICATION:
+            return AUDIO_SOURCE_VOICE_COMMUNICATION;
+        case AudioSource::REMOTE_SUBMIX:
+            return AUDIO_SOURCE_REMOTE_SUBMIX;
+        case AudioSource::UNPROCESSED:
+            return AUDIO_SOURCE_UNPROCESSED;
+        case AudioSource::VOICE_PERFORMANCE:
+            return AUDIO_SOURCE_VOICE_PERFORMANCE;
+        case AudioSource::ULTRASOUND:
+            return AUDIO_SOURCE_ULTRASOUND;
+        case AudioSource::ECHO_REFERENCE:
+            return AUDIO_SOURCE_ECHO_REFERENCE;
+        case AudioSource::FM_TUNER:
+            return AUDIO_SOURCE_FM_TUNER;
+        case AudioSource::HOTWORD:
+            return AUDIO_SOURCE_HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioSource> legacy2aidl_audio_source_t_AudioSource(
+        audio_source_t legacy) {
+    switch (legacy) {
+        case AUDIO_SOURCE_INVALID:
+            return AudioSource::SYS_RESERVED_INVALID;
+        case AUDIO_SOURCE_DEFAULT:
+            return AudioSource::DEFAULT;
+        case AUDIO_SOURCE_MIC:
+            return AudioSource::MIC;
+        case AUDIO_SOURCE_VOICE_UPLINK:
+            return AudioSource::VOICE_UPLINK;
+        case AUDIO_SOURCE_VOICE_DOWNLINK:
+            return AudioSource::VOICE_DOWNLINK;
+        case AUDIO_SOURCE_VOICE_CALL:
+            return AudioSource::VOICE_CALL;
+        case AUDIO_SOURCE_CAMCORDER:
+            return AudioSource::CAMCORDER;
+        case AUDIO_SOURCE_VOICE_RECOGNITION:
+            return AudioSource::VOICE_RECOGNITION;
+        case AUDIO_SOURCE_VOICE_COMMUNICATION:
+            return AudioSource::VOICE_COMMUNICATION;
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return AudioSource::REMOTE_SUBMIX;
+        case AUDIO_SOURCE_UNPROCESSED:
+            return AudioSource::UNPROCESSED;
+        case AUDIO_SOURCE_VOICE_PERFORMANCE:
+            return AudioSource::VOICE_PERFORMANCE;
+        case AUDIO_SOURCE_ULTRASOUND:
+            return AudioSource::ULTRASOUND;
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+            return AudioSource::ECHO_REFERENCE;
+        case AUDIO_SOURCE_FM_TUNER:
+            return AudioSource::FM_TUNER;
+        case AUDIO_SOURCE_HOTWORD:
+            return AudioSource::HOTWORD;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl) {
+    return convertReinterpret<audio_session_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(AudioContentType aidl) {
+    switch (aidl) {
+        case AudioContentType::UNKNOWN:
+            return AUDIO_CONTENT_TYPE_UNKNOWN;
+        case AudioContentType::SPEECH:
+            return AUDIO_CONTENT_TYPE_SPEECH;
+        case AudioContentType::MUSIC:
+            return AUDIO_CONTENT_TYPE_MUSIC;
+        case AudioContentType::MOVIE:
+            return AUDIO_CONTENT_TYPE_MOVIE;
+        case AudioContentType::SONIFICATION:
+            return AUDIO_CONTENT_TYPE_SONIFICATION;
+        case AudioContentType::ULTRASOUND:
+            return AUDIO_CONTENT_TYPE_ULTRASOUND;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_CONTENT_TYPE_UNKNOWN:
+            return AudioContentType::UNKNOWN;
+        case AUDIO_CONTENT_TYPE_SPEECH:
+            return AudioContentType::SPEECH;
+        case AUDIO_CONTENT_TYPE_MUSIC:
+            return AudioContentType::MUSIC;
+        case AUDIO_CONTENT_TYPE_MOVIE:
+            return AudioContentType::MOVIE;
+        case AUDIO_CONTENT_TYPE_SONIFICATION:
+            return AudioContentType::SONIFICATION;
+        case AUDIO_CONTENT_TYPE_ULTRASOUND:
+            return AudioContentType::ULTRASOUND;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_usage_t>
+aidl2legacy_AudioUsage_audio_usage_t(AudioUsage aidl) {
+    switch (aidl) {
+        case AudioUsage::INVALID:
+            break;  // return error
+        case AudioUsage::UNKNOWN:
+            return AUDIO_USAGE_UNKNOWN;
+        case AudioUsage::MEDIA:
+            return AUDIO_USAGE_MEDIA;
+        case AudioUsage::VOICE_COMMUNICATION:
+            return AUDIO_USAGE_VOICE_COMMUNICATION;
+        case AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
+            return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+        case AudioUsage::ALARM:
+            return AUDIO_USAGE_ALARM;
+        case AudioUsage::NOTIFICATION:
+            return AUDIO_USAGE_NOTIFICATION;
+        case AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
+            return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED:
+            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
+        case AudioUsage::NOTIFICATION_EVENT:
+            return AUDIO_USAGE_NOTIFICATION_EVENT;
+        case AudioUsage::ASSISTANCE_ACCESSIBILITY:
+            return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+        case AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
+            return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
+        case AudioUsage::ASSISTANCE_SONIFICATION:
+            return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+        case AudioUsage::GAME:
+            return AUDIO_USAGE_GAME;
+        case AudioUsage::VIRTUAL_SOURCE:
+            return AUDIO_USAGE_VIRTUAL_SOURCE;
+        case AudioUsage::ASSISTANT:
+            return AUDIO_USAGE_ASSISTANT;
+        case AudioUsage::CALL_ASSISTANT:
+            return AUDIO_USAGE_CALL_ASSISTANT;
+        case AudioUsage::EMERGENCY:
+            return AUDIO_USAGE_EMERGENCY;
+        case AudioUsage::SAFETY:
+            return AUDIO_USAGE_SAFETY;
+        case AudioUsage::VEHICLE_STATUS:
+            return AUDIO_USAGE_VEHICLE_STATUS;
+        case AudioUsage::ANNOUNCEMENT:
+            return AUDIO_USAGE_ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioUsage>
+legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
+    switch (legacy) {
+        case AUDIO_USAGE_UNKNOWN:
+            return AudioUsage::UNKNOWN;
+        case AUDIO_USAGE_MEDIA:
+            return AudioUsage::MEDIA;
+        case AUDIO_USAGE_VOICE_COMMUNICATION:
+            return AudioUsage::VOICE_COMMUNICATION;
+        case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+            return AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
+        case AUDIO_USAGE_ALARM:
+            return AudioUsage::ALARM;
+        case AUDIO_USAGE_NOTIFICATION:
+            return AudioUsage::NOTIFICATION;
+        case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+            return AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT;
+        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED;
+        case AUDIO_USAGE_NOTIFICATION_EVENT:
+            return AudioUsage::NOTIFICATION_EVENT;
+        case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+            return AudioUsage::ASSISTANCE_ACCESSIBILITY;
+        case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+            return AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
+        case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+            return AudioUsage::ASSISTANCE_SONIFICATION;
+        case AUDIO_USAGE_GAME:
+            return AudioUsage::GAME;
+        case AUDIO_USAGE_VIRTUAL_SOURCE:
+            return AudioUsage::VIRTUAL_SOURCE;
+        case AUDIO_USAGE_ASSISTANT:
+            return AudioUsage::ASSISTANT;
+        case AUDIO_USAGE_CALL_ASSISTANT:
+            return AudioUsage::CALL_ASSISTANT;
+        case AUDIO_USAGE_EMERGENCY:
+            return AudioUsage::EMERGENCY;
+        case AUDIO_USAGE_SAFETY:
+            return AudioUsage::SAFETY;
+        case AUDIO_USAGE_VEHICLE_STATUS:
+            return AudioUsage::VEHICLE_STATUS;
+        case AUDIO_USAGE_ANNOUNCEMENT:
+            return AudioUsage::ANNOUNCEMENT;
+    }
+    return unexpected(BAD_VALUE);
+}
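+
+// Round-trip sketch (illustrative, not exercised by the code above): the
+// SYS_RESERVED_* AIDL usages map to the legacy NOTIFICATION_COMMUNICATION_*
+// values and back, e.g. in a test:
+//
+//     const audio_usage_t legacy = aidl2legacy_AudioUsage_audio_usage_t(
+//             AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST).value();
+//     assert(legacy == AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST);
+//     assert(legacy2aidl_audio_usage_t_AudioUsage(legacy).value()
+//             == AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST);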
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(AudioEncapsulationMode aidl) {
+    switch (aidl) {
+        case AudioEncapsulationMode::INVALID:
+            break;  // return error
+        case AudioEncapsulationMode::NONE:
+            return AUDIO_ENCAPSULATION_MODE_NONE;
+        case AudioEncapsulationMode::ELEMENTARY_STREAM:
+            return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
+        case AudioEncapsulationMode::HANDLE:
+            return AUDIO_ENCAPSULATION_MODE_HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_MODE_NONE:
+            return AudioEncapsulationMode::NONE;
+        case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
+            return AudioEncapsulationMode::ELEMENTARY_STREAM;
+        case AUDIO_ENCAPSULATION_MODE_HANDLE:
+            return AudioEncapsulationMode::HANDLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const AudioOffloadInfo& aidl) {
+    audio_offload_info_t legacy = AUDIO_INFO_INITIALIZER;
+    audio_config_base_t base = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, false /*isInput*/));
+    legacy.sample_rate = base.sample_rate;
+    legacy.channel_mask = base.channel_mask;
+    legacy.format = base.format;
+    legacy.stream_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<int32_t>(aidl.bitRatePerSecond));
+    legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
+    legacy.has_video = aidl.hasVideo;
+    legacy.is_streaming = aidl.isStreaming;
+    legacy.bit_width = VALUE_OR_RETURN(convertIntegral<int32_t>(aidl.bitWidth));
+    legacy.offload_buffer_size = VALUE_OR_RETURN(convertIntegral<int32_t>(aidl.offloadBufferSize));
+    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.encapsulation_mode = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(aidl.encapsulationMode));
+    legacy.content_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.contentId));
+    legacy.sync_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.syncId));
+    return legacy;
+}
+
+ConversionResult<AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
+    AudioOffloadInfo aidl;
+    // Version 0.1 fields.
+    if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
+        return unexpected(BAD_VALUE);
+    }
+    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
+        .channel_mask = legacy.channel_mask, .format = legacy.format };
+    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(
+                    base, false /*isInput*/));
+    aidl.streamType = VALUE_OR_RETURN(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
+    aidl.bitRatePerSecond = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
+    aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
+    aidl.hasVideo = legacy.has_video;
+    aidl.isStreaming = legacy.is_streaming;
+    aidl.bitWidth = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_width));
+    aidl.offloadBufferSize = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.offload_buffer_size));
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
+
+    // Version 0.2 fields.
+    if (legacy.version >= AUDIO_OFFLOAD_INFO_VERSION_0_2) {
+        if (legacy.size <
+            offsetof(audio_offload_info_t, sync_id) + sizeof(audio_offload_info_t::sync_id)) {
+            return unexpected(BAD_VALUE);
+        }
+        aidl.encapsulationMode = VALUE_OR_RETURN(
+                legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(
+                        legacy.encapsulation_mode));
+        aidl.contentId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.content_id));
+        aidl.syncId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.sync_id));
+    }
+    return aidl;
+}
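+
+// Conceptual sketch of the size gating above (illustrative; the helper name is
+// hypothetical): a field of the versioned audio_offload_info_t is only read
+// when the struct's self-reported 'size' covers it, so callers that filled in
+// an older, smaller struct are handled gracefully.
+//
+//     bool sizeCoversField(const audio_offload_info_t& info,
+//                          size_t fieldOffset, size_t fieldSize) {
+//         return info.size >= fieldOffset + fieldSize;
+//     }
+//     // e.g. sizeCoversField(legacy, offsetof(audio_offload_info_t, sync_id),
+//     //                      sizeof(audio_offload_info_t::sync_id)) mirrors the
+//     //      version 0.2 check above.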
+
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type) {
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+        case AUDIO_PORT_TYPE_SESSION:
+            break;  // must be listed to satisfy -Werror,-Wswitch
+        case AUDIO_PORT_TYPE_DEVICE:
+            switch (role) {
+                case AUDIO_PORT_ROLE_NONE:
+                    break;  // must be listed to satisfy -Werror,-Wswitch
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return AudioPortDirection::INPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return AudioPortDirection::OUTPUT;
+            }
+            break;
+        case AUDIO_PORT_TYPE_MIX:
+            switch (role) {
+                case AUDIO_PORT_ROLE_NONE:
+                    break;  // must be listed to satisfy -Werror,-Wswitch
+                case AUDIO_PORT_ROLE_SOURCE:
+                    return AudioPortDirection::OUTPUT;
+                case AUDIO_PORT_ROLE_SINK:
+                    return AudioPortDirection::INPUT;
+            }
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type) {
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+        case AUDIO_PORT_TYPE_SESSION:
+            break;  // must be listed to satisfy -Werror,-Wswitch
+        case AUDIO_PORT_TYPE_DEVICE:
+            switch (direction) {
+                case AudioPortDirection::INPUT:
+                    return AUDIO_PORT_ROLE_SOURCE;
+                case AudioPortDirection::OUTPUT:
+                    return AUDIO_PORT_ROLE_SINK;
+            }
+            break;
+        case AUDIO_PORT_TYPE_MIX:
+            switch (direction) {
+                case AudioPortDirection::OUTPUT:
+                    return AUDIO_PORT_ROLE_SOURCE;
+                case AudioPortDirection::INPUT:
+                    return AUDIO_PORT_ROLE_SINK;
+            }
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
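+
+// Minimal sketch of the mapping above (illustrative, e.g. as unit-test asserts):
+// a DEVICE port acting as a SOURCE feeds audio into the framework, so it is an
+// INPUT; for a MIX port the relationship flips and a SOURCE is an OUTPUT.
+//
+//     assert(portDirection(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE).value()
+//             == AudioPortDirection::INPUT);
+//     assert(portRole(AudioPortDirection::INPUT, AUDIO_PORT_TYPE_DEVICE).value()
+//             == AUDIO_PORT_ROLE_SOURCE);
+//     assert(portDirection(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_MIX).value()
+//             == AudioPortDirection::OUTPUT);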
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const AudioConfig& aidl, bool isInput) {
+    const audio_config_base_t legacyBase = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, isInput));
+    audio_config_t legacy = AUDIO_CONFIG_INITIALIZER;
+    legacy.sample_rate = legacyBase.sample_rate;
+    legacy.channel_mask = legacyBase.channel_mask;
+    legacy.format = legacyBase.format;
+    legacy.offload_info = VALUE_OR_RETURN(
+            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
+    legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
+    return legacy;
+}
+
+ConversionResult<AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput) {
+    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
+        .channel_mask = legacy.channel_mask, .format = legacy.format };
+    AudioConfig aidl;
+    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(base, isInput));
+    aidl.offloadInfo = VALUE_OR_RETURN(
+            legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
+    return aidl;
+}
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(const AudioConfigBase& aidl, bool isInput) {
+    audio_config_base_t legacy;
+    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<int>(aidl.sampleRate));
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+    return legacy;
+}
+
+ConversionResult<AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput) {
+    AudioConfigBase aidl;
+    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
+    return aidl;
+}
+
+ConversionResult<audio_uuid_t>
+aidl2legacy_AudioUuid_audio_uuid_t(const AudioUuid& aidl) {
+    audio_uuid_t legacy;
+    legacy.timeLow = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.timeLow));
+    legacy.timeMid = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeMid));
+    legacy.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeHiAndVersion));
+    legacy.clockSeq = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.clockSeq));
+    if (aidl.node.size() != std::size(legacy.node)) {
+        return unexpected(BAD_VALUE);
+    }
+    std::copy(aidl.node.begin(), aidl.node.end(), legacy.node);
+    return legacy;
+}
+
+ConversionResult<AudioUuid>
+legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy) {
+    AudioUuid aidl;
+    aidl.timeLow = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.timeLow));
+    aidl.timeMid = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeMid));
+    aidl.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeHiAndVersion));
+    aidl.clockSeq = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.clockSeq));
+    std::copy(legacy.node, legacy.node + std::size(legacy.node), std::back_inserter(aidl.node));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+        AudioEncapsulationMetadataType aidl) {
+    switch (aidl) {
+        case AudioEncapsulationMetadataType::NONE:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_NONE;
+        case AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER;
+        case AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
+            return AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+        audio_encapsulation_metadata_type_t legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_NONE:
+            return AudioEncapsulationMetadataType::NONE;
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER:
+            return AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
+        case AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR:
+            return AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl) {
+    return convertBitmask<uint32_t,
+            int32_t,
+            audio_encapsulation_mode_t,
+            AudioEncapsulationMode>(
+            aidl, aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t,
+            indexToEnum_index<AudioEncapsulationMode>,
+            enumToMask_index<uint32_t, audio_encapsulation_mode_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy) {
+    return convertBitmask<int32_t,
+            uint32_t,
+            AudioEncapsulationMode,
+            audio_encapsulation_mode_t>(
+            legacy, legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode,
+            indexToEnum_index<audio_encapsulation_mode_t>,
+            enumToMask_index<int32_t, AudioEncapsulationMode>);
+}
+
+ConversionResult<uint32_t>
+aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl) {
+    return convertBitmask<uint32_t,
+            int32_t,
+            audio_encapsulation_metadata_type_t,
+            AudioEncapsulationMetadataType>(
+            aidl, aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t,
+            indexToEnum_index<AudioEncapsulationMetadataType>,
+            enumToMask_index<uint32_t, audio_encapsulation_metadata_type_t>);
+}
+
+ConversionResult<int32_t>
+legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy) {
+    return convertBitmask<int32_t,
+            uint32_t,
+            AudioEncapsulationMetadataType,
+            audio_encapsulation_metadata_type_t>(
+            legacy, legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType,
+            indexToEnum_index<audio_encapsulation_metadata_type_t>,
+            enumToMask_index<int32_t, AudioEncapsulationMetadataType>);
+}
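+
+// Conceptual sketch of the index-based mask conversions above (illustrative
+// only; the real work is done by the convertBitmask / indexToEnum_index /
+// enumToMask_index helpers): each set bit position in the source mask is taken
+// as an enumerator index, that enumerator is converted with the per-value
+// conversion, and the corresponding bit is set in the destination mask.
+// Roughly, for the encapsulation modes:
+//
+//     uint32_t out = 0;
+//     for (int bit = 0; bit < 32; ++bit) {
+//         if (((static_cast<uint32_t>(aidl) >> bit) & 1) == 0) continue;
+//         const auto mode = aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
+//                 static_cast<AudioEncapsulationMode>(bit));  // bit index == enumerator value
+//         if (!mode.has_value()) return unexpected(BAD_VALUE);  // propagate the error
+//         out |= 1u << static_cast<int>(mode.value());
+//     }
+//     return out;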
+
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+        const AudioPortMixExtUseCase& aidl, bool isInput) {
+    audio_port_config_mix_ext_usecase legacy{};
+    if (aidl.getTag() != AudioPortMixExtUseCase::Tag::unspecified) {
+        if (!isInput) {
+            legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+                            VALUE_OR_RETURN(UNION_GET(aidl, stream))));
+        } else {
+            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
+                            VALUE_OR_RETURN(UNION_GET(aidl, source))));
+        }
+    }
+    return legacy;
+}
+
+ConversionResult<AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, bool isInput) {
+    AudioPortMixExtUseCase aidl;
+    if (!isInput) {
+        UNION_SET(aidl, stream, VALUE_OR_RETURN(
+                        legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
+    } else {
+        UNION_SET(aidl, source, VALUE_OR_RETURN(
+                        legacy2aidl_audio_source_t_AudioSource(legacy.source)));
+    }
+    return aidl;
+}
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_config_mix_ext(
+        const AudioPortMixExt& aidl, bool isInput) {
+    audio_port_config_mix_ext legacy{};
+    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    legacy.usecase = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+                    aidl.usecase, isInput));
+    return legacy;
+}
+
+ConversionResult<AudioPortMixExt> legacy2aidl_audio_port_config_mix_ext_AudioPortMixExt(
+        const audio_port_config_mix_ext& legacy, bool isInput) {
+    AudioPortMixExt aidl;
+    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl.usecase = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+                    legacy.usecase, isInput));
+    return aidl;
+}
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(const AudioPortDeviceExt& aidl) {
+    audio_port_config_device_ext legacy{};
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl.device, &legacy.type, legacy.address));
+    return legacy;
+}
+
+ConversionResult<AudioPortDeviceExt> legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy) {
+    AudioPortDeviceExt aidl;
+    aidl.device = VALUE_OR_RETURN(
+            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+status_t aidl2legacy_AudioPortExt_audio_port_config_ext(
+        const AudioPortExt& aidl, bool isInput,
+        audio_port_config_ext* legacy, audio_port_type_t* type) {
+    switch (aidl.getTag()) {
+        case AudioPortExt::Tag::unspecified:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, unspecified));
+            *legacy = {};
+            *type = AUDIO_PORT_TYPE_NONE;
+            return OK;
+        case AudioPortExt::Tag::device:
+            legacy->device = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+                            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, device))));
+            *type = AUDIO_PORT_TYPE_DEVICE;
+            return OK;
+        case AudioPortExt::Tag::mix:
+            legacy->mix = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioPortMixExt_audio_port_config_mix_ext(
+                            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, mix)), isInput));
+            *type = AUDIO_PORT_TYPE_MIX;
+            return OK;
+        case AudioPortExt::Tag::session:
+            // This variant is not used in the HAL scenario.
+            legacy->session.session = AUDIO_SESSION_NONE;
+            *type = AUDIO_PORT_TYPE_SESSION;
+            return OK;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch flags missing cases
+}
+
+ConversionResult<AudioPortExt> legacy2aidl_audio_port_config_ext_AudioPortExt(
+        const audio_port_config_ext& legacy, audio_port_type_t type, bool isInput) {
+    AudioPortExt aidl;
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_TYPE_DEVICE: {
+            AudioPortDeviceExt device = VALUE_OR_RETURN(
+                    legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(legacy.device));
+            UNION_SET(aidl, device, device);
+            return aidl;
+        }
+        case AUDIO_PORT_TYPE_MIX: {
+            AudioPortMixExt mix = VALUE_OR_RETURN(
+                    legacy2aidl_audio_port_config_mix_ext_AudioPortMixExt(legacy.mix, isInput));
+            UNION_SET(aidl, mix, mix);
+            return aidl;
+        }
+        case AUDIO_PORT_TYPE_SESSION:
+            // This variant is not used in the HAL scenario.
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch flags missing cases
+}
+
+status_t aidl2legacy_AudioPortConfig_audio_port_config(
+        const AudioPortConfig& aidl, bool isInput, audio_port_config* legacy, int32_t* portId) {
+    legacy->id = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    *portId = aidl.portId;
+    if (aidl.sampleRate.has_value()) {
+        legacy->sample_rate = VALUE_OR_RETURN_STATUS(
+                convertIntegral<unsigned int>(aidl.sampleRate.value().value));
+        legacy->config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
+    }
+    if (aidl.channelMask.has_value()) {
+        legacy->channel_mask =
+                VALUE_OR_RETURN_STATUS(
+                        aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                                aidl.channelMask.value(), isInput));
+        legacy->config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
+    }
+    if (aidl.format.has_value()) {
+        legacy->format = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format.value()));
+        legacy->config_mask |= AUDIO_PORT_CONFIG_FORMAT;
+    }
+    if (aidl.gain.has_value()) {
+        legacy->gain = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGainConfig_audio_gain_config(
+                        aidl.gain.value(), isInput));
+        legacy->config_mask |= AUDIO_PORT_CONFIG_GAIN;
+    }
+    if (aidl.flags.has_value()) {
+        legacy->flags = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags.value(), isInput));
+        legacy->config_mask |= AUDIO_PORT_CONFIG_FLAGS;
+    }
+    RETURN_STATUS_IF_ERROR(aidl2legacy_AudioPortExt_audio_port_config_ext(
+                    aidl.ext, isInput, &legacy->ext, &legacy->type));
+    legacy->role = VALUE_OR_RETURN_STATUS(portRole(isInput ?
+                    AudioPortDirection::INPUT : AudioPortDirection::OUTPUT, legacy->type));
+    return OK;
+}
+
+ConversionResult<AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy, bool isInput, int32_t portId) {
+    AudioPortConfig aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.portId = portId;
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+        Int aidl_sampleRate;
+        aidl_sampleRate.value = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+        aidl.sampleRate = aidl_sampleRate;
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+        aidl.channelMask = VALUE_OR_RETURN(
+                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+        aidl.format = VALUE_OR_RETURN(
+                legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        aidl.gain = VALUE_OR_RETURN(
+                legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
+    }
+    if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+        aidl.flags = VALUE_OR_RETURN(
+                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, isInput));
+    }
+    aidl.ext = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_config_ext_AudioPortExt(legacy.ext, legacy.type, isInput));
+    return aidl;
+}
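+
+// Usage sketch (illustrative): audio_port_config is a partial configuration, so
+// a legacy field is meaningful only when its bit is set in config_mask, and the
+// conversion mirrors that by leaving the corresponding optional AIDL field
+// unset. Converting a config that only pins the sample rate would look like:
+//
+//     audio_port_config legacy{};
+//     legacy.sample_rate = 48000;
+//     legacy.config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE;
+//     AudioPortConfig aidl = VALUE_OR_RETURN(
+//             legacy2aidl_audio_port_config_AudioPortConfig(legacy, false /*isInput*/, 0));
+//     // aidl.sampleRate is set; channelMask / format / gain / flags remain unset.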
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+        const AudioPortMixExt& aidl) {
+    audio_port_mix_ext legacy{};
+    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+    return legacy;
+}
+
+ConversionResult<AudioPortMixExt> legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+        const audio_port_mix_ext& legacy) {
+    AudioPortMixExt aidl;
+    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    return aidl;
+}
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const AudioPortDeviceExt& aidl) {
+    audio_port_device_ext legacy{};
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl.device, &legacy.type, legacy.address));
+    legacy.encapsulation_modes = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMode_mask(aidl.encapsulationModes));
+    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(
+                    aidl.encapsulationMetadataTypes));
+    return legacy;
+}
+
+ConversionResult<AudioPortDeviceExt> legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy) {
+    AudioPortDeviceExt aidl;
+    aidl.device = VALUE_OR_RETURN(
+            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    aidl.encapsulationModes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
+    aidl.encapsulationMetadataTypes = VALUE_OR_RETURN(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
+    return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_v7_ext = decltype(audio_port_v7::ext);
+
+status_t aidl2legacy_AudioPortExt_audio_port_v7_ext(
+        const AudioPortExt& aidl, audio_port_v7_ext* legacy, audio_port_type_t* type) {
+    switch (aidl.getTag()) {
+        case AudioPortExt::Tag::unspecified:
+            // Just verify that the union is empty.
+            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, unspecified));
+            *legacy = {};
+            *type = AUDIO_PORT_TYPE_NONE;
+            return OK;
+        case AudioPortExt::Tag::device:
+            legacy->device = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+                            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, device))));
+            *type = AUDIO_PORT_TYPE_DEVICE;
+            return OK;
+        case AudioPortExt::Tag::mix:
+            legacy->mix = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+                            VALUE_OR_RETURN_STATUS(UNION_GET(aidl, mix))));
+            *type = AUDIO_PORT_TYPE_MIX;
+            return OK;
+        case AudioPortExt::Tag::session:
+            // This variant is not used in the HAL scenario.
+            legacy->session.session = AUDIO_SESSION_NONE;
+            *type = AUDIO_PORT_TYPE_SESSION;
+            return OK;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch flags missing cases
+}
+
+ConversionResult<AudioPortExt> legacy2aidl_audio_port_v7_ext_AudioPortExt(
+        const audio_port_v7_ext& legacy, audio_port_type_t type) {
+    AudioPortExt aidl;
+    switch (type) {
+        case AUDIO_PORT_TYPE_NONE:
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+        case AUDIO_PORT_TYPE_DEVICE: {
+            AudioPortDeviceExt device = VALUE_OR_RETURN(
+                    legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy.device));
+            UNION_SET(aidl, device, device);
+            return aidl;
+        }
+        case AUDIO_PORT_TYPE_MIX: {
+            AudioPortMixExt mix = VALUE_OR_RETURN(
+                    legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy.mix));
+            UNION_SET(aidl, mix, mix);
+            return aidl;
+        }
+        case AUDIO_PORT_TYPE_SESSION:
+            // This variant is not used in the HAL scenario.
+            UNION_SET(aidl, unspecified, false);
+            return aidl;
+    }
+    LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch flags missing cases
+}
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(const AudioPort& aidl, bool isInput) {
+    audio_port_v7 legacy;
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
+
+    if (aidl.profiles.size() > std::size(legacy.audio_profiles)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(convertRange(
+                    aidl.profiles.begin(), aidl.profiles.end(), legacy.audio_profiles,
+                    [isInput](const AudioProfile& p) {
+                        return aidl2legacy_AudioProfile_audio_profile(p, isInput);
+                    }));
+    legacy.num_audio_profiles = aidl.profiles.size();
+
+    if (aidl.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(
+                    aidl.extraAudioDescriptors.begin(), aidl.extraAudioDescriptors.end(),
+                    legacy.extra_audio_descriptors,
+                    aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
+    legacy.num_extra_audio_descriptors = aidl.extraAudioDescriptors.size();
+
+    if (aidl.gains.size() > std::size(legacy.gains)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(convertRange(aidl.gains.begin(), aidl.gains.end(), legacy.gains,
+                                 [isInput](const AudioGain& g) {
+                                     return aidl2legacy_AudioGain_audio_gain(g, isInput);
+                                 }));
+    legacy.num_gains = aidl.gains.size();
+
+    RETURN_IF_ERROR(aidl2legacy_AudioPortExt_audio_port_v7_ext(
+                    aidl.ext, &legacy.ext, &legacy.type));
+    legacy.role = VALUE_OR_RETURN(portRole(
+                    isInput ? AudioPortDirection::INPUT : AudioPortDirection::OUTPUT, legacy.type));
+
+    AudioPortConfig aidlPortConfig;
+    int32_t portId;
+    aidlPortConfig.flags = aidl.flags;
+    aidlPortConfig.ext = aidl.ext;
+    RETURN_IF_ERROR(aidl2legacy_AudioPortConfig_audio_port_config(
+                    aidlPortConfig, isInput, &legacy.active_config, &portId));
+    return legacy;
+}
+
+ConversionResult<AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput) {
+    AudioPort aidl;
+    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+
+    if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
+                         std::back_inserter(aidl.profiles),
+                         [isInput](const audio_profile& p) {
+                             return legacy2aidl_audio_profile_AudioProfile(p, isInput);
+                         }));
+
+    if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.profiles.resize(legacy.num_audio_profiles);
+    RETURN_IF_ERROR(
+            convertRange(legacy.extra_audio_descriptors,
+                    legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
+                    std::back_inserter(aidl.extraAudioDescriptors),
+                    legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
+
+    if (legacy.num_gains > std::size(legacy.gains)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.gains, legacy.gains + legacy.num_gains,
+                         std::back_inserter(aidl.gains),
+                         [isInput](const audio_gain& g) {
+                             return legacy2aidl_audio_gain_AudioGain(g, isInput);
+                         }));
+    aidl.gains.resize(legacy.num_gains);
+
+    aidl.ext = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_v7_ext_AudioPortExt(legacy.ext, legacy.type));
+
+    AudioPortConfig aidlPortConfig = VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(
+                    legacy.active_config, isInput, aidl.id));
+    if (aidlPortConfig.flags.has_value()) {
+        aidl.flags = aidlPortConfig.flags.value();
+    } else {
+        aidl.flags = isInput ?
+                AudioIoFlags::make<AudioIoFlags::Tag::input>(0) :
+                AudioIoFlags::make<AudioIoFlags::Tag::output>(0);
+    }
+    return aidl;
+}
+
+ConversionResult<audio_profile>
+aidl2legacy_AudioProfile_audio_profile(const AudioProfile& aidl, bool isInput) {
+    audio_profile legacy;
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+
+    if (aidl.sampleRates.size() > std::size(legacy.sample_rates)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.sampleRates.begin(), aidl.sampleRates.end(), legacy.sample_rates,
+                         convertIntegral<int32_t, unsigned int>));
+    legacy.num_sample_rates = aidl.sampleRates.size();
+
+    if (aidl.channelMasks.size() > std::size(legacy.channel_masks)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
+                    [isInput](const AudioChannelLayout& l) {
+                        return aidl2legacy_AudioChannelLayout_audio_channel_mask_t(l, isInput);
+                    }));
+    legacy.num_channel_masks = aidl.channelMasks.size();
+
+    legacy.encapsulation_type = VALUE_OR_RETURN(
+            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(aidl.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy, bool isInput) {
+    AudioProfile aidl;
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
+
+    if (legacy.num_sample_rates > std::size(legacy.sample_rates)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.sample_rates, legacy.sample_rates + legacy.num_sample_rates,
+                         std::back_inserter(aidl.sampleRates),
+                         convertIntegral<unsigned int, int32_t>));
+
+    if (legacy.num_channel_masks > std::size(legacy.channel_masks)) {
+        return unexpected(BAD_VALUE);
+    }
+    RETURN_IF_ERROR(
+            convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
+                         std::back_inserter(aidl.channelMasks),
+                    [isInput](audio_channel_mask_t m) {
+                        return legacy2aidl_audio_channel_mask_t_AudioChannelLayout(m, isInput);
+                    }));
+
+    aidl.encapsulationType = VALUE_OR_RETURN(
+            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
+    return aidl;
+}
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const AudioGain& aidl, bool isInput) {
+    audio_gain legacy;
+    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, isInput));
+    legacy.min_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.minValue));
+    legacy.max_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.maxValue));
+    legacy.default_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.defaultValue));
+    legacy.step_value = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.stepValue));
+    legacy.min_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.minRampMs));
+    legacy.max_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.maxRampMs));
+    return legacy;
+}
+
+ConversionResult<AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput) {
+    AudioGain aidl;
+    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+    aidl.minValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_value));
+    aidl.maxValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_value));
+    aidl.defaultValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.default_value));
+    aidl.stepValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.step_value));
+    aidl.minRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_ramp_ms));
+    aidl.maxRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_ramp_ms));
+    return aidl;
+}
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(AudioMode aidl) {
+    switch (aidl) {
+        case AudioMode::SYS_RESERVED_INVALID:
+            return AUDIO_MODE_INVALID;
+        case AudioMode::SYS_RESERVED_CURRENT:
+            return AUDIO_MODE_CURRENT;
+        case AudioMode::NORMAL:
+            return AUDIO_MODE_NORMAL;
+        case AudioMode::RINGTONE:
+            return AUDIO_MODE_RINGTONE;
+        case AudioMode::IN_CALL:
+            return AUDIO_MODE_IN_CALL;
+        case AudioMode::IN_COMMUNICATION:
+            return AUDIO_MODE_IN_COMMUNICATION;
+        case AudioMode::CALL_SCREEN:
+            return AUDIO_MODE_CALL_SCREEN;
+        case AudioMode::SYS_RESERVED_CALL_REDIRECT:
+            return AUDIO_MODE_CALL_REDIRECT;
+        case AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT:
+            return AUDIO_MODE_COMMUNICATION_REDIRECT;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_MODE_INVALID:
+            return AudioMode::SYS_RESERVED_INVALID;
+        case AUDIO_MODE_CURRENT:
+            return AudioMode::SYS_RESERVED_CURRENT;
+        case AUDIO_MODE_NORMAL:
+            return AudioMode::NORMAL;
+        case AUDIO_MODE_RINGTONE:
+            return AudioMode::RINGTONE;
+        case AUDIO_MODE_IN_CALL:
+            return AudioMode::IN_CALL;
+        case AUDIO_MODE_IN_COMMUNICATION:
+            return AudioMode::IN_COMMUNICATION;
+        case AUDIO_MODE_CALL_SCREEN:
+            return AudioMode::CALL_SCREEN;
+        case AUDIO_MODE_CALL_REDIRECT:
+            return AudioMode::SYS_RESERVED_CALL_REDIRECT;
+        case AUDIO_MODE_COMMUNICATION_REDIRECT:
+            return AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT;
+        case AUDIO_MODE_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_standard_t>
+aidl2legacy_AudioStandard_audio_standard_t(AudioStandard aidl) {
+    switch (aidl) {
+        case AudioStandard::NONE:
+            return AUDIO_STANDARD_NONE;
+        case AudioStandard::EDID:
+            return AUDIO_STANDARD_EDID;
+        case AudioStandard::SADB:
+            return AUDIO_STANDARD_SADB;
+        case AudioStandard::VSADB:
+            return AUDIO_STANDARD_VSADB;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioStandard>
+legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy) {
+    switch (legacy) {
+        case AUDIO_STANDARD_NONE:
+            return AudioStandard::NONE;
+        case AUDIO_STANDARD_EDID:
+            return AudioStandard::EDID;
+        case AUDIO_STANDARD_SADB:
+            return AudioStandard::SADB;
+        case AUDIO_STANDARD_VSADB:
+            return AudioStandard::VSADB;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const ExtraAudioDescriptor& aidl) {
+    audio_extra_audio_descriptor legacy;
+    legacy.standard = VALUE_OR_RETURN(aidl2legacy_AudioStandard_audio_standard_t(aidl.standard));
+    if (aidl.audioDescriptor.size() > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    legacy.descriptor_length = aidl.audioDescriptor.size();
+    std::copy(aidl.audioDescriptor.begin(), aidl.audioDescriptor.end(),
+              std::begin(legacy.descriptor));
+    legacy.encapsulation_type =
+            VALUE_OR_RETURN(aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+                    aidl.encapsulationType));
+    return legacy;
+}
+
+ConversionResult<ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy) {
+    ExtraAudioDescriptor aidl;
+    aidl.standard = VALUE_OR_RETURN(legacy2aidl_audio_standard_t_AudioStandard(legacy.standard));
+    if (legacy.descriptor_length > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
+        return unexpected(BAD_VALUE);
+    }
+    aidl.audioDescriptor.resize(legacy.descriptor_length);
+    std::copy(legacy.descriptor, legacy.descriptor + legacy.descriptor_length,
+              aidl.audioDescriptor.begin());
+    aidl.encapsulationType =
+            VALUE_OR_RETURN(legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+                    legacy.encapsulation_type));
+    return aidl;
+}
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const AudioEncapsulationType& aidl) {
+    switch (aidl) {
+        case AudioEncapsulationType::NONE:
+            return AUDIO_ENCAPSULATION_TYPE_NONE;
+        case AudioEncapsulationType::IEC61937:
+            return AUDIO_ENCAPSULATION_TYPE_IEC61937;
+        case AudioEncapsulationType::PCM:
+            return AUDIO_ENCAPSULATION_TYPE_PCM;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy) {
+    switch (legacy) {
+        case AUDIO_ENCAPSULATION_TYPE_NONE:
+            return AudioEncapsulationType::NONE;
+        case AUDIO_ENCAPSULATION_TYPE_IEC61937:
+            return AudioEncapsulationType::IEC61937;
+        case AUDIO_ENCAPSULATION_TYPE_PCM:
+            return AudioEncapsulationType::PCM;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(AudioDualMonoMode aidl) {
+    switch (aidl) {
+        case AudioDualMonoMode::OFF:
+            return AUDIO_DUAL_MONO_MODE_OFF;
+        case AudioDualMonoMode::LR:
+            return AUDIO_DUAL_MONO_MODE_LR;
+        case AudioDualMonoMode::LL:
+            return AUDIO_DUAL_MONO_MODE_LL;
+        case AudioDualMonoMode::RR:
+            return AUDIO_DUAL_MONO_MODE_RR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_DUAL_MONO_MODE_OFF:
+            return AudioDualMonoMode::OFF;
+        case AUDIO_DUAL_MONO_MODE_LR:
+            return AudioDualMonoMode::LR;
+        case AUDIO_DUAL_MONO_MODE_LL:
+            return AudioDualMonoMode::LL;
+        case AUDIO_DUAL_MONO_MODE_RR:
+            return AudioDualMonoMode::RR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
+        AudioPlaybackRate::TimestretchFallbackMode aidl) {
+    switch (aidl) {
+        case AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_CUT_REPEAT:
+            return AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT;
+        case AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_DEFAULT:
+            return AUDIO_TIMESTRETCH_FALLBACK_DEFAULT;
+        case AudioPlaybackRate::TimestretchFallbackMode::MUTE:
+            return AUDIO_TIMESTRETCH_FALLBACK_MUTE;
+        case AudioPlaybackRate::TimestretchFallbackMode::FAIL:
+            return AUDIO_TIMESTRETCH_FALLBACK_FAIL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioPlaybackRate::TimestretchFallbackMode>
+legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
+        audio_timestretch_fallback_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT:
+            return AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_CUT_REPEAT;
+        case AUDIO_TIMESTRETCH_FALLBACK_DEFAULT:
+            return AudioPlaybackRate::TimestretchFallbackMode::SYS_RESERVED_DEFAULT;
+        case AUDIO_TIMESTRETCH_FALLBACK_MUTE:
+            return AudioPlaybackRate::TimestretchFallbackMode::MUTE;
+        case AUDIO_TIMESTRETCH_FALLBACK_FAIL:
+            return AudioPlaybackRate::TimestretchFallbackMode::FAIL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
+        AudioPlaybackRate::TimestretchMode aidl) {
+    switch (aidl) {
+        case AudioPlaybackRate::TimestretchMode::DEFAULT:
+            return AUDIO_TIMESTRETCH_STRETCH_DEFAULT;
+        case AudioPlaybackRate::TimestretchMode::VOICE:
+            return AUDIO_TIMESTRETCH_STRETCH_VOICE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioPlaybackRate::TimestretchMode>
+legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
+        audio_timestretch_stretch_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_TIMESTRETCH_STRETCH_DEFAULT:
+            return AudioPlaybackRate::TimestretchMode::DEFAULT;
+        case AUDIO_TIMESTRETCH_STRETCH_VOICE:
+            return AudioPlaybackRate::TimestretchMode::VOICE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(const AudioPlaybackRate& aidl) {
+    audio_playback_rate_t legacy;
+    legacy.mSpeed = aidl.speed;
+    legacy.mPitch = aidl.pitch;
+    legacy.mFallbackMode = VALUE_OR_RETURN(
+            aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
+                    aidl.fallbackMode));
+    legacy.mStretchMode = VALUE_OR_RETURN(
+            aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(aidl.timestretchMode));
+    return legacy;
+}
+
+ConversionResult<AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy) {
+    AudioPlaybackRate aidl;
+    aidl.speed = legacy.mSpeed;
+    aidl.pitch = legacy.mPitch;
+    aidl.fallbackMode = VALUE_OR_RETURN(
+            legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
+                    legacy.mFallbackMode));
+    aidl.timestretchMode = VALUE_OR_RETURN(
+            legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(legacy.mStretchMode));
+    return aidl;
+}
+
+ConversionResult<audio_latency_mode_t>
+aidl2legacy_AudioLatencyMode_audio_latency_mode_t(AudioLatencyMode aidl) {
+    switch (aidl) {
+        case AudioLatencyMode::FREE:
+            return AUDIO_LATENCY_MODE_FREE;
+        case AudioLatencyMode::LOW:
+            return AUDIO_LATENCY_MODE_LOW;
+        case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+            return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+        case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+            return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioLatencyMode>
+legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_LATENCY_MODE_FREE:
+            return AudioLatencyMode::FREE;
+        case AUDIO_LATENCY_MODE_LOW:
+            return AudioLatencyMode::LOW;
+        case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+            return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+        case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+            return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_microphone_location_t>
+aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(MicrophoneInfo::Location aidl) {
+    switch (aidl) {
+        case MicrophoneInfo::Location::UNKNOWN:
+            return AUDIO_MICROPHONE_LOCATION_UNKNOWN;
+        case MicrophoneInfo::Location::MAINBODY:
+            return AUDIO_MICROPHONE_LOCATION_MAINBODY;
+        case MicrophoneInfo::Location::MAINBODY_MOVABLE:
+            return AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE;
+        case MicrophoneInfo::Location::PERIPHERAL:
+            return AUDIO_MICROPHONE_LOCATION_PERIPHERAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<MicrophoneInfo::Location>
+legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy) {
+    switch (legacy) {
+        case AUDIO_MICROPHONE_LOCATION_UNKNOWN:
+            return MicrophoneInfo::Location::UNKNOWN;
+        case AUDIO_MICROPHONE_LOCATION_MAINBODY:
+            return MicrophoneInfo::Location::MAINBODY;
+        case AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE:
+            return MicrophoneInfo::Location::MAINBODY_MOVABLE;
+        case AUDIO_MICROPHONE_LOCATION_PERIPHERAL:
+            return MicrophoneInfo::Location::PERIPHERAL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
+        int32_t aidl) {
+    return convertReinterpret<audio_microphone_group_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
+        audio_microphone_group_t legacy) {
+    return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_microphone_directionality_t>
+aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
+        MicrophoneInfo::Directionality aidl) {
+    switch (aidl) {
+        case MicrophoneInfo::Directionality::UNKNOWN:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN;
+        case MicrophoneInfo::Directionality::OMNI:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
+        case MicrophoneInfo::Directionality::BI_DIRECTIONAL:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL;
+        case MicrophoneInfo::Directionality::CARDIOID:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID;
+        case MicrophoneInfo::Directionality::HYPER_CARDIOID:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID;
+        case MicrophoneInfo::Directionality::SUPER_CARDIOID:
+            return AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<MicrophoneInfo::Directionality>
+legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
+        audio_microphone_directionality_t legacy) {
+    switch (legacy) {
+        case AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN:
+            return MicrophoneInfo::Directionality::UNKNOWN;
+        case AUDIO_MICROPHONE_DIRECTIONALITY_OMNI:
+            return MicrophoneInfo::Directionality::OMNI;
+        case AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL:
+            return MicrophoneInfo::Directionality::BI_DIRECTIONAL;
+        case AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID:
+            return MicrophoneInfo::Directionality::CARDIOID;
+        case AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID:
+            return MicrophoneInfo::Directionality::HYPER_CARDIOID;
+        case AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID:
+            return MicrophoneInfo::Directionality::SUPER_CARDIOID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_microphone_coordinate>
+aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+        const MicrophoneInfo::Coordinate& aidl) {
+    audio_microphone_coordinate legacy;
+    legacy.x = aidl.x;
+    legacy.y = aidl.y;
+    legacy.z = aidl.z;
+    return legacy;
+}
+
+ConversionResult<MicrophoneInfo::Coordinate>
+legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+        const audio_microphone_coordinate& legacy) {
+    MicrophoneInfo::Coordinate aidl;
+    aidl.x = legacy.x;
+    aidl.y = legacy.y;
+    aidl.z = legacy.z;
+    return aidl;
+}
+
+ConversionResult<audio_microphone_channel_mapping_t>
+aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
+        MicrophoneDynamicInfo::ChannelMapping aidl) {
+    switch (aidl) {
+        case MicrophoneDynamicInfo::ChannelMapping::UNUSED:
+            return AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
+        case MicrophoneDynamicInfo::ChannelMapping::DIRECT:
+            return AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT;
+        case MicrophoneDynamicInfo::ChannelMapping::PROCESSED:
+            return AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<MicrophoneDynamicInfo::ChannelMapping>
+legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
+        audio_microphone_channel_mapping_t legacy) {
+    switch (legacy) {
+        case AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED:
+            return MicrophoneDynamicInfo::ChannelMapping::UNUSED;
+        case AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT:
+            return MicrophoneDynamicInfo::ChannelMapping::DIRECT;
+        case AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED:
+            return MicrophoneDynamicInfo::ChannelMapping::PROCESSED;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+        const MicrophoneInfo& aidlInfo, const MicrophoneDynamicInfo& aidlDynamic) {
+    static const audio_microphone_coordinate kCoordinateUnknown = {
+        AUDIO_MICROPHONE_COORDINATE_UNKNOWN, AUDIO_MICROPHONE_COORDINATE_UNKNOWN,
+        AUDIO_MICROPHONE_COORDINATE_UNKNOWN };
+    audio_microphone_characteristic_t legacy{};
+    if (aidlInfo.id != aidlDynamic.id) {
+        return unexpected(BAD_VALUE);
+    }
+    // Note: in the legacy structure, 'device_id' is the mic's ID, 'id' is APM port id.
+    RETURN_IF_ERROR(aidl2legacy_string(aidlInfo.id, legacy.device_id, AUDIO_MICROPHONE_ID_MAX_LEN));
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidlInfo.device, &legacy.device, legacy.address));
+    legacy.location = VALUE_OR_RETURN(
+            aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(aidlInfo.location));
+    legacy.group = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_microphone_group_t(aidlInfo.group));
+    // For some reason, the legacy field is unsigned, however in the SDK layer it is signed,
+    // as it is in AIDL. So, use UINT_MAX for INDEX_IN_THE_GROUP_UNKNOWN which is -1.
+    if (aidlInfo.indexInTheGroup != MicrophoneInfo::INDEX_IN_THE_GROUP_UNKNOWN) {
+        legacy.index_in_the_group = VALUE_OR_RETURN(
+                convertReinterpret<unsigned int>(aidlInfo.indexInTheGroup));
+    } else {
+        legacy.index_in_the_group = UINT_MAX;
+    }
+    if (aidlInfo.sensitivity.has_value()) {
+        legacy.sensitivity = aidlInfo.sensitivity.value().leveldBFS;
+        legacy.max_spl = aidlInfo.sensitivity.value().maxSpldB;
+        legacy.min_spl = aidlInfo.sensitivity.value().minSpldB;
+    } else {
+        legacy.sensitivity = AUDIO_MICROPHONE_SENSITIVITY_UNKNOWN;
+        legacy.max_spl = AUDIO_MICROPHONE_SPL_UNKNOWN;
+        legacy.min_spl = AUDIO_MICROPHONE_SPL_UNKNOWN;
+    }
+    legacy.directionality = VALUE_OR_RETURN(
+            aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
+                    aidlInfo.directionality));
+    if (aidlInfo.frequencyResponse.size() > AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES) {
+        return unexpected(BAD_VALUE);
+    }
+    legacy.num_frequency_responses = 0;
+    for (const auto& p: aidlInfo.frequencyResponse) {
+        legacy.frequency_responses[0][legacy.num_frequency_responses] = p.frequencyHz;
+        legacy.frequency_responses[1][legacy.num_frequency_responses++] = p.leveldB;
+    }
+    if (aidlInfo.position.has_value()) {
+        legacy.geometric_location = VALUE_OR_RETURN(
+                aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+                        aidlInfo.position.value()));
+    } else {
+        legacy.geometric_location = kCoordinateUnknown;
+    }
+    if (aidlInfo.orientation.has_value()) {
+        legacy.orientation = VALUE_OR_RETURN(
+                aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+                        aidlInfo.orientation.value()));
+    } else {
+        legacy.orientation = kCoordinateUnknown;
+    }
+    if (aidlDynamic.channelMapping.size() > AUDIO_CHANNEL_COUNT_MAX) {
+        return unexpected(BAD_VALUE);
+    }
+    size_t i = 0;
+    for (; i < aidlDynamic.channelMapping.size(); ++i) {
+        legacy.channel_mapping[i] = VALUE_OR_RETURN(
+                aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
+                        aidlDynamic.channelMapping[i]));
+    }
+    for (; i < AUDIO_CHANNEL_COUNT_MAX; ++i) {
+        legacy.channel_mapping[i] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
+    }
+    return legacy;
+}
+
+status_t
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
+        const audio_microphone_characteristic_t& legacy,
+        MicrophoneInfo* aidlInfo, MicrophoneDynamicInfo* aidlDynamic) {
+    aidlInfo->id = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_string(legacy.device_id, AUDIO_MICROPHONE_ID_MAX_LEN));
+    aidlDynamic->id = aidlInfo->id;
+    aidlInfo->device = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_device_AudioDevice(
+                    legacy.device, legacy.address));
+    aidlInfo->location = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(legacy.location));
+    aidlInfo->group = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_microphone_group_t_int32_t(legacy.group));
+    // The legacy field is unsigned, whereas in the SDK layer (and in AIDL) it is signed.
+    // Use UINT_MAX to represent INDEX_IN_THE_GROUP_UNKNOWN, which is -1.
+    if (legacy.index_in_the_group != UINT_MAX) {
+        aidlInfo->indexInTheGroup = VALUE_OR_RETURN_STATUS(
+                convertReinterpret<int32_t>(legacy.index_in_the_group));
+    } else {
+        aidlInfo->indexInTheGroup = MicrophoneInfo::INDEX_IN_THE_GROUP_UNKNOWN;
+    }
+    if (legacy.sensitivity != AUDIO_MICROPHONE_SENSITIVITY_UNKNOWN &&
+            legacy.max_spl != AUDIO_MICROPHONE_SPL_UNKNOWN &&
+            legacy.min_spl != AUDIO_MICROPHONE_SPL_UNKNOWN) {
+        MicrophoneInfo::Sensitivity sensitivity;
+        sensitivity.leveldBFS = legacy.sensitivity;
+        sensitivity.maxSpldB = legacy.max_spl;
+        sensitivity.minSpldB = legacy.min_spl;
+        aidlInfo->sensitivity = std::move(sensitivity);
+    } else {
+        aidlInfo->sensitivity = {};
+    }
+    aidlInfo->directionality = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
+                    legacy.directionality));
+    if (legacy.num_frequency_responses > AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES) {
+        return BAD_VALUE;
+    }
+    aidlInfo->frequencyResponse.resize(legacy.num_frequency_responses);
+    for (size_t i = 0; i < legacy.num_frequency_responses; ++i) {
+        aidlInfo->frequencyResponse[i].frequencyHz = legacy.frequency_responses[0][i];
+        aidlInfo->frequencyResponse[i].leveldB = legacy.frequency_responses[1][i];
+    }
+    if (legacy.geometric_location.x != AUDIO_MICROPHONE_COORDINATE_UNKNOWN &&
+            legacy.geometric_location.y != AUDIO_MICROPHONE_COORDINATE_UNKNOWN &&
+            legacy.geometric_location.z != AUDIO_MICROPHONE_COORDINATE_UNKNOWN) {
+        aidlInfo->position = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+                        legacy.geometric_location));
+    } else {
+        aidlInfo->position = {};
+    }
+    if (legacy.orientation.x != AUDIO_MICROPHONE_COORDINATE_UNKNOWN &&
+            legacy.orientation.y != AUDIO_MICROPHONE_COORDINATE_UNKNOWN &&
+            legacy.orientation.z != AUDIO_MICROPHONE_COORDINATE_UNKNOWN) {
+        aidlInfo->orientation = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+                        legacy.orientation));
+    } else {
+        aidlInfo->orientation = {};
+    }
+    size_t channelsUsed = AUDIO_CHANNEL_COUNT_MAX;
+    while (channelsUsed != 0 &&
+            legacy.channel_mapping[--channelsUsed] == AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED) {}
+    // Incrementing is correct even when channel 0 is 'UNUSED' because AIDL
+    // requires the mapping to contain at least one element.
+    ++channelsUsed;
+    aidlDynamic->channelMapping.resize(channelsUsed);
+    for (size_t i = 0; i < channelsUsed; ++i) {
+        aidlDynamic->channelMapping[i] = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
+                        legacy.channel_mapping[i]));
+    }
+    return OK;
+}
+
+}  // namespace android
+
+#undef GET_DEVICE_DESC_CONNECTION
+
+#if defined(BACKEND_NDK)
+}  // aidl
+#endif
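
For reference, a minimal sketch (not part of the patch) of how the new microphone conversion
pair introduced above might be exercised, assuming the CPP backend and that 'mic' comes from a
HAL query; the helper name 'roundTripMicrophone' is invented for illustration only:

    #include <media/AidlConversionCppNdk.h>  // include path as used elsewhere in this change

    using ::android::media::audio::common::MicrophoneDynamicInfo;
    using ::android::media::audio::common::MicrophoneInfo;

    ::android::status_t roundTripMicrophone(const audio_microphone_characteristic_t& mic) {
        MicrophoneInfo info;
        MicrophoneDynamicInfo dynamic;
        // Split the legacy struct into its static and dynamic AIDL parts.
        ::android::status_t status =
                ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
                        mic, &info, &dynamic);
        if (status != ::android::OK) return status;
        // Recombine them; both halves must carry the same microphone id,
        // otherwise the converter reports BAD_VALUE.
        auto legacy = ::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
                info, dynamic);
        return legacy.has_value() ? ::android::OK : ::android::BAD_VALUE;
    }
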
diff --git a/media/audioaidlconversion/AidlConversionEffect.cpp b/media/audioaidlconversion/AidlConversionEffect.cpp
new file mode 100644
index 0000000..611cfab
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionEffect.cpp
@@ -0,0 +1,475 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <inttypes.h>
+#include <utility>
+
+#define LOG_TAG "AidlConversionEffect"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionEffect.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AIDL NDK backend to legacy audio data structure conversion utilities.
+
+namespace aidl {
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControlV2;
+using ::aidl::android::hardware::audio::effect::BassBoost;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Downmix;
+using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::PresetReverb;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::hardware::audio::effect::Visualizer;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+
+using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::status_t;
+using ::android::base::unexpected;
+using ::android::effect::utils::EffectParamReader;
+using ::android::effect::utils::EffectParamWriter;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(Flags::Type type) {
+    switch (type) {
+        case Flags::Type::INSERT:
+            return EFFECT_FLAG_TYPE_INSERT;
+        case Flags::Type::AUXILIARY:
+            return EFFECT_FLAG_TYPE_AUXILIARY;
+        case Flags::Type::REPLACE:
+            return EFFECT_FLAG_TYPE_REPLACE;
+        case Flags::Type::PRE_PROC:
+            return EFFECT_FLAG_TYPE_PRE_PROC;
+        case Flags::Type::POST_PROC:
+            return EFFECT_FLAG_TYPE_POST_PROC;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Type> legacy2aidl_uint32_Flags_Type(uint32_t legacy) {
+    switch (legacy & EFFECT_FLAG_TYPE_MASK) {
+        case EFFECT_FLAG_TYPE_INSERT:
+            return Flags::Type::INSERT;
+        case EFFECT_FLAG_TYPE_AUXILIARY:
+            return Flags::Type::AUXILIARY;
+        case EFFECT_FLAG_TYPE_REPLACE:
+            return Flags::Type::REPLACE;
+        case EFFECT_FLAG_TYPE_PRE_PROC:
+            return Flags::Type::PRE_PROC;
+        case EFFECT_FLAG_TYPE_POST_PROC:
+            return Flags::Type::POST_PROC;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(Flags::Insert insert) {
+    switch (insert) {
+        case Flags::Insert::ANY:
+            return EFFECT_FLAG_INSERT_ANY;
+        case Flags::Insert::FIRST:
+            return EFFECT_FLAG_INSERT_FIRST;
+        case Flags::Insert::LAST:
+            return EFFECT_FLAG_INSERT_LAST;
+        case Flags::Insert::EXCLUSIVE:
+            return EFFECT_FLAG_INSERT_EXCLUSIVE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Insert> legacy2aidl_uint32_Flags_Insert(uint32_t legacy) {
+    switch (legacy & EFFECT_FLAG_INSERT_MASK) {
+        case EFFECT_FLAG_INSERT_ANY:
+            return Flags::Insert::ANY;
+        case EFFECT_FLAG_INSERT_FIRST:
+            return Flags::Insert::FIRST;
+        case EFFECT_FLAG_INSERT_LAST:
+            return Flags::Insert::LAST;
+        case EFFECT_FLAG_INSERT_EXCLUSIVE:
+            return Flags::Insert::EXCLUSIVE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(Flags::Volume volume) {
+    switch (volume) {
+        case Flags::Volume::NONE:
+            return 0;
+        case Flags::Volume::CTRL:
+            return EFFECT_FLAG_VOLUME_CTRL;
+        case Flags::Volume::IND:
+            return EFFECT_FLAG_VOLUME_IND;
+        case Flags::Volume::MONITOR:
+            return EFFECT_FLAG_VOLUME_MONITOR;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Volume> legacy2aidl_uint32_Flags_Volume(uint32_t legacy) {
+    switch (legacy & EFFECT_FLAG_VOLUME_MASK) {
+        case EFFECT_FLAG_VOLUME_CTRL:
+            return Flags::Volume::CTRL;
+        case EFFECT_FLAG_VOLUME_IND:
+            return Flags::Volume::IND;
+        case EFFECT_FLAG_VOLUME_MONITOR:
+            return Flags::Volume::MONITOR;
+        case EFFECT_FLAG_VOLUME_NONE:
+            return Flags::Volume::NONE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_uint32(Flags aidl) {
+    uint32_t legacy = 0;
+    legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Type_uint32(aidl.type));
+    legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Insert_uint32(aidl.insert));
+    legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Volume_uint32(aidl.volume));
+    legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_HardwareAccelerator_uint32(aidl.hwAcceleratorMode));
+
+    if (aidl.offloadIndication) {
+        legacy |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
+    }
+    if (aidl.deviceIndication) {
+        legacy |= EFFECT_FLAG_DEVICE_IND;
+    }
+    if (aidl.audioModeIndication) {
+        legacy |= EFFECT_FLAG_AUDIO_MODE_IND;
+    }
+    if (aidl.audioSourceIndication) {
+        legacy |= EFFECT_FLAG_AUDIO_SOURCE_IND;
+    }
+    if (aidl.bypass) {
+        legacy |= EFFECT_FLAG_NO_PROCESS;
+    }
+    return legacy;
+}
+
+ConversionResult<Flags> legacy2aidl_uint32_Flags(uint32_t legacy) {
+    Flags aidl;
+
+    aidl.type = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Type(legacy));
+    aidl.insert = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Insert(legacy));
+    aidl.volume = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Volume(legacy));
+    aidl.hwAcceleratorMode = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_HardwareAccelerator(legacy));
+    aidl.offloadIndication = (legacy & EFFECT_FLAG_OFFLOAD_SUPPORTED);
+    aidl.deviceIndication = (legacy & EFFECT_FLAG_DEVICE_IND);
+    aidl.audioModeIndication = (legacy & EFFECT_FLAG_AUDIO_MODE_IND);
+    aidl.audioSourceIndication = (legacy & EFFECT_FLAG_AUDIO_SOURCE_IND);
+    aidl.bypass = (legacy & EFFECT_FLAG_NO_PROCESS);
+    return aidl;
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
+        Flags::HardwareAccelerator hwAcceleratorMode) {
+    switch (hwAcceleratorMode) {
+        case Flags::HardwareAccelerator::NONE:
+            return 0;
+        case Flags::HardwareAccelerator::SIMPLE:
+            return EFFECT_FLAG_HW_ACC_SIMPLE;
+        case Flags::HardwareAccelerator::TUNNEL:
+            return EFFECT_FLAG_HW_ACC_TUNNEL;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::HardwareAccelerator> legacy2aidl_uint32_Flags_HardwareAccelerator(
+        uint32_t legacy) {
+    switch (legacy & EFFECT_FLAG_HW_ACC_MASK) {
+        case EFFECT_FLAG_HW_ACC_SIMPLE:
+            return Flags::HardwareAccelerator::SIMPLE;
+        case EFFECT_FLAG_HW_ACC_TUNNEL:
+            return Flags::HardwareAccelerator::TUNNEL;
+        case 0:
+            return Flags::HardwareAccelerator::NONE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<effect_descriptor_t>
+aidl2legacy_Descriptor_effect_descriptor(const Descriptor& aidl) {
+    effect_descriptor_t legacy;
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.type));
+    legacy.uuid = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.uuid));
+    // legacy descriptor doesn't have proxy information
+    // proxy = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.proxy));
+    legacy.apiVersion = EFFECT_CONTROL_API_VERSION;
+    legacy.flags = VALUE_OR_RETURN(aidl2legacy_Flags_uint32(aidl.common.flags));
+    legacy.cpuLoad = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.cpuLoad));
+    legacy.memoryUsage = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.memoryUsage));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.common.name, legacy.name, sizeof(legacy.name)));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.common.implementor, legacy.implementor,
+                                        sizeof(legacy.implementor)));
+    return legacy;
+}
+
+ConversionResult<Descriptor>
+legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& legacy) {
+    Descriptor aidl;
+    aidl.common.id.type = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.type));
+    aidl.common.id.uuid = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.uuid));
+    // legacy descriptor doesn't have proxy information
+    // aidl.common.id.proxy
+    aidl.common.flags = VALUE_OR_RETURN(legacy2aidl_uint32_Flags(legacy.flags));
+    aidl.common.cpuLoad = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.cpuLoad));
+    aidl.common.memoryUsage = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.memoryUsage));
+    aidl.common.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+    aidl.common.implementor =
+            VALUE_OR_RETURN(legacy2aidl_string(legacy.implementor, sizeof(legacy.implementor)));
+    return aidl;
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_echoDelay(const Parameter& aidl) {
+    int echoDelay = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, AcousticEchoCanceler, acousticEchoCanceler,
+                                         AcousticEchoCanceler::echoDelayUs, int));
+    return VALUE_OR_RETURN(convertReinterpret<uint32_t>(echoDelay));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_echoDelay_Parameter_aec(uint32_t legacy) {
+    int delay = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy));
+    return MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, echoDelayUs, delay);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_mobileMode(const Parameter& aidl) {
+    bool mobileMode = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, AcousticEchoCanceler, acousticEchoCanceler,
+                                         AcousticEchoCanceler::mobileMode, bool));
+    return VALUE_OR_RETURN(convertIntegral<uint32_t>(mobileMode));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_mobileMode_Parameter_aec(uint32_t legacy) {
+    bool mode = VALUE_OR_RETURN(convertIntegral<bool>(legacy));
+    return MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, mobileMode, mode);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(
+        const Parameter& aidl) {
+    int gain = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, AutomaticGainControlV2, automaticGainControlV2,
+                                         AutomaticGainControlV2::fixedDigitalGainMb, int));
+    return VALUE_OR_RETURN(convertReinterpret<uint32_t>(gain));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(uint32_t legacy) {
+    int gain = VALUE_OR_RETURN(convertReinterpret<int>(legacy));
+    return MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2,
+                                   fixedDigitalGainMb, gain);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_levelEstimator(
+        const Parameter& aidl) {
+    const auto& le = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+            aidl, AutomaticGainControlV2, automaticGainControlV2,
+            AutomaticGainControlV2::levelEstimator, AutomaticGainControlV2::LevelEstimator));
+    return static_cast<uint32_t>(le);
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_levelEstimator_Parameter_agc(uint32_t legacy) {
+    if (legacy > (uint32_t)AutomaticGainControlV2::LevelEstimator::PEAK) {
+        return unexpected(BAD_VALUE);
+    }
+    AutomaticGainControlV2::LevelEstimator le =
+            static_cast<AutomaticGainControlV2::LevelEstimator>(legacy);
+    return MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2, levelEstimator,
+                                   le);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_saturationMargin(
+        const Parameter& aidl) {
+    int saturationMargin = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, AutomaticGainControlV2, automaticGainControlV2,
+                                         AutomaticGainControlV2::saturationMarginMb, int));
+    return VALUE_OR_RETURN(convertIntegral<uint32_t>(saturationMargin));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_saturationMargin_Parameter_agc(uint32_t legacy) {
+    int saturationMargin = VALUE_OR_RETURN(convertIntegral<int>(legacy));
+    return MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2,
+                                   saturationMarginMb, saturationMargin);
+}
+
+ConversionResult<uint16_t> aidl2legacy_Parameter_BassBoost_uint16_strengthPm(
+        const Parameter& aidl) {
+    int strength = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, BassBoost, bassBoost, BassBoost::strengthPm, int));
+    return VALUE_OR_RETURN(convertIntegral<uint16_t>(strength));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint16_strengthPm_Parameter_BassBoost(uint16_t legacy) {
+    int strength = VALUE_OR_RETURN(convertIntegral<int>(legacy));
+    return MAKE_SPECIFIC_PARAMETER(BassBoost, bassBoost, strengthPm, strength);
+}
+
+ConversionResult<int16_t> aidl2legacy_Parameter_Downmix_int16_type(const Parameter& aidl) {
+    Downmix::Type aidlType = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, Downmix, downmix, Downmix::type, Downmix::Type));
+    return VALUE_OR_RETURN(convertIntegral<int16_t>(static_cast<uint32_t>(aidlType)));
+}
+
+ConversionResult<Parameter> legacy2aidl_int16_type_Parameter_Downmix(int16_t legacy) {
+    if (legacy > (uint32_t) Downmix::Type::FOLD) {
+        return unexpected(BAD_VALUE);
+    }
+    Downmix::Type aidlType = static_cast<Downmix::Type>(legacy);
+    return MAKE_SPECIFIC_PARAMETER(Downmix, downmix, type, aidlType);
+}
+
+ConversionResult<int16_t> aidl2legacy_DynamicsProcessing_ResolutionPreference_uint32_(
+        const Parameter& aidl) {
+    Downmix::Type aidlType = VALUE_OR_RETURN(
+            GET_PARAMETER_SPECIFIC_FIELD(aidl, Downmix, downmix, Downmix::type, Downmix::Type));
+    return VALUE_OR_RETURN(convertIntegral<int16_t>(static_cast<uint32_t>(aidlType)));
+}
+
+ConversionResult<DynamicsProcessing::ResolutionPreference>
+legacy2aidl_int32_DynamicsProcessing_ResolutionPreference(int32_t legacy) {
+    if (legacy > (int32_t)DynamicsProcessing::ResolutionPreference::FAVOR_TIME_RESOLUTION) {
+        return unexpected(BAD_VALUE);
+    }
+    return static_cast<DynamicsProcessing::ResolutionPreference>(legacy);
+}
+
+ConversionResult<int32_t> aidl2legacy_DynamicsProcessing_ResolutionPreference_int32(
+        DynamicsProcessing::ResolutionPreference aidl) {
+    return static_cast<int32_t>(aidl);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(
+        Visualizer::ScalingMode aidl) {
+    switch (aidl) {
+        case Visualizer::ScalingMode::NORMALIZED: {
+            return 0;
+        }
+        case Visualizer::ScalingMode::AS_PLAYED: {
+            return 1;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Visualizer::ScalingMode> legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(
+        uint32_t legacy) {
+    if (legacy == 0) {
+        return Visualizer::ScalingMode::NORMALIZED;
+    } else if (legacy == 1) {
+        return Visualizer::ScalingMode::AS_PLAYED;
+    } else {
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(
+        Visualizer::MeasurementMode aidl) {
+    switch (aidl) {
+        case Visualizer::MeasurementMode::NONE: {
+            return 0;
+        }
+        case Visualizer::MeasurementMode::PEAK_RMS: {
+            return 1;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Visualizer::MeasurementMode>
+legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(uint32_t legacy) {
+    if (legacy == 0) {
+        return Visualizer::MeasurementMode::NONE;
+    } else if (legacy == 1) {
+        return Visualizer::MeasurementMode::PEAK_RMS;
+    } else {
+        return unexpected(BAD_VALUE);
+    }
+}
+
+/**
+ * Copy the parameter area of effect_param_t to DefaultExtension::bytes.
+ */
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Param_VendorExtension(
+        EffectParamReader& param) {
+    size_t len = param.getParameterSize();
+    DefaultExtension defaultExt;
+    defaultExt.bytes.resize(len);
+    RETURN_IF_ERROR(param.readFromParameter(defaultExt.bytes.data(), len));
+
+    VendorExtension ext;
+    ext.extension.setParcelable(defaultExt);
+    return ext;
+}
+
+/**
+ * Copy the data area of effect_param_t to DefaultExtension::bytes.
+ */
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Data_VendorExtension(
+        EffectParamReader& param) {
+    size_t len = param.getValueSize();
+    DefaultExtension defaultExt;
+    defaultExt.bytes.resize(len);
+    RETURN_IF_ERROR(param.readFromValue(defaultExt.bytes.data(), len));
+
+    VendorExtension ext;
+    ext.extension.setParcelable(defaultExt);
+    return ext;
+}
+
+/**
+ * Copy DefaultExtension::bytes to the data area of effect_param_t.
+ */
+ConversionResult<status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+        EffectParamWriter& param, VendorExtension ext) {
+    std::optional<DefaultExtension> defaultExt;
+    RETURN_IF_ERROR(ext.extension.getParcelable(&defaultExt));
+    if (!defaultExt.has_value()) {
+        return unexpected(BAD_VALUE);
+    }
+
+    RETURN_IF_ERROR(param.writeToValue(defaultExt->bytes.data(), defaultExt->bytes.size()));
+
+    return OK;
+}
+
+ConversionResult<Parameter> legacy2aidl_EffectParameterReader_ParameterExtension(
+        EffectParamReader& param) {
+    VendorExtension ext =
+            VALUE_OR_RETURN(legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+    return UNION_MAKE(Parameter, specific, UNION_MAKE(Parameter::Specific, vendorEffect, ext));
+}
+
+ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl,
+        EffectParamWriter& legacy) {
+    VendorExtension ext = VALUE_OR_RETURN(
+            (::aidl::android::getParameterSpecific<Parameter, VendorExtension,
+                                                   Parameter::Specific::vendorEffect>(aidl)));
+    return VALUE_OR_RETURN_STATUS(
+            aidl2legacy_VendorExtension_EffectParameterWriter_Data(legacy, ext));
+}
+
+}  // namespace android
+}  // aidl
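
A hedged usage sketch for the effect flag converters defined in this file (the function name and
the harness are illustrative, not part of the change); only the bits modeled by the AIDL Flags
parcelable (type, insert, volume, HW acceleration, and the indication/bypass bits) survive the
round trip:

    #include <cstdint>

    #include <media/AidlConversionEffect.h>

    // Returns true if 'legacyFlags' converts to AIDL Flags and back unchanged.
    bool flagsRoundTrip(uint32_t legacyFlags) {
        auto aidlFlags = ::aidl::android::legacy2aidl_uint32_Flags(legacyFlags);
        if (!aidlFlags.has_value()) return false;
        auto backToLegacy = ::aidl::android::aidl2legacy_Flags_uint32(aidlFlags.value());
        return backToLegacy.has_value() && backToLegacy.value() == legacyFlags;
    }
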
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
new file mode 100644
index 0000000..9b14a5e
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sstream>
+#include <utility>
+
+#include <system/audio.h>
+#define LOG_TAG "AidlConversionNdk"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <utils/Errors.h>
+
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <Utils.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AIDL NDK backend to legacy audio data structure conversion utilities.
+
+namespace aidl {
+namespace android {
+
+using hardware::audio::common::PlaybackTrackMetadata;
+using hardware::audio::common::RecordTrackMetadata;
+using ::android::BAD_VALUE;
+using ::android::OK;
+
+namespace {
+
+::android::status_t combineString(
+        const std::vector<std::string>& v, char separator, std::string* result) {
+    std::ostringstream oss;
+    for (const auto& s : v) {
+        if (oss.tellp() > 0) {
+            oss << separator;
+        }
+        if (s.find(separator) == std::string::npos) {
+            oss << s;
+        } else {
+            ALOGE("%s: string \"%s\" contains separator character \"%c\"",
+                    __func__, s.c_str(), separator);
+            return BAD_VALUE;
+        }
+    }
+    *result = oss.str();
+    return OK;
+}
+
+std::vector<std::string> splitString(const std::string& s, char separator) {
+    std::istringstream iss(s);
+    std::string t;
+    std::vector<std::string> result;
+    while (std::getline(iss, t, separator)) {
+        result.push_back(std::move(t));
+    }
+    return result;
+}
+
+std::vector<std::string> filterOutNonVendorTags(const std::vector<std::string>& tags) {
+    std::vector<std::string> result;
+    std::copy_if(tags.begin(), tags.end(), std::back_inserter(result),
+            ::aidl::android::hardware::audio::common::maybeVendorExtension);
+    return result;
+}
+
+}  // namespace
+
+// buffer_provider_t is not supported and is therefore skipped.
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput) {
+    buffer_config_t legacy;
+
+    legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.base.sampleRate));
+    legacy.mask |= EFFECT_CONFIG_SMP_RATE;
+
+    legacy.channels = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.base.channelMask, isInput));
+    legacy.mask |= EFFECT_CONFIG_CHANNELS;
+
+    legacy.format =
+            VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.base.format));
+    legacy.mask |= EFFECT_CONFIG_FORMAT;
+    legacy.buffer.frameCount = aidl.frameCount;
+
+    // TODO: add accessMode and mask
+    return legacy;
+}
+
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_buffer_config_t_AudioConfig(const buffer_config_t& legacy, bool isInput) {
+    media::audio::common::AudioConfig aidl;
+
+    if (legacy.mask & EFFECT_CONFIG_SMP_RATE) {
+        aidl.base.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
+    }
+    if (legacy.mask & EFFECT_CONFIG_CHANNELS) {
+        aidl.base.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                static_cast<audio_channel_mask_t>(legacy.channels), isInput));
+    }
+    if (legacy.mask & EFFECT_CONFIG_FORMAT) {
+        aidl.base.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
+                static_cast<audio_format_t>(legacy.format)));
+    }
+    aidl.frameCount = legacy.buffer.frameCount;
+
+    // TODO: add accessMode and mask
+    return aidl;
+}
+
+::android::status_t aidl2legacy_AudioAttributesTags(
+        const std::vector<std::string>& aidl, char* legacy) {
+    std::string aidlTags;
+    RETURN_STATUS_IF_ERROR(combineString(
+                    filterOutNonVendorTags(aidl), AUDIO_ATTRIBUTES_TAGS_SEPARATOR, &aidlTags));
+    RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidlTags, legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+    return OK;
+}
+
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy) {
+    std::string legacyTags = VALUE_OR_RETURN(legacy2aidl_string(
+                    legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+    return filterOutNonVendorTags(splitString(legacyTags, AUDIO_ATTRIBUTES_TAGS_SEPARATOR));
+}
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(const PlaybackTrackMetadata& aidl) {
+    playback_track_metadata_v7 legacy;
+    legacy.base.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+    legacy.base.content_type = VALUE_OR_RETURN(aidl2legacy_AudioContentType_audio_content_type_t(
+                    aidl.contentType));
+    legacy.base.gain = aidl.gain;
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, false /*isInput*/));
+    RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+    return legacy;
+}
+
+ConversionResult<PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+        const playback_track_metadata_v7& legacy) {
+    PlaybackTrackMetadata aidl;
+    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.base.usage));
+    aidl.contentType = VALUE_OR_RETURN(legacy2aidl_audio_content_type_t_AudioContentType(
+                    legacy.base.content_type));
+    aidl.gain = legacy.base.gain;
+    aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    legacy.channel_mask, false /*isInput*/));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+    return aidl;
+}
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(const RecordTrackMetadata& aidl) {
+    record_track_metadata_v7 legacy;
+    legacy.base.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
+    legacy.base.gain = aidl.gain;
+    if (aidl.destinationDevice.has_value()) {
+        RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl.destinationDevice.value(),
+                        &legacy.base.dest_device, legacy.base.dest_device_address));
+    } else {
+        legacy.base.dest_device = AUDIO_DEVICE_NONE;
+    }
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, true /*isInput*/));
+    RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+    return legacy;
+}
+
+ConversionResult<RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy) {
+    RecordTrackMetadata aidl;
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.base.source));
+    aidl.gain = legacy.base.gain;
+    if (legacy.base.dest_device != AUDIO_DEVICE_NONE) {
+        aidl.destinationDevice = VALUE_OR_RETURN(legacy2aidl_audio_device_AudioDevice(
+                        legacy.base.dest_device, legacy.base.dest_device_address));
+    }
+    aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    legacy.channel_mask, true /*isInput*/));
+    aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+    return aidl;
+}
+
+}  // namespace android
+}  // aidl
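
As a minimal illustration of the tag helpers above (assuming the NDK backend; 'copyVendorTags'
and its buffer handling are invented for the example), the legacy side carries tags as one
separator-delimited string while the AIDL side uses a string vector, and non-vendor tags are
dropped in both directions:

    #include <media/AidlConversionNdk.h>
    #include <system/audio.h>

    // 'legacyOut' must point to a buffer of at least AUDIO_ATTRIBUTES_TAGS_MAX_SIZE bytes.
    ::android::status_t copyVendorTags(const char* legacyIn, char* legacyOut) {
        auto aidlTags = ::aidl::android::legacy2aidl_AudioAttributesTags(legacyIn);
        if (!aidlTags.has_value()) return ::android::BAD_VALUE;
        return ::aidl::android::aidl2legacy_AudioAttributesTags(aidlTags.value(), legacyOut);
    }
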
diff --git a/media/audioaidlconversion/AidlConversionNdkCpp.cpp b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
new file mode 100644
index 0000000..c64a074
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionNdkCpp.cpp
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <type_traits>
+
+#define LOG_TAG "AidlConversionNdkCpp"
+#include <utils/Log.h>
+
+#include <android-base/expected.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_parcel.h>
+#include <binder/Enums.h>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
+
+using aidl::android::aidl_utils::statusTFromBinderStatusT;
+
+namespace android {
+
+namespace {
+
+// cpp2ndk and ndk2cpp are universal converters that work for any parcelable
+// type; however, they are not the most efficient way to convert because of
+// the extra marshaling / unmarshaling step.
+
+template<typename NdkType, typename CppType>
+ConversionResult<NdkType> cpp2ndk(const CppType& cpp) {
+    Parcel cppParcel;
+    RETURN_IF_ERROR(cpp.writeToParcel(&cppParcel));
+    ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+    const int32_t ndkParcelBegin = AParcel_getDataPosition(ndkParcel.get());
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_unmarshal(
+                            ndkParcel.get(), cppParcel.data(), cppParcel.dataSize())));
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_setDataPosition(
+                            ndkParcel.get(), ndkParcelBegin)));
+    NdkType ndk;
+    RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.readFromParcel(ndkParcel.get())));
+    return ndk;
+}
+
+template<typename CppType, typename NdkType>
+ConversionResult<CppType> ndk2cpp(const NdkType& ndk) {
+    ::ndk::ScopedAParcel ndkParcel(AParcel_create());
+    RETURN_IF_ERROR(statusTFromBinderStatusT(ndk.writeToParcel(ndkParcel.get())));
+    const int32_t ndkParcelDataSize = AParcel_getDataSize(ndkParcel.get());
+    if (ndkParcelDataSize < 0) {
+        return base::unexpected(BAD_VALUE);
+    }
+    // Parcel does not expose its data in a mutable form, so we use an intermediate buffer.
+    std::vector<uint8_t> parcelData(static_cast<size_t>(ndkParcelDataSize));
+    RETURN_IF_ERROR(statusTFromBinderStatusT(AParcel_marshal(
+                            ndkParcel.get(), parcelData.data(), 0, ndkParcelDataSize)));
+    Parcel cppParcel;
+    RETURN_IF_ERROR(cppParcel.setData(parcelData.data(), parcelData.size()));
+    CppType cpp;
+    RETURN_IF_ERROR(cpp.readFromParcel(&cppParcel));
+    return cpp;
+}
+
+// cpp2ndk_Enum and ndk2cpp_Enum are more efficient implementations specifically for enums.
+
+template<typename OutEnum, typename OutEnumRange, typename InEnum>
+        ConversionResult<OutEnum> convertEnum(const OutEnumRange& range, InEnum e) {
+    using InIntType = std::underlying_type_t<InEnum>;
+    static_assert(std::is_same_v<InIntType, std::underlying_type_t<OutEnum>>);
+
+    InIntType inEnumIndex = static_cast<InIntType>(e);
+    OutEnum outEnum = static_cast<OutEnum>(inEnumIndex);
+    if (std::find(range.begin(), range.end(), outEnum) == range.end()) {
+        return base::unexpected(BAD_VALUE);
+    }
+    return outEnum;
+}
+
+template<typename NdkEnum, typename CppEnum>
+        ConversionResult<NdkEnum> cpp2ndk_Enum(CppEnum cpp) {
+    return convertEnum<NdkEnum>(::ndk::enum_range<NdkEnum>(), cpp);
+}
+
+template<typename CppEnum, typename NdkEnum>
+        ConversionResult<CppEnum> ndk2cpp_Enum(NdkEnum ndk) {
+    return convertEnum<CppEnum>(enum_range<CppEnum>(), ndk);
+}
+
+}  // namespace
+
+#define GENERATE_CONVERTERS(packageName, className)                     \
+    ConversionResult<::aidl::packageName::className> cpp2ndk_##className( \
+            const ::packageName::className& cpp) {                      \
+        return cpp2ndk<::aidl::packageName::className>(cpp);            \
+    }                                                                   \
+    ConversionResult<::packageName::className> ndk2cpp_##className(     \
+            const ::aidl::packageName::className& ndk) {                \
+        return ndk2cpp<::packageName::className>(ndk);                  \
+    }
+
+#define GENERATE_ENUM_CONVERTERS(packageName, className)                \
+    ConversionResult<::aidl::packageName::className> cpp2ndk_##className( \
+            const ::packageName::className& cpp) {                      \
+        return cpp2ndk_Enum<::aidl::packageName::className>(cpp);       \
+    }                                                                   \
+    ConversionResult<::packageName::className> ndk2cpp_##className(     \
+            const ::aidl::packageName::className& ndk) {                \
+        return ndk2cpp_Enum<::packageName::className>(ndk);             \
+}
+
+GENERATE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+GENERATE_CONVERTERS(android::media::audio::common, AudioHalEngineConfig);
+GENERATE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+GENERATE_ENUM_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+GENERATE_ENUM_CONVERTERS(android::media::audio::common, AudioMode);
+GENERATE_CONVERTERS(android::media::audio::common, AudioPort);
+
+}  // namespace android
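
A short sketch of calling one of the generated bridges above (illustrative only; the surrounding
function is invented, and both the CPP and NDK type libraries are assumed to be linked as the
Android.bp rules below arrange):

    #include <media/AidlConversionNdkCpp.h>

    // Convert a CPP-backend AudioFormatDescription into its NDK-backend counterpart.
    bool toNdkFormat(const ::android::media::audio::common::AudioFormatDescription& cpp,
                     ::aidl::android::media::audio::common::AudioFormatDescription* ndk) {
        auto result = ::android::cpp2ndk_AudioFormatDescription(cpp);
        if (!result.has_value()) return false;
        *ndk = result.value();
        return true;
    }
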
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
new file mode 100644
index 0000000..d3a5755
--- /dev/null
+++ b/media/audioaidlconversion/Android.bp
@@ -0,0 +1,238 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "audio_aidl_conversion_common_util_default",
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    min_sdk_version: "29",
+    export_include_dirs: [
+        "include",
+    ],
+    header_libs: [
+        "libbase_headers",
+        "liberror_headers",
+    ],
+    export_header_lib_headers: [
+        "libbase_headers",
+        "liberror_headers",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.btservices",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+// Intended for clients that only need AidlConversionUtil.h, without pulling in extra dependencies.
+cc_library_headers {
+    name: "libaudio_aidl_conversion_common_util_cpp",
+    defaults: [
+        "audio_aidl_conversion_common_util_default",
+    ],
+}
+
+cc_library_headers {
+    name: "libaudio_aidl_conversion_common_util_ndk",
+    defaults: [
+        "audio_aidl_conversion_common_util_default",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+}
+
+cc_defaults {
+    name: "audio_aidl_conversion_common_default",
+    export_include_dirs: ["include"],
+    host_supported: true,
+    vendor_available: true,
+    double_loadable: true,
+    header_libs: [
+        "libaudio_system_headers",
+        "libhardware_headers",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libshmemcompat",
+        "libstagefright_foundation",
+        "libutils",
+        "shared-file-region-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    export_shared_lib_headers: [
+        "libbase",
+        "shared-file-region-aidl-cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+/**
+ * Only the AIDL CPP backend conversion is supported.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_common_cpp",
+    srcs: [
+        "AidlConversionCppNdk.cpp",
+    ],
+    header_libs: [
+        "libaudio_aidl_conversion_common_util_cpp",
+    ],
+    export_header_lib_headers: [
+        "libaudio_aidl_conversion_common_util_cpp",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_media_audio_common_types_cpp_export_shared",
+    ],
+    min_sdk_version: "29",
+}
+
+/**
+ * Only the AIDL NDK backend conversion is supported.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_common_ndk",
+    srcs: [
+        "AidlConversionCppNdk.cpp",
+        "AidlConversionNdk.cpp",
+    ],
+    header_libs: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    export_header_lib_headers: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_hardware_audio_common_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libbinder_ndk",
+        "libbase",
+    ],
+    static_libs: [
+        "libaudioaidlcommon",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+    min_sdk_version: "31", //AParcelableHolder has been introduced in 31
+}
+
+/**
+ * Includes only the AIDL core HAL conversion.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_core_ndk",
+    srcs: [
+        "AidlConversionCore.cpp",
+    ],
+    header_libs: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    export_header_lib_headers: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_hardware_audio_common_ndk_shared",
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libbinder_ndk",
+        "libbase",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+    min_sdk_version: "31", //AParcelableHolder has been introduced in 31
+}
+
+/**
+ * Includes only the AIDL effect HAL conversion.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_effect_ndk",
+    srcs: [
+        "AidlConversionEffect.cpp",
+    ],
+    header_libs: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    export_header_lib_headers: [
+        "libaudio_aidl_conversion_common_util_ndk",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_hardware_audio_common_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libbinder_ndk",
+        "libbase",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+    min_sdk_version: "31", //AParcelableHolder has been introduced in 31
+}
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+cc_library {
+    name: "libaudio_aidl_conversion_common_ndk_cpp",
+    srcs: [
+        "AidlConversionNdkCpp.cpp",
+    ],
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "audio_aidl_conversion_common_util_default",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    shared_libs: [
+        "libbinder_ndk",
+        "libbase",
+    ],
+    cflags: [
+        "-DBACKEND_CPP_NDK",
+    ],
+    min_sdk_version: "33", //AParcel_unmarshal has been introduced in 33
+}
diff --git a/media/audioaidlconversion/TEST_MAPPING b/media/audioaidlconversion/TEST_MAPPING
new file mode 100644
index 0000000..903b88a
--- /dev/null
+++ b/media/audioaidlconversion/TEST_MAPPING
@@ -0,0 +1,10 @@
+{
+  "presubmit": [
+    {
+      "name": "audio_aidl_ndk_conversion_tests"
+    },
+    {
+      "name": "audio_aidl_ndk_cpp_conversion_tests"
+    }
+  ]
+}
diff --git a/media/audioaidlconversion/include/media/AidlConversionCore.h b/media/audioaidlconversion/include/media/AidlConversionCore.h
new file mode 100644
index 0000000..aaa2e53
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCore.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Can only handle conversions between the AIDL NDK backend and legacy types.
+ */
+#include <aidl/android/hardware/audio/core/IStreamIn.h>
+#include <media/AidlConversionUtil.h>
+#include <system/audio.h>
+
+namespace aidl {
+namespace android {
+
+ConversionResult<audio_microphone_direction_t>
+aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(
+        hardware::audio::core::IStreamIn::MicrophoneDirection aidl);
+ConversionResult<hardware::audio::core::IStreamIn::MicrophoneDirection>
+legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(audio_microphone_direction_t legacy);
+
+}  // namespace android
+}  // namespace aidl
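
A small, hedged sketch of the declarations above (the helper name is invented); it simply checks
that a legacy microphone direction survives conversion to the AIDL core-HAL enum and back:

    #include <media/AidlConversionCore.h>

    bool directionRoundTrips(audio_microphone_direction_t legacy) {
        auto aidlDir = ::aidl::android::
                legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(legacy);
        if (!aidlDir.has_value()) return false;
        auto back = ::aidl::android::
                aidl2legacy_MicrophoneDirection_audio_microphone_direction_t(aidlDir.value());
        return back.has_value() && back.value() == legacy;
    }
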
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
new file mode 100644
index 0000000..bc9d4d5
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -0,0 +1,443 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// WARNING: This file is intended for multiple inclusion.
+// Do not include it directly; use 'AidlConversionCppNdk.h' instead.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+    (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP
+#endif  // BACKEND_NDK_IMPL
+
+#include <limits>
+#include <type_traits>
+
+/**
+ * Can handle conversions between AIDL (both the CPP and NDK backends) and legacy types.
+ * The backend is selected by the cflags set in Android.bp.
+ */
+#if defined(BACKEND_NDK_IMPL)
+#define PREFIX(f) <aidl/f>
+#else
+#define PREFIX(f) <f>
+#endif
+
+#include PREFIX(android/media/audio/common/AudioChannelLayout.h)
+#include PREFIX(android/media/audio/common/AudioConfig.h)
+#include PREFIX(android/media/audio/common/AudioConfigBase.h)
+#include PREFIX(android/media/audio/common/AudioContentType.h)
+#include PREFIX(android/media/audio/common/AudioDeviceDescription.h)
+#include PREFIX(android/media/audio/common/AudioDualMonoMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
+#include PREFIX(android/media/audio/common/AudioFormatDescription.h)
+#include PREFIX(android/media/audio/common/AudioGain.h)
+#include PREFIX(android/media/audio/common/AudioGainConfig.h)
+#include PREFIX(android/media/audio/common/AudioGainMode.h)
+#include PREFIX(android/media/audio/common/AudioInputFlags.h)
+#include PREFIX(android/media/audio/common/AudioIoFlags.h)
+#include PREFIX(android/media/audio/common/AudioLatencyMode.h)
+#include PREFIX(android/media/audio/common/AudioMode.h)
+#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
+#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
+#include PREFIX(android/media/audio/common/AudioPort.h)
+#include PREFIX(android/media/audio/common/AudioPortConfig.h)
+#include PREFIX(android/media/audio/common/AudioPortExt.h)
+#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
+#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
+#include PREFIX(android/media/audio/common/AudioProfile.h)
+#include PREFIX(android/media/audio/common/AudioSource.h)
+#include PREFIX(android/media/audio/common/AudioStandard.h)
+#include PREFIX(android/media/audio/common/AudioUsage.h)
+#include PREFIX(android/media/audio/common/AudioUuid.h)
+#include PREFIX(android/media/audio/common/ExtraAudioDescriptor.h)
+#include PREFIX(android/media/audio/common/Int.h)
+#include PREFIX(android/media/audio/common/MicrophoneDynamicInfo.h)
+#include PREFIX(android/media/audio/common/MicrophoneInfo.h)
+#undef PREFIX
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+
+#if defined(BACKEND_NDK_IMPL)
+namespace aidl {
+#endif
+
+namespace android {
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
+
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<::android::String8> aidl2legacy_string_view_String8(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String8_string(const ::android::String8& legacy);
+
+ConversionResult<::android::String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const ::android::String16& legacy);
+
+ConversionResult<std::optional<::android::String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<::android::String16> legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+        const media::audio::common::AudioChannelLayout& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioChannelLayout>
+legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
+
+audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+        int aidlLayout, bool isInput);
+int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+        audio_channel_mask_t legacy, bool isInput);
+
+enum class AudioPortDirection {
+    INPUT, OUTPUT
+};
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(
+        const media::audio::common::AudioConfigBase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+        audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+        audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+        const media::audio::common::AudioIoFlags& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, bool isInput);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(
+        media::audio::common::AudioContentType aidl);
+ConversionResult<media::audio::common::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+        const media::audio::common::AudioDeviceDescription& aidl);
+ConversionResult<media::audio::common::AudioDeviceDescription>
+legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
+
+media::audio::common::AudioDeviceAddress::Tag suggestDeviceAddressTag(
+        const media::audio::common::AudioDeviceDescription& description);
+
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        char* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        ::android::String8* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+        std::string* legacyAddress);
+
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const char* legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const ::android::String8& legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const std::string& legacyAddress);
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+        const media::audio::common::ExtraAudioDescriptor& aidl);
+
+ConversionResult<media::audio::common::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+        const audio_extra_audio_descriptor& legacy);
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+        media::audio::common::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+        audio_encapsulation_metadata_type_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
+        media::audio::common::AudioEncapsulationMode aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+        const media::audio::common::AudioEncapsulationType& aidl);
+ConversionResult<media::audio::common::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+        const audio_encapsulation_type_t& legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+        const media::audio::common::AudioFormatDescription& aidl);
+ConversionResult<media::audio::common::AudioFormatDescription>
+legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
+
+ConversionResult<audio_gain_mode_t>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
+ConversionResult<media::audio::common::AudioGainMode>
+legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+        const media::audio::common::AudioGainConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGainConfig>
+legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
+ConversionResult<media::audio::common::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
+        const media::audio::common::AudioOffloadInfo& aidl);
+ConversionResult<media::audio::common::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+ConversionResult<audio_stream_type_t>
+aidl2legacy_AudioStreamType_audio_stream_type_t(media::audio::common::AudioStreamType aidl);
+ConversionResult<media::audio::common::AudioStreamType>
+legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+        const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+        const audio_port_config_mix_ext_usecase& legacy, bool isInput);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+        legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy);
+
+::android::status_t aidl2legacy_AudioPortConfig_audio_port_config(
+        const media::audio::common::AudioPortConfig& aidl, bool isInput,
+        audio_port_config* legacy, int32_t* portId);
+ConversionResult<media::audio::common::AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+        const audio_port_config& legacy, bool isInput, int32_t portId);
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+        const media::audio::common::AudioPortMixExt& aidl);
+ConversionResult<media::audio::common::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+        const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(
+        const media::audio::common::AudioPort& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
+
+ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
+        const media::audio::common::AudioProfile& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
+        const audio_profile& legacy, bool isInput);
+
+ConversionResult<audio_standard_t> aidl2legacy_AudioStandard_audio_standard_t(
+        media::audio::common::AudioStandard aidl);
+ConversionResult<media::audio::common::AudioStandard> legacy2aidl_audio_standard_t_AudioStandard(
+        audio_standard_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+        media::audio::common::AudioSource aidl);
+ConversionResult<media::audio::common::AudioSource> legacy2aidl_audio_source_t_AudioSource(
+        audio_source_t legacy);
+
+ConversionResult<audio_usage_t> aidl2legacy_AudioUsage_audio_usage_t(
+        media::audio::common::AudioUsage aidl);
+ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
+        audio_usage_t legacy);
+
+ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
+        const media::audio::common::AudioUuid &aidl);
+ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
+        const audio_uuid_t& legacy);
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::audio::common::AudioDualMonoMode aidl);
+ConversionResult<media::audio::common::AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
+        media::audio::common::AudioPlaybackRate::TimestretchFallbackMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchFallbackMode>
+legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
+        audio_timestretch_fallback_mode_t legacy);
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
+        media::audio::common::AudioPlaybackRate::TimestretchMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchMode>
+legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
+        audio_timestretch_stretch_mode_t legacy);
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(
+        const media::audio::common::AudioPlaybackRate& aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
+
+ConversionResult<audio_latency_mode_t>
+aidl2legacy_AudioLatencyMode_audio_latency_mode_t(media::audio::common::AudioLatencyMode aidl);
+ConversionResult<media::audio::common::AudioLatencyMode>
+legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy);
+
+ConversionResult<audio_microphone_location_t>
+aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(
+        media::audio::common::MicrophoneInfo::Location aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Location>
+legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy);
+
+ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
+        int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
+        audio_microphone_group_t legacy);
+
+ConversionResult<audio_microphone_directionality_t>
+aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
+        media::audio::common::MicrophoneInfo::Directionality aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Directionality>
+legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
+        audio_microphone_directionality_t legacy);
+
+ConversionResult<audio_microphone_coordinate>
+aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+        const media::audio::common::MicrophoneInfo::Coordinate& aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Coordinate>
+legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+        const audio_microphone_coordinate& legacy);
+
+ConversionResult<audio_microphone_channel_mapping_t>
+aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
+        media::audio::common::MicrophoneDynamicInfo::ChannelMapping aidl);
+ConversionResult<media::audio::common::MicrophoneDynamicInfo::ChannelMapping>
+legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
+        audio_microphone_channel_mapping_t legacy);
+
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+        const media::audio::common::MicrophoneInfo& aidlInfo,
+        const media::audio::common::MicrophoneDynamicInfo& aidlDynamic);
+::android::status_t
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
+        const audio_microphone_characteristic_t& legacy,
+        media::audio::common::MicrophoneInfo* aidlInfo,
+        media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
+
+}  // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+} // aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#endif
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
new file mode 100644
index 0000000..ea168a4
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+// Since conversion functions use ConversionResult, pull it in here.
+#include <media/AidlConversionUtil.h>
+
+// Include 'AidlConversionCppNdk.h' once if 'BACKEND_NDK' is defined,
+// or no 'BACKEND_*' is defined (C++ backend). Include twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL', once w/o defines.
+
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionCppNdk-impl.h>
+#undef BACKEND_NDK_IMPL
+#endif
+
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionCppNdk-impl.h>
+#endif
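
Which backend a client gets from this wrapper is driven entirely by the BACKEND_* defines it sees; a minimal sketch (not part of this change) of an NDK-backend translation unit follows. The -DBACKEND_NDK flag matches what the conversion tests later in this patch pass; the function name is hypothetical.

// Built with -DBACKEND_NDK: the declarations land in ::aidl::android and take the
// NDK-backend AIDL types. With no BACKEND_* define they land in ::android instead.
#include <media/AidlConversionCppNdk.h>
#include <system/audio.h>  // audio_format_t

::android::ConversionResult<audio_format_t> toLegacyFormat(
        const ::aidl::android::media::audio::common::AudioFormatDescription& aidl) {
    return ::aidl::android::aidl2legacy_AudioFormatDescription_audio_format_t(aidl);
}
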
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
new file mode 100644
index 0000000..5e245a7
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+/**
+ * Can only handle conversion between AIDL (NDK backend) and legacy type.
+ */
+#include <hardware/audio_effect.h>
+#include <media/AidlConversionUtil.h>
+#include <system/audio_effect.h>
+#include <system/audio_effects/audio_effects_utils.h>
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+
+namespace aidl {
+namespace android {
+
+template <typename P, typename T, typename P::Specific::Tag tag>
+ConversionResult<T> getParameterSpecific(const P& u) {
+    const auto& spec = VALUE_OR_RETURN(UNION_GET(u, specific));
+    return unionGetField<typename P::Specific, tag>(spec);
+}
+
+template <typename P, typename T, typename P::Specific::Tag tag, typename T::Tag field, typename F>
+ConversionResult<F> getParameterSpecificField(const P& u) {
+    const auto& spec =
+            VALUE_OR_RETURN((getParameterSpecific<std::decay_t<decltype(u)>, T, tag>(u)));
+    return VALUE_OR_RETURN((unionGetField<T, field>(spec)));
+}
+
+#define GET_PARAMETER_SPECIFIC_FIELD(_u, _effect, _tag, _field, _fieldType)                      \
+    getParameterSpecificField<std::decay_t<decltype(_u)>, _effect,                               \
+                              aidl::android::hardware::audio::effect::Parameter::Specific::_tag, \
+                              _effect::_field, _fieldType>(_u)
+
+#define MAKE_SPECIFIC_PARAMETER(_spec, _tag, _field, _value)                                 \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific,                  \
+               UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, _tag, \
+                          UNION_MAKE(_spec, _field, _value)))
+
+#define MAKE_SPECIFIC_PARAMETER_ID(_spec, _tag, _field)                     \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+               UNION_MAKE(_spec::Id, commonTag, _field))
+
+#define MAKE_EXTENSION_PARAMETER_ID(_effect, _tag, _field)                  \
+    UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+               UNION_MAKE(_effect::Id, vendorExtensionTag, _field))
+
+#define VENDOR_EXTENSION_GET_AND_RETURN(_effect, _tag, _param)                                    \
+    {                                                                                             \
+        aidl::android::hardware::audio::effect::VendorExtension _extId = VALUE_OR_RETURN_STATUS(  \
+                aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(_param));  \
+        aidl::android::hardware::audio::effect::Parameter::Id _id =                               \
+                MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId);                          \
+        aidl::android::hardware::audio::effect::Parameter _aidlParam;                             \
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
+        aidl::android::hardware::audio::effect::VendorExtension _ext =                            \
+                VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                              \
+                        _aidlParam, _effect, _tag, _effect::vendor, VendorExtension));            \
+        return VALUE_OR_RETURN_STATUS(                                                            \
+                aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(_aidlParam,   \
+                                                                                    _param));     \
+    }
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(
+        ::aidl::android::hardware::audio::effect::Flags::Type type);
+ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(
+        ::aidl::android::hardware::audio::effect::Flags::Insert insert);
+ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(
+        ::aidl::android::hardware::audio::effect::Flags::Volume volume);
+ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
+        ::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator hwAcceleratorMode);
+ConversionResult<uint32_t> aidl2legacy_Flags_uint32(
+        const ::aidl::android::hardware::audio::effect::Flags aidl);
+
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Type>
+legacy2aidl_uint32_Flags_Type(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Insert>
+legacy2aidl_uint32_Flags_Insert(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Volume>
+legacy2aidl_uint32_Flags_Volume(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator>
+legacy2aidl_uint32_Flags_HardwareAccelerator(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags> legacy2aidl_uint32_Flags(
+        uint32_t hal);
+
+ConversionResult<effect_descriptor_t> aidl2legacy_Descriptor_effect_descriptor(
+        const ::aidl::android::hardware::audio::effect::Descriptor& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Descriptor>
+legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& hal);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_echoDelay(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_echoDelay_Parameter_aec(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_mobileMode(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_mobileMode_Parameter_aec(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_levelEstimator(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_levelEstimator_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_saturationMargin(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_saturationMargin_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint16_t> aidl2legacy_Parameter_BassBoost_uint16_strengthPm(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint16_strengthPm_Parameter_BassBoost(uint16_t legacy);
+
+ConversionResult<int16_t> aidl2legacy_Parameter_Downmix_int16_type(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_int16_type_Parameter_Downmix(int16_t legacy);
+
+ConversionResult<::aidl::android::hardware::audio::effect::DynamicsProcessing::ResolutionPreference>
+legacy2aidl_int32_DynamicsProcessing_ResolutionPreference(int32_t legacy);
+ConversionResult<int32_t> aidl2legacy_DynamicsProcessing_ResolutionPreference_int32(
+        ::aidl::android::hardware::audio::effect::DynamicsProcessing::ResolutionPreference aidl);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(
+        ::aidl::android::hardware::audio::effect::Visualizer::ScalingMode aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Visualizer::ScalingMode>
+legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(
+        ::aidl::android::hardware::audio::effect::Visualizer::MeasurementMode aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Visualizer::MeasurementMode>
+legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(uint32_t legacy);
+
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_EffectParameterReader_ParameterExtension(
+        ::android::effect::utils::EffectParamReader& param);
+ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
+        const ::aidl::android::hardware::audio::effect::Parameter& aidl,
+        ::android::effect::utils::EffectParamWriter& legacy);
+
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Param_VendorExtension(
+        ::android::effect::utils::EffectParamReader& param);
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Data_VendorExtension(
+        ::android::effect::utils::EffectParamReader& param);
+
+ConversionResult<::android::status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+        ::android::effect::utils::EffectParamWriter& param,
+        ::aidl::android::hardware::audio::effect::VendorExtension ext);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_EffectParameterReader_ParameterExtension(
+        ::android::effect::utils::EffectParamReader& param);
+
+}  // namespace android
+}  // namespace aidl
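
To make the union helpers in this header concrete, here is a hedged sketch of reading one field out of an effect Parameter. The BassBoost strengthPm field follows from the declarations above; the 'bassBoost' tag name, the int32_t field type, and the helper function itself are assumptions about the generated AIDL union, not part of this change.

// Built with -DBACKEND_NDK (these effect conversions only target the NDK backend).
#include <media/AidlConversionEffect.h>

namespace aidl::android {

// Hypothetical helper: read the bass boost strength (per mille) out of a Parameter.
ConversionResult<uint16_t> getBassBoostStrengthPm(
        const hardware::audio::effect::Parameter& aidlParam) {
    using hardware::audio::effect::BassBoost;
    const int32_t strength = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
            aidlParam, BassBoost, bassBoost, strengthPm, int32_t));
    return convertIntegral<uint16_t>(strength);
}

}  // namespace aidl::android
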
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
new file mode 100644
index 0000000..813a728
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Can only handle conversion between AIDL (NDK backend) and legacy types.
+ */
+
+#include <string>
+#include <vector>
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effect.h>
+
+#include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/media/audio/common/AudioConfig.h>
+#include <media/AidlConversionUtil.h>
+
+namespace aidl {
+namespace android {
+
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+        const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig> legacy2aidl_buffer_config_t_AudioConfig(
+        const buffer_config_t& legacy, bool isInput);
+
+::android::status_t aidl2legacy_AudioAttributesTags(
+        const std::vector<std::string>& aidl, char* legacy);
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy);
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(
+        const hardware::audio::common::PlaybackTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+        const playback_track_metadata_v7& legacy);
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(
+        const hardware::audio::common::RecordTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
+
+}  // namespace android
+}  // namespace aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
new file mode 100644
index 0000000..f4822aa
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionNdkCpp.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Conversions between the NDK and CPP backends for common types.
+ */
+#include <aidl/android/media/audio/common/AudioFormatDescription.h>
+#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <aidl/android/media/audio/common/AudioMMapPolicyType.h>
+#include <aidl/android/media/audio/common/AudioMode.h>
+#include <aidl/android/media/audio/common/AudioPort.h>
+#include <android/media/audio/common/AudioFormatDescription.h>
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioMode.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <media/AidlConversionUtil.h>
+
+namespace android {
+
+#define DECLARE_CONVERTERS(packageName, className)                       \
+    ConversionResult<::aidl::packageName::className>                    \
+    cpp2ndk_##className(const ::packageName::className& cpp);           \
+    ConversionResult<::packageName::className>                          \
+    ndk2cpp_##className(const ::aidl::packageName::className& ndk);
+
+DECLARE_CONVERTERS(android::media::audio::common, AudioFormatDescription);
+DECLARE_CONVERTERS(android::media::audio::common, AudioHalEngineConfig);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyInfo);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMMapPolicyType);
+DECLARE_CONVERTERS(android::media::audio::common, AudioMode);
+DECLARE_CONVERTERS(android::media::audio::common, AudioPort);
+
+#undef DECLARE_CONVERTERS
+
+}  // namespace android
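
A short usage sketch for the converters declared by DECLARE_CONVERTERS; the bridging function and its error handling are hypothetical, and the round-trip tests at the end of this patch exercise the same calls.

// Built with -DBACKEND_CPP_NDK (as the ndk_cpp tests below are), linking
// libaudio_aidl_conversion_common_ndk_cpp.
#include <media/AidlConversionNdkCpp.h>
#include <utils/Errors.h>  // ::android::status_t, OK, BAD_VALUE

// Hypothetical bridge: forward a format reported by an NDK-backend HAL to code
// that still consumes the CPP-backend type.
::android::status_t forwardFormat(
        const ::aidl::android::media::audio::common::AudioFormatDescription& ndkFormat) {
    auto cppFormat = ::android::ndk2cpp_AudioFormatDescription(ndkFormat);
    if (!cppFormat.ok()) return ::android::BAD_VALUE;
    // ... hand cppFormat.value() to a consumer of ::android::media::audio::common types ...
    return ::android::OK;
}
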
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
similarity index 68%
rename from media/libaudioclient/include/media/AidlConversionUtil.h
rename to media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
index 8817c35..f49f681 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,20 +14,41 @@
  * limitations under the License.
  */
 
-#pragma once
+// WARNING: This file is intended for multiple inclusion, one time
+// with BACKEND_NDK_IMPL defined, one time without it.
+// Do not include directly, use 'AidlConversionUtil.h'.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+    (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP
+#endif  // BACKEND_NDK_IMPL
 
 #include <limits>
 #include <type_traits>
 #include <utility>
 
-#include <binder/Enums.h>
+#include <android-base/expected.h>
 #include <binder/Status.h>
-#include <error/Result.h>
 
+#if defined(BACKEND_NDK_IMPL)
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_status.h>
+
+namespace aidl {
+#else
+#include <binder/Enums.h>
+#endif  // BACKEND_NDK_IMPL
 namespace android {
 
+#if defined(BACKEND_NDK_IMPL)
+// This adds `::aidl::android::ConversionResult` for convenience.
+// Otherwise, it would be required to write `::android::ConversionResult` everywhere.
 template <typename T>
-using ConversionResult = error::Result<T>;
+using ConversionResult = ::android::ConversionResult<T>;
+#endif  // BACKEND_NDK_IMPL
 
 /**
  * A generic template to safely cast between integral types, respecting limits of the destination
@@ -37,17 +58,17 @@
 ConversionResult<To> convertIntegral(From from) {
     // Special handling is required for signed / vs. unsigned comparisons, since otherwise we may
     // have the signed converted to unsigned and produce wrong results.
-    if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+    if constexpr (std::is_signed_v<From> && !std::is_signed_v<To>) {
         if (from < 0 || from > std::numeric_limits<To>::max()) {
-            return base::unexpected(BAD_VALUE);
+            return ::android::base::unexpected(::android::BAD_VALUE);
         }
-    } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+    } else if constexpr (std::is_signed_v<To> && !std::is_signed_v<From>) {
         if (from > std::numeric_limits<To>::max()) {
-            return base::unexpected(BAD_VALUE);
+            return ::android::base::unexpected(::android::BAD_VALUE);
         }
-    } else {
+    } else /* constexpr */ {
         if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
-            return base::unexpected(BAD_VALUE);
+            return ::android::base::unexpected(::android::BAD_VALUE);
         }
     }
     return static_cast<To>(from);
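
For illustration, a minimal sketch (not part of the patch) of what the range checks in convertIntegral give a caller; with no BACKEND_* define the template lives in ::android.

#include <cassert>
#include <cstdint>

#include <media/AidlConversionUtil.h>

void convertIntegralExamples() {
    // In range: the value is narrowed and returned.
    auto ok = ::android::convertIntegral<uint8_t>(200);
    assert(ok.ok() && ok.value() == 200);

    // Negative source into an unsigned destination: rejected with ::android::BAD_VALUE.
    auto bad = ::android::convertIntegral<uint8_t>(-1);
    assert(!bad.ok());
}
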
@@ -67,14 +88,14 @@
  * A generic template that helps convert containers of convertible types, using iterators.
  */
 template<typename InputIterator, typename OutputIterator, typename Func>
-status_t convertRange(InputIterator start,
+::android::status_t convertRange(InputIterator start,
                       InputIterator end,
                       OutputIterator out,
                       const Func& itemConversion) {
     for (InputIterator iter = start; iter != end; ++iter, ++out) {
         *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
     }
-    return OK;
+    return ::android::OK;
 }
 
 /**
@@ -82,7 +103,7 @@
  * Uses a limit as maximum conversion items.
  */
 template<typename InputIterator, typename OutputIterator, typename Func>
-status_t convertRangeWithLimit(InputIterator start,
+::android::status_t convertRangeWithLimit(InputIterator start,
                       InputIterator end,
                       OutputIterator out,
                       const Func& itemConversion,
@@ -94,7 +115,7 @@
     for (InputIterator iter = start; (iter != last); ++iter, ++out) {
         *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
     }
-    return OK;
+    return ::android::OK;
 }
 
 /**
@@ -140,7 +161,7 @@
     OutputContainer output;
     auto ins = std::inserter(output, output.begin());
     for (const auto& item1 : input1) {
-        RETURN_IF_ERROR(iter2 != input2.end() ? OK : BAD_VALUE);
+        RETURN_IF_ERROR(iter2 != input2.end() ? ::android::OK : ::android::BAD_VALUE);
         *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
     }
     return output;
@@ -248,7 +269,8 @@
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // Utilities for working with AIDL unions.
 // UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
-//   value of the respective field, or BAD_VALUE if the union is not set to the requested field.
+//   value of the respective field, or ::android::BAD_VALUE if the union is not set to the requested
+//   field.
 // UNION_SET(obj, fieldname, value) sets the requested field to the given value.
 
 template<typename T, typename T::Tag tag>
@@ -257,7 +279,7 @@
 template<typename T, typename T::Tag tag>
 ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
     if (u.getTag() != tag) {
-        return base::unexpected(BAD_VALUE);
+        return ::android::base::unexpected(::android::BAD_VALUE);
     }
     return u.template get<tag>();
 }
@@ -268,6 +290,8 @@
 #define UNION_SET(u, field, value) \
     (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
 
+#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
+
 namespace aidl_utils {
 
 /**
@@ -275,7 +299,11 @@
  */
 template <typename T>
 bool isValidEnum(T value) {
-    constexpr android::enum_range<T> er{};
+#if defined(BACKEND_NDK_IMPL)
+    constexpr ndk::enum_range<T> er{};
+#else
+    constexpr ::android::enum_range<T> er{};
+#endif
     return std::find(er.begin(), er.end(), value) != er.end();
 }
 
@@ -294,7 +322,7 @@
 }
 
 /**
- * Return the equivalent Android status_t from a binder exception code.
+ * Return the equivalent ::android::status_t from a binder exception code.
  *
  * Generally one should use statusTFromBinderStatus() instead.
  *
@@ -304,33 +332,33 @@
  * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
  * can be found from transactionError() or serviceSpecificErrorCode().
  */
-static inline status_t statusTFromExceptionCode(int32_t exceptionCode) {
+static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
     using namespace ::android::binder;
     switch (exceptionCode) {
         case Status::EX_NONE:
-            return OK;
-        case Status::EX_SECURITY: // Java SecurityException, rethrows locally in Java
-            return PERMISSION_DENIED;
-        case Status::EX_BAD_PARCELABLE: // Java BadParcelableException, rethrows in Java
-        case Status::EX_ILLEGAL_ARGUMENT: // Java IllegalArgumentException, rethrows in Java
-        case Status::EX_NULL_POINTER: // Java NullPointerException, rethrows in Java
-            return BAD_VALUE;
-        case Status::EX_ILLEGAL_STATE: // Java IllegalStateException, rethrows in Java
-        case Status::EX_UNSUPPORTED_OPERATION: // Java UnsupportedOperationException, rethrows
-            return INVALID_OPERATION;
+            return ::android::OK;
+        case Status::EX_SECURITY:  // Java SecurityException, rethrows locally in Java
+            return ::android::PERMISSION_DENIED;
+        case Status::EX_BAD_PARCELABLE:  // Java BadParcelableException, rethrows in Java
+        case Status::EX_ILLEGAL_ARGUMENT:  // Java IllegalArgumentException, rethrows in Java
+        case Status::EX_NULL_POINTER:  // Java NullPointerException, rethrows in Java
+            return ::android::BAD_VALUE;
+        case Status::EX_ILLEGAL_STATE:  // Java IllegalStateException, rethrows in Java
+        case Status::EX_UNSUPPORTED_OPERATION:  // Java UnsupportedOperationException, rethrows
+            return ::android::INVALID_OPERATION;
         case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
-        case Status::EX_PARCELABLE: // Java bootclass loader (not standard exception), rethrows
-        case Status::EX_NETWORK_MAIN_THREAD: // Java NetworkOnMainThreadException, rethrows
+        case Status::EX_PARCELABLE:  // Java bootclass loader (not standard exception), rethrows
+        case Status::EX_NETWORK_MAIN_THREAD:  // Java NetworkOnMainThreadException, rethrows
         case Status::EX_TRANSACTION_FAILED: // Native - see error code
-        case Status::EX_SERVICE_SPECIFIC:  // Java ServiceSpecificException,
-                                           // rethrows in Java with integer error code
-            return UNKNOWN_ERROR;
+        case Status::EX_SERVICE_SPECIFIC:   // Java ServiceSpecificException,
+                                            // rethrows in Java with integer error code
+            return ::android::UNKNOWN_ERROR;
     }
-    return UNKNOWN_ERROR;
+    return ::android::UNKNOWN_ERROR;
 }
 
 /**
- * Return the equivalent Android status_t from a binder status.
+ * Return the equivalent ::android::status_t from a binder status.
  *
  * Used to handle errors from a AIDL method declaration
  *
@@ -340,8 +368,8 @@
  *
  * return_type method(type0 param0, ...)
  */
-static inline status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
-    return status.isOk() ? OK // check OK,
+static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
+    return status.isOk() ? ::android::OK // check ::android::OK,
         : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
                                             // (fromServiceSpecificError)
         ?: status.transactionError() // a native binder transaction error (fromStatusT)
@@ -349,6 +377,24 @@
                                                     // standard Java exception (fromExceptionCode)
 }
 
+#if defined(BACKEND_NDK_IMPL)
+static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
+    // What we want to do is to 'return statusTFromBinderStatus(status.get()->get())'
+    // However, since the definition of AStatus is not exposed, we have to do the same
+    // via methods of ScopedAStatus:
+    return status.isOk() ? ::android::OK // check ::android::OK,
+        : status.getServiceSpecificError() // service-side error, not standard Java exception
+                                           // (fromServiceSpecificError)
+        ?: status.getStatus() // a native binder transaction error (fromStatusT)
+        ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
+                                                     // standard Java exception (fromExceptionCode)
+}
+
+static inline ::android::status_t statusTFromBinderStatusT(binder_status_t status) {
+    return statusTFromBinderStatus(::ndk::ScopedAStatus::fromStatus(status));
+}
+#endif
+
 /**
  * Return a binder::Status from native service status.
  *
@@ -356,23 +402,23 @@
  * where Java callers expect an exception, not an integer return value.
  */
 static inline ::android::binder::Status binderStatusFromStatusT(
-        status_t status, const char *optionalMessage = nullptr) {
+        ::android::status_t status, const char *optionalMessage = nullptr) {
     const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
     // From binder::Status instructions:
     //  Prefer a generic exception code when possible, then a service specific
-    //  code, and finally a status_t for low level failures or legacy support.
+    //  code, and finally a ::android::status_t for low level failures or legacy support.
     //  Exception codes and service specific errors map to nicer exceptions for
     //  Java clients.
 
     using namespace ::android::binder;
     switch (status) {
-        case OK:
+        case ::android::OK:
             return Status::ok();
-        case PERMISSION_DENIED: // throw SecurityException on Java side
+        case ::android::PERMISSION_DENIED: // throw SecurityException on Java side
             return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
-        case BAD_VALUE: // throw IllegalArgumentException on Java side
+        case ::android::BAD_VALUE: // throw IllegalArgumentException on Java side
             return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
-        case INVALID_OPERATION: // throw IllegalStateException on Java side
+        case ::android::INVALID_OPERATION: // throw IllegalStateException on Java side
             return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
     }
 
@@ -383,7 +429,14 @@
     return Status::fromServiceSpecificError(status, emptyIfNull);
 }
 
-
 } // namespace aidl_utils
 
 }  // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+}  // namespace aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#endif
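
A hedged sketch of the new ScopedAStatus overload of statusTFromBinderStatus in use; the interface and the close() call are placeholders, assuming an NDK-backend build with -DBACKEND_NDK.

#include <memory>

#include <media/AidlConversionUtil.h>  // built with -DBACKEND_NDK

// Hypothetical wrapper: turn the result of an NDK binder call into a legacy status_t.
template <typename NdkEffectInterface>
::android::status_t closeEffect(const std::shared_ptr<NdkEffectInterface>& effect) {
    ::ndk::ScopedAStatus status = effect->close();  // placeholder call on an NDK proxy
    return ::aidl::android::aidl_utils::statusTFromBinderStatus(status);
}
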
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil.h b/media/audioaidlconversion/include/media/AidlConversionUtil.h
new file mode 100644
index 0000000..b846436
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <error/Result.h>
+
+namespace android {
+// `ConversionResult` is always defined in the `::android` namespace,
+// so that it can be found from any nested namespace.
+// See below for the convenience alias specific to the NDK backend.
+template <typename T>
+using ConversionResult = ::android::error::Result<T>;
+}  // namespace android
+
+// Include 'AidlConversionUtil.h' once if 'BACKEND_NDK' is defined,
+// or no 'BACKEND_*' is defined (C++ backend). Include twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL', once w/o defines.
+
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionUtil-impl.h>
+#undef BACKEND_NDK_IMPL
+#endif
+
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionUtil-impl.h>
+#endif
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
new file mode 100644
index 0000000..88b2cc9
--- /dev/null
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -0,0 +1,70 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "libaudio_aidl_conversion_tests_defaults",
+    test_suites: ["device-tests"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_test {
+    name: "audio_aidl_ndk_conversion_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_ndk_static",
+        "latest_android_hardware_audio_common_ndk_static",
+        "libaudio_aidl_conversion_tests_defaults",
+    ],
+    srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+    ],
+    cflags: [
+        "-DBACKEND_NDK",
+    ],
+}
+
+cc_test {
+    name: "audio_aidl_ndk_cpp_conversion_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_static",
+        "latest_android_media_audio_common_types_ndk_static",
+        "libaudio_aidl_conversion_tests_defaults",
+    ],
+    srcs: ["audio_aidl_ndk_cpp_conversion_tests.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libaudio_aidl_conversion_common_ndk_cpp",
+    ],
+    cflags: [
+        "-DBACKEND_CPP_NDK",
+    ],
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
new file mode 100644
index 0000000..c505e60
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdk.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+    using member_type = U;
+};
+}  // namespace
+
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+namespace aidl::android::hardware::audio::common {
+    template <typename P>
+    std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+            std::ostream&> operator<<(std::ostream& os, const P& p) {
+        return os << p.toString();
+    }
+    template <typename E>
+    std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+        return os << toString(e);
+    }
+}  // namespace aidl::android::hardware::audio::common
+
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::media::audio::common::AudioSource;
+using aidl::android::media::audio::common::AudioUsage;
+using namespace aidl::android;   // for conversion functions
+
+TEST(AudioPlaybackTrackMetadata, Aidl2Legacy2Aidl) {
+    const PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+    auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPlaybackTrackMetadata, NonVendorTags) {
+    PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+    initial.tags.emplace_back("random string");  // Must be filtered out.
+    initial.tags.emplace_back("VX_GOOGLE_42");
+    auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_EQ(1, convBack.value().tags.size());
+    EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
+
+TEST(AudioRecordTrackMetadata, Aidl2Legacy2Aidl) {
+    const RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+    auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioRecordTrackMetadata, NonVendorTags) {
+    RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+    initial.tags.emplace_back("random string");  // Must be filtered out.
+    initial.tags.emplace_back("VX_GOOGLE_42");
+    auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_EQ(1, convBack.value().tags.size());
+    EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
new file mode 100644
index 0000000..735a14b
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_cpp_conversion_tests.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdkCpp.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+    using member_type = U;
+};
+}  // namespace
+
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+#define DEFINE_PRINTING_TEMPLATES()                                                               \
+    template <typename P>                                                                         \
+    std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>, \
+            std::ostream&> operator<<(std::ostream& os, const P& p) {                             \
+        return os << p.toString();                                                                \
+    }                                                                                             \
+    template <typename E>                                                                         \
+    std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) { \
+        return os << toString(e);                                                                 \
+    }
+
+namespace aidl::android::media::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+}  // namespace aidl::android::media::audio::common
+namespace android::hardware::audio::common {
+DEFINE_PRINTING_TEMPLATES();
+}  // namespace android::hardware::audio::common
+#undef DEFINE_PRINTING_TEMPLATES
+
+using namespace android;
+
+namespace {
+
+using namespace ::aidl::android::media::audio::common;
+
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+    AudioFormatDescription result;
+    result.type = type;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+    result.pcm = pcm;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+    AudioFormatDescription result;
+    result.encoding = encoding;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport, const std::string& encoding) {
+    auto result = make_AudioFormatDescription(encoding);
+    result.pcm = transport;
+    return result;
+}
+
+AudioFormatDescription make_AFD_Default() {
+    return AudioFormatDescription{};
+}
+
+AudioFormatDescription make_AFD_Invalid() {
+    return make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID);
+}
+
+AudioFormatDescription make_AFD_Pcm16Bit() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT);
+}
+
+AudioFormatDescription make_AFD_Bitstream() {
+    return make_AudioFormatDescription("example");
+}
+
+AudioFormatDescription make_AFD_Encap() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT, "example.encap");
+}
+
+AudioFormatDescription make_AFD_Encap_with_Enc() {
+    auto afd = make_AFD_Encap();
+    afd.encoding += "+example";
+    return afd;
+}
+
+}  // namespace
+
+// There is no reason to write a test for every type that gets converted via a parcelable,
+// since the conversion code is all the same.
+
+class AudioFormatDescriptionRoundTripTest :
+        public testing::TestWithParam<::aidl::android::media::audio::common::AudioFormatDescription>
+{
+};
+TEST_P(AudioFormatDescriptionRoundTripTest, Ndk2Cpp2Ndk) {
+    const auto& initial = GetParam();
+    auto conv = ndk2cpp_AudioFormatDescription(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = cpp2ndk_AudioFormatDescription(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip, AudioFormatDescriptionRoundTripTest,
+        testing::Values(make_AFD_Invalid(), make_AFD_Default(), make_AFD_Pcm16Bit(),
+                make_AFD_Bitstream(), make_AFD_Encap(), make_AFD_Encap_with_Enc()));
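
The parameterized round-trip test above depends on the printing templates being declared in the same namespaces as the AIDL-generated types, so that argument-dependent lookup can find the operator<< when gtest formats a failing value. A minimal standalone sketch of that idiom, using a made-up demo::Fake type instead of the real AIDL types (everything below is illustrative, not part of the patch):

    #include <iostream>
    #include <string>

    namespace demo {            // illustrative namespace, not from the patch
    struct Fake {               // stands in for an AIDL-generated parcelable
        int value = 0;
        std::string toString() const { return "Fake{" + std::to_string(value) + "}"; }
    };

    // Declared next to demo::Fake so that ADL can find it; SFINAE on toString()
    // keeps it from matching unrelated types.
    template <typename P>
    auto operator<<(std::ostream& os, const P& p) -> decltype(p.toString(), os) {
        return os << p.toString();
    }
    }  // namespace demo

    int main() {
        demo::Fake f{42};
        std::cout << f << std::endl;  // prints "Fake{42}" via the templated printer
        return 0;
    }

Had the operator been defined at global scope instead, ordinary printing of the namespaced types would not pick it up, which is why the macro is expanded once per AIDL namespace in the test above.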
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index c4a6601..d62dd91 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -7,11 +7,10 @@
     ioprio rt 4
     task_profiles ProcessCapacityHigh HighPerformance
     onrestart restart vendor.audio-hal
+    onrestart restart vendor.audio-hal-aidl
+    onrestart restart vendor.audio-effect-hal-aidl
     onrestart restart vendor.audio-hal-4-0-msd
     onrestart restart audio_proxy_service
-    # Keep the original service names for backward compatibility
-    onrestart restart vendor.audio-hal-2-0
-    onrestart restart audio-hal-2-0
 
 on property:vts.native_server.on=1
     stop audioserver
@@ -20,42 +19,39 @@
 
 on property:init.svc.audioserver=stopped
     stop vendor.audio-hal
+    stop vendor.audio-hal-aidl
+    stop vendor.audio-effect-hal-aidl
     stop vendor.audio-hal-4-0-msd
     stop audio_proxy_service
-    # Keep the original service names for backward compatibility
-    stop vendor.audio-hal-2-0
-    stop audio-hal-2-0
     # See b/155364397. Need to have HAL service running for VTS.
     # Can't use 'restart' because then HAL service would restart
     # audioserver bringing it back into running state.
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
+    start vendor.audio-effect-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    # Keep the original service names for backward compatibility
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
 
 on property:init.svc.audioserver=running
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
+    start vendor.audio-effect-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    # Keep the original service names for backward compatibility
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
 
 on property:sys.audio.restart.hal=1
     # See b/159966243. Avoid restart loop between audioserver and HAL.
     # Keep the original service names for backward compatibility
     stop vendor.audio-hal
+    stop vendor.audio-hal-aidl
+    stop vendor.audio-effect-hal-aidl
     stop vendor.audio-hal-4-0-msd
     stop audio_proxy_service
-    stop vendor.audio-hal-2-0
-    stop audio-hal-2-0
     start vendor.audio-hal
+    start vendor.audio-hal-aidl
+    start vendor.audio-effect-hal-aidl
     start vendor.audio-hal-4-0-msd
     start audio_proxy_service
-    start vendor.audio-hal-2-0
-    start audio-hal-2-0
     # reset the property
     setprop sys.audio.restart.hal 0
 
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index e3db5b4..1e3bfe0 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -50,6 +50,8 @@
 
 int main(int argc __unused, char **argv)
 {
+    ALOGD("%s: starting", __func__);
+    const auto startTime = std::chrono::steady_clock::now();
     // TODO: update with refined parameters
     limitProcessMemory(
         "audio.maxmem", /* "ro.audio.maxmem", property that defines limit */
@@ -144,11 +146,36 @@
             setpgid(0, 0);                      // but if I die first, don't kill my parent
         }
         android::hardware::configureRpcThreadpool(4, false /*callerWillJoin*/);
-        sp<ProcessState> proc(ProcessState::self());
+
+        // Ensure threads for possible callbacks.  Note that get_audio_flinger() does
+        // this automatically when called from AudioPolicy, but we do this here anyway.
+        ProcessState::self()->startThreadPool();
+
+        // Instantiating AudioFlinger (making it public, e.g. through ::initialize())
+        // and then instantiating AudioPolicy (and making it public)
+        // leads to situations where AudioFlinger is accessed remotely before
+        // AudioPolicy is initialized.  Not only might this
+        // cause inaccurate results, but if AudioPolicy has slow audio HAL
+        // initialization, it can cause a TimeCheck abort to occur on an AudioFlinger
+        // call which tries to access AudioPolicy.
+        //
+        // We create AudioFlinger and AudioPolicy locally then make it public to ServiceManager.
+        // This requires both AudioFlinger and AudioPolicy to be in-proc.
+        //
+        const auto af = sp<AudioFlinger>::make();
+        const auto afAdapter = sp<AudioFlingerServerAdapter>::make(af);
+        ALOGD("%s: AudioFlinger created", __func__);
+        ALOGW_IF(AudioSystem::setLocalAudioFlinger(af) != OK,
+                "%s: AudioSystem already has an AudioFlinger instance!", __func__);
+        const auto aps = sp<AudioPolicyService>::make();
+        ALOGD("%s: AudioPolicy created", __func__);
+
+        // Add AudioFlinger and AudioPolicy to ServiceManager.
         sp<IServiceManager> sm = defaultServiceManager();
-        ALOGI("ServiceManager: %p", sm.get());
-        AudioFlinger::instantiate();
-        AudioPolicyService::instantiate();
+        sm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME), afAdapter,
+                false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+        sm->addService(String16(AudioPolicyService::getServiceName()), aps,
+                false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
 
         // AAudioService should only be used in OC-MR1 and later.
         // And only enable the AAudioService if the system MMAP policy explicitly allows it.
@@ -156,7 +183,6 @@
         // If we cannot get audio flinger here, there must be some serious problems. In that case,
         // attempting to call audio flinger on a null pointer could make the process crash
         // and attract attentions.
-        sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
         std::vector<AudioMMapPolicyInfo> policyInfos;
         status_t status = af->getMmapPolicyInfos(
                 AudioMMapPolicyType::DEFAULT, &policyInfos);
@@ -169,11 +195,14 @@
             })) {
             AAudioService::instantiate();
         } else {
-            ALOGD("Do not init aaudio service, status %d, policy info size %zu",
-                  status, policyInfos.size());
+            ALOGD("%s: Do not init aaudio service, status %d, policy info size %zu",
+                  __func__, status, policyInfos.size());
         }
-
-        ProcessState::self()->startThreadPool();
+        const auto endTime = std::chrono::steady_clock::now();
+        using FloatMillis = std::chrono::duration<float, std::milli>;
+        const float timeTaken = std::chrono::duration_cast<FloatMillis>(
+                endTime - startTime).count();
+        ALOGI("%s: initialization done in %.3f ms, joining thread pool", __func__, timeTaken);
         IPCThreadState::self()->joinThreadPool();
     }
 }
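
The startup logging added to main() above times initialization with std::chrono::steady_clock and reports the result as fractional milliseconds via std::chrono::duration<float, std::milli>. A minimal standalone sketch of the same timing pattern (the sleep stands in for the real work; the printf harness is illustrative):

    #include <chrono>
    #include <cstdio>
    #include <thread>

    int main() {
        const auto startTime = std::chrono::steady_clock::now();

        // Placeholder for the work being measured (service construction, etc.).
        std::this_thread::sleep_for(std::chrono::milliseconds(25));

        const auto endTime = std::chrono::steady_clock::now();
        using FloatMillis = std::chrono::duration<float, std::milli>;
        const float timeTaken =
                std::chrono::duration_cast<FloatMillis>(endTime - startTime).count();
        std::printf("initialization done in %.3f ms\n", timeTaken);
        return 0;
    }

Casting to a float-based duration keeps sub-millisecond precision in the log line without manual unit conversion.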
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 90bb054..8a894f3 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -8,17 +8,6 @@
   ],
   "presubmit-large": [
     {
-      "name": "CtsMediaMiscTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
       "name": "CtsMediaAudioTestCases",
       "options": [
         {
@@ -35,50 +24,6 @@
           "exclude-filter": "android.media.audio.cts.AudioRecordTest"
         }
       ]
-    },
-    {
-      "name": "CtsMediaDecoderTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaEncoderTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaCodecTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
-    },
-    {
-      "name": "CtsMediaPlayerTestCases",
-      "options": [
-        {
-          "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
-        }
-      ]
     }
   ]
 }
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 4e4a9a1..d1b08bd 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -275,7 +275,8 @@
       mStreamInfo(nullptr),
       mSignalledError(false),
       mOutputPortDelay(kDefaultOutputPortDelay),
-      mOutputDelayRingBuffer(nullptr) {
+      mOutputDelayRingBuffer(nullptr),
+      mDeviceApiLevel(android_get_device_api_level()) {
 }
 
 C2SoftAacDec::~C2SoftAacDec() {
@@ -891,7 +892,7 @@
             work->worklets.front()->output.configUpdate.push_back(
                     C2Param::Copy(currentBoostFactor));
 
-            if (android_get_device_api_level() < __ANDROID_API_S__) {
+            if (mDeviceApiLevel < __ANDROID_API_S__) {
                 // We used to report DRC compression mode in the output format
                 // in Q and R, but stopped doing that in S
                 C2StreamDrcCompressionModeTuning::input currentCompressMode(0u,
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index b45f148..f85d45f 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -97,6 +97,7 @@
     int32_t mOutputDelayRingBufferWritePos;
     int32_t mOutputDelayRingBufferReadPos;
     int32_t mOutputDelayRingBufferFilled;
+    int mDeviceApiLevel;
     bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferSamplesAvailable();
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index a2a79d5..257cf4e 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -23,3 +23,23 @@
     srcs: ["C2SoftAomDec.cpp"],
     static_libs: ["libaom"],
 }
+
+cc_library {
+    name: "libcodec2_soft_av1enc",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    static_libs: ["libaom"],
+
+    srcs: ["C2SoftAomEnc.cpp"],
+
+    export_include_dirs: ["."],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 96b81d7..0eb47f4 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -578,7 +578,8 @@
     size_t srcVStride = img->stride[AOM_PLANE_V];
     C2PlanarLayout layout = wView.layout();
     size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
     if (img->fmt == AOM_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
@@ -592,7 +593,7 @@
                     std::static_pointer_cast<const C2ColorAspectsStruct>(defaultColorAspects));
         } else {
             convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUStride,
                                         mWidth, mHeight);
         }
     } else {
@@ -600,7 +601,7 @@
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
         convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
+                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight);
     }
     finishWork(*(int64_t*)img->user_priv, work, std::move(block));
     block = nullptr;
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
new file mode 100644
index 0000000..e08bf43
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -0,0 +1,1032 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAomEnc"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAomEnc.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
+
+#define DEFAULT_SPEED 10
+
+C2SoftAomEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+    : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_ENCODER,
+                                        C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+    noPrivateBuffers();  // TODO: account for our buffers here
+    noInputReferences();
+    noOutputReferences();
+    noInputLatency();
+    noTimeStretch();
+    setDerivedInstance(this);
+
+    addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+                         .withConstValue(new C2StreamUsageTuning::input(
+                                 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+                         .build());
+
+    addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                         .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                         .withFields({
+                                 C2F(mSize, width).inRange(2, 2048, 2),
+                                 C2F(mSize, height).inRange(2, 2048, 2),
+                         })
+                         .withSetter(SizeSetter)
+                         .build());
+
+    addParameter(DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+                         .withDefault(new C2StreamBitrateModeTuning::output(
+                                 0u, C2Config::BITRATE_VARIABLE))
+                         .withFields({C2F(mBitrateMode, value)
+                                              .oneOf({C2Config::BITRATE_CONST,
+                                                      C2Config::BITRATE_VARIABLE,
+                                                      C2Config::BITRATE_IGNORE})})
+                         .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+                         .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+                         // TODO: More restriction?
+                         .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+                         .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+                         .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+                         .withFields({C2F(mSyncFramePeriod, value).any()})
+                         .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+                         .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+                         .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+                         .withSetter(BitrateSetter)
+                         .build());
+
+    addParameter(DefineParam(mComplexity, C2_PARAMKEY_COMPLEXITY)
+                         .withDefault(new C2StreamComplexityTuning::output(0u, 0))
+                         .withFields({C2F(mComplexity, value).inRange(0, 5)})
+                         .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
+                         .withDefault(new C2StreamQualityTuning::output(0u, 80))
+                         .withFields({C2F(mQuality, value).inRange(0, 100)})
+                         .withSetter(Setter<decltype(*mQuality)>::NonStrictValueWithNoDeps)
+                         .build());
+
+    addParameter(DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+                         .withConstValue(new C2StreamIntraRefreshTuning::output(
+                                 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+                         .build());
+
+    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                         .withDefault(new C2StreamProfileLevelInfo::output(0u, PROFILE_AV1_0,
+                                                                           LEVEL_AV1_4_1))
+                         .withFields({
+                                 C2F(mProfileLevel, profile).equalTo(PROFILE_AV1_0),
+                                 C2F(mProfileLevel, level)
+                                    .oneOf({LEVEL_AV1_2, LEVEL_AV1_2_1, LEVEL_AV1_2_2,
+                                            LEVEL_AV1_2_3, LEVEL_AV1_3, LEVEL_AV1_3_1,
+                                            LEVEL_AV1_3_2, LEVEL_AV1_3_3, LEVEL_AV1_4,
+                                            LEVEL_AV1_4_1}),
+                         })
+                         .withSetter(ProfileLevelSetter)
+                         .build());
+
+    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+                                          HAL_PIXEL_FORMAT_YCBCR_420_888};
+    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+    }
+    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                         .withDefault(new C2StreamPixelFormatInfo::input(
+                              0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                         .withFields({C2F(mPixelFormat, value).oneOf({pixelFormats})})
+                         .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                         .build());
+
+    addParameter(DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+                         .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+                         .withFields({C2F(mRequestSync, value).oneOf({C2_FALSE, C2_TRUE})})
+                         .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+                         .build());
+    addParameter(
+            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                    .withDefault(new C2StreamColorAspectsInfo::input(
+                            0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                    .withFields(
+                            {C2F(mColorAspects, range)
+                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                             C2F(mColorAspects, primaries)
+                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                              C2Color::PRIMARIES_OTHER),
+                             C2F(mColorAspects, transfer)
+                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                              C2Color::TRANSFER_OTHER),
+                             C2F(mColorAspects, matrix)
+                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+                    .withSetter(ColorAspectsSetter)
+                    .build());
+
+    addParameter(
+            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                    .withDefault(new C2StreamColorAspectsInfo::output(
+                            0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                            C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                    .withFields(
+                            {C2F(mCodedColorAspects, range)
+                                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                             C2F(mCodedColorAspects, primaries)
+                                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                              C2Color::PRIMARIES_OTHER),
+                             C2F(mCodedColorAspects, transfer)
+                                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                              C2Color::TRANSFER_OTHER),
+                             C2F(mCodedColorAspects, matrix)
+                                     .inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
+                    .withSetter(CodedColorAspectsSetter, mColorAspects)
+                    .build());
+}
+
+C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.value < 4096) {
+        me.set().value = 4096;
+    }
+    return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::SizeSetter(bool mayBlock,
+                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                                       C2P<C2StreamPictureSizeInfo::input>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+        me.set().width = oldMe.v.width;
+    }
+    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+        me.set().height = oldMe.v.height;
+    }
+    return res;
+}
+
+C2R C2SoftAomEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+    (void)mayBlock;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_AV1_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_AV1_4_1;
+    }
+    return C2R::Ok();
+}
+
+uint32_t C2SoftAomEnc::IntfImpl::getSyncFramePeriod() const {
+    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+        return 0;
+    }
+    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+
+C2R C2SoftAomEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+                                               C2P<C2StreamColorAspectsInfo::input>& me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+        me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+        me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+        me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+        me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+}
+C2R C2SoftAomEnc::IntfImpl::CodedColorAspectsSetter(
+        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+        const C2P<C2StreamColorAspectsInfo::input>& coded) {
+    (void)mayBlock;
+    me.set().range = coded.v.range;
+    me.set().primaries = coded.v.primaries;
+    me.set().transfer = coded.v.transfer;
+    me.set().matrix = coded.v.matrix;
+    return C2R::Ok();
+}
+
+C2SoftAomEnc::C2SoftAomEnc(const char* name, c2_node_id_t id,
+                           const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mCodecContext(nullptr),
+      mCodecConfiguration(nullptr),
+      mCodecInterface(nullptr),
+      mStrideAlign(2),
+      mBitrateControlMode(AOM_VBR),
+      mMinQuantizer(0),
+      mMaxQuantizer(0),
+      mLastTimestamp(INT64_MAX),
+      mSignalledOutputEos(false),
+      mSignalledError(false),
+      mHeadersReceived(false),
+      mIs10Bit(false) {
+    ALOGV("Constructor");
+}
+
+C2SoftAomEnc::~C2SoftAomEnc() {
+    ALOGV("Destructor");
+    onRelease();
+}
+
+c2_status_t C2SoftAomEnc::onInit() {
+    return C2_OK;
+}
+
+c2_status_t C2SoftAomEnc::onStop() {
+    onRelease();
+    return C2_OK;
+}
+
+void C2SoftAomEnc::onReset() {
+    (void)onStop();
+}
+
+void C2SoftAomEnc::onRelease() {
+    if (mCodecContext) {
+        aom_codec_destroy(mCodecContext);
+        delete mCodecContext;
+        mCodecContext = nullptr;
+    }
+
+    if (mCodecConfiguration) {
+        delete mCodecConfiguration;
+        mCodecConfiguration = nullptr;
+    }
+
+    // this one is not allocated by us
+    mCodecInterface = nullptr;
+    mHeadersReceived = false;
+}
+
+c2_status_t C2SoftAomEnc::onFlush_sm() {
+    return onStop();
+}
+
+// c2Quality is in the range 0-100 (higher is better);
+// for AOM we map it onto the CQ range 15-50 (lower is better).
+static int MapC2QualityToAOMQuality(int c2Quality) {
+    return 15 + 35 * (100 - c2Quality) / 100;
+}
+
+static int MapC2ComplexityToAOMSpeed(int c2Complexity) {
+    int mapping[6] = {10, 9, 8, 7, 6, 6};
+    if (c2Complexity > 5 || c2Complexity < 0) {
+        ALOGW("Wrong complexity setting. Falling back to speed 10");
+        return 10;
+    }
+    return mapping[c2Complexity];
+}
+
+aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
+    aom_codec_err_t codec_return = AOM_CODEC_OK;
+
+    codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED,
+                                     MapC2ComplexityToAOMSpeed(mComplexity->value));
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ROW_MT, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CDEF, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TPL_MODEL, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_DELTAQ_MODE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ORDER_HINT, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_AQ_MODE, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MODE_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MV_COST_UPD_FREQ, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PALETTE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_OBMC, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_NOISE_SENSITIVITY, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_WARPED_MOTION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_GLOBAL_MOTION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_REF_FRAME_MVS, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_CFL_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_ANGLE_DELTA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_FILTER_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_DISABLE_TRELLIS_QUANT, 1);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIST_WTD_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DIFF_WTD_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_DUAL_FILTER, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_INTRABC, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_MASKED_COMP, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_PAETH_INTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_QM, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RECT_PARTITIONS, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_RESTORATION, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_ENABLE_TX64, 0);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MAX_REFERENCE_FRAMES, 3);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+    if (mBitrateControlMode == AOM_Q) {
+        const int aomCQLevel = MapC2QualityToAOMQuality(mQuality->value);
+        ALOGV("Set Q from %d to CQL %d",
+              mQuality->value, aomCQLevel);
+
+        codec_return = aom_codec_control(mCodecContext, AOME_SET_CQ_LEVEL, aomCQLevel);
+        if (codec_return != AOM_CODEC_OK) goto BailOut;
+    }
+
+    ColorAspects sfAspects;
+    if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+        sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+        sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+        sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+    }
+    if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+        sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+    }
+    int32_t primaries, transfer, matrixCoeffs;
+    bool range;
+    ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+            &primaries,
+            &transfer,
+            &matrixCoeffs,
+            &range);
+
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_COLOR_RANGE, range);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_COLOR_PRIMARIES, primaries);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_TRANSFER_CHARACTERISTICS, transfer);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+    codec_return = aom_codec_control(mCodecContext, AV1E_SET_MATRIX_COEFFICIENTS, matrixCoeffs);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
+BailOut:
+    return codec_return;
+}
+
+status_t C2SoftAomEnc::initEncoder() {
+    aom_codec_err_t codec_return;
+    status_t result = UNKNOWN_ERROR;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        // Fetch config
+        mSize = mIntf->getSize_l();
+        mBitrate = mIntf->getBitrate_l();
+        mBitrateMode = mIntf->getBitrateMode_l();
+        mFrameRate = mIntf->getFrameRate_l();
+        mIntraRefresh = mIntf->getIntraRefresh_l();
+        mRequestSync = mIntf->getRequestSync_l();
+        mColorAspects = mIntf->getCodedColorAspects_l();
+        mQuality = mIntf->getQuality_l();
+        mComplexity = mIntf->getComplexity_l();
+    }
+
+
+    switch (mBitrateMode->value) {
+        case C2Config::BITRATE_CONST:
+            mBitrateControlMode = AOM_CBR;
+            break;
+        case C2Config::BITRATE_IGNORE:
+            mBitrateControlMode = AOM_Q;
+            break;
+        case C2Config::BITRATE_VARIABLE:
+            [[fallthrough]];
+        default:
+            mBitrateControlMode = AOM_VBR;
+            break;
+    }
+
+    mCodecInterface = aom_codec_av1_cx();
+    if (!mCodecInterface) goto CleanUp;
+
+    ALOGD("AOM: initEncoder. BRMode: %u. KF: %u. QP: %u - %u, 10Bit: %d, comlexity %d",
+          (uint32_t)mBitrateControlMode,
+          mIntf->getSyncFramePeriod(), mMinQuantizer, mMaxQuantizer, mIs10Bit, mComplexity->value);
+
+    mCodecConfiguration = new aom_codec_enc_cfg_t;
+    if (!mCodecConfiguration) goto CleanUp;
+
+    codec_return = aom_codec_enc_config_default(mCodecInterface, mCodecConfiguration,
+                                                AOM_USAGE_REALTIME);  // RT mode
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error populating default configuration for aom encoder.");
+        goto CleanUp;
+    }
+
+    mCodecConfiguration->g_w = mSize->width;
+    mCodecConfiguration->g_h = mSize->height;
+    mCodecConfiguration->g_bit_depth = mIs10Bit ? AOM_BITS_10 : AOM_BITS_8;
+    mCodecConfiguration->g_input_bit_depth = mIs10Bit ? 10 : 8;
+
+
+    mCodecConfiguration->g_threads = 0;
+    mCodecConfiguration->g_error_resilient = 0;
+
+    // The timebase unit is one microsecond:
+    // g_timebase is expressed as a fraction of a second (1/1000000 s).
+    mCodecConfiguration->g_timebase.num = 1;
+    mCodecConfiguration->g_timebase.den = 1000000;
+    // rc_target_bitrate is in kbps, mBitrate in bps
+    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+    mCodecConfiguration->rc_end_usage = mBitrateControlMode == AOM_Q ? AOM_Q : AOM_CBR;
+    // Disable frame drop - not allowed in MediaCodec now.
+    mCodecConfiguration->rc_dropframe_thresh = 0;
+    // Disable lagged encoding.
+    mCodecConfiguration->g_lag_in_frames = 0;
+
+    // Disable spatial resizing.
+    mCodecConfiguration->rc_resize_mode = 0;
+    // Single-pass mode.
+    mCodecConfiguration->g_pass = AOM_RC_ONE_PASS;
+
+    // Maximum key frame interval - for CBR, boost to 3000 frames.
+    mCodecConfiguration->kf_max_dist = 3000;
+    // Encoder determines optimal key frame placement automatically.
+    mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+    // The amount of data that may be buffered by the decoding
+    // application in ms.
+    mCodecConfiguration->rc_buf_sz = 1000;
+
+    if (mBitrateControlMode == AOM_CBR) {
+        // Initial value of the buffer level in ms.
+        mCodecConfiguration->rc_buf_initial_sz = 500;
+        // Amount of data that the encoder should try to maintain in ms.
+        mCodecConfiguration->rc_buf_optimal_sz = 600;
+        // Maximum amount of bits that can be subtracted from the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_undershoot_pct = 100;
+        // Maximum amount of bits that can be added to the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_overshoot_pct = 10;
+    } else {
+        // Maximum amount of bits that can be subtracted from the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_undershoot_pct = 100;
+        // Maximum amount of bits that can be added to the target
+        // bitrate - expressed as percentage of the target bitrate.
+        mCodecConfiguration->rc_overshoot_pct = 100;
+    }
+
+    if (mIntf->getSyncFramePeriod() >= 0) {
+        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
+        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
+        mCodecConfiguration->kf_mode = AOM_KF_AUTO;
+    }
+    if (mMinQuantizer > 0) {
+        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+    }
+    if (mMaxQuantizer > 0) {
+        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+    } else {
+        if (mBitrateControlMode == AOM_VBR) {
+            // For VBR we are limiting MaxQP to 52 (down 11 steps) to maintain quality
+            // 52 comes from experiments done on libaom standalone app
+            mCodecConfiguration->rc_max_quantizer = 52;
+        }
+    }
+
+    mCodecContext = new aom_codec_ctx_t;
+    if (!mCodecContext) goto CleanUp;
+    codec_return = aom_codec_enc_init(mCodecContext, mCodecInterface, mCodecConfiguration,
+                                      mIs10Bit ? AOM_CODEC_USE_HIGHBITDEPTH : 0);
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error initializing aom encoder");
+        goto CleanUp;
+    }
+
+    codec_return = setupCodecParameters();
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("Error setting up codec parameters");
+        goto CleanUp;
+    }
+
+    mHeadersReceived = false;
+
+    {
+        uint32_t width = mSize->width;
+        uint32_t height = mSize->height;
+        if (((uint64_t)width * height) > ((uint64_t)INT32_MAX / 3)) {
+            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
+        } else {
+            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / (mIs10Bit? 1 : 2));
+            if (!mConversionBuffer.size()) {
+                ALOGE("Allocating conversion buffer failed.");
+            } else {
+                mNumInputFrames = -1;
+                return OK;
+            }
+        }
+    }
+
+CleanUp:
+    onRelease();
+    return result;
+}
+
+void C2SoftAomEnc::process(const std::unique_ptr<C2Work>& work,
+                           const std::shared_ptr<C2BlockPool>& pool) {
+    // Initialize output work
+    work->result = C2_OK;
+    work->workletsProcessed = 1u;
+    work->worklets.front()->output.flags = work->input.flags;
+
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    std::shared_ptr<C2GraphicView> rView;
+    std::shared_ptr<C2Buffer> inputBuffer;
+    if (!work->input.buffers.empty()) {
+        inputBuffer = work->input.buffers[0];
+        rView = std::make_shared<C2GraphicView>(
+                inputBuffer->data().graphicBlocks().front().map().get());
+        if (rView->error() != C2_OK) {
+            ALOGE("graphic view map err = %d", rView->error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    } else {
+        ALOGV("Empty input Buffer");
+        uint32_t flags = 0;
+        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+        return;
+    }
+
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+    aom_image_t raw_frame;
+    const C2PlanarLayout& layout = rView->layout();
+    if (!mHeadersReceived) {
+        mIs10Bit = (layout.planes[layout.PLANE_Y].bitDepth == 10);
+
+        // Re-Initialize encoder
+        if (mCodecContext){
+            onRelease();
+        }
+    }
+    if (!mCodecContext && OK != initEncoder()) {
+        ALOGE("Failed to initialize encoder");
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        return;
+    }
+
+    // (b/279387842)
+    // Workaround for an incorrect crop size in the view when using surface mode.
+    rView->setCrop_be(C2Rect(mSize->width, mSize->height));
+
+    if (!mHeadersReceived) {
+        Av1Config av1_config;
+        constexpr uint32_t header_length = 2048;
+        uint8_t header[header_length];
+        size_t header_bytes;
+        aom_fixed_buf_t* obu_sequence_header = aom_codec_get_global_headers(mCodecContext);
+        int ret = 1;
+        if (obu_sequence_header) {
+            if (get_av1config_from_obu(reinterpret_cast<const uint8_t*>(obu_sequence_header->buf),
+                                       obu_sequence_header->sz, false, &av1_config) == 0) {
+                ret = write_av1config(&av1_config, header_length, &header_bytes, header);
+
+            } else {
+                ALOGE("Can not get config");
+            }
+            free(obu_sequence_header->buf);
+            free(obu_sequence_header);
+        }
+
+        if (ret) {
+            ALOGE("Can not write config");
+            mSignalledError = true;
+            work->result = C2_NO_MEMORY;
+            work->workletsProcessed = 1u;
+            return;
+        }
+
+        mHeadersReceived = true;
+        std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                C2StreamInitDataInfo::output::AllocUnique(header_bytes, 0u);
+        if (!csd) {
+            ALOGE("CSD allocation failed");
+            mSignalledError = true;
+            work->result = C2_NO_MEMORY;
+            work->workletsProcessed = 1u;
+            return;
+        }
+        memcpy(csd->m.value, header, header_bytes);
+        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+        ALOGV("CSD Produced of size %zu bytes", header_bytes);
+    }
+
+    const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
+    if (inBuffer.width() < mSize->width || inBuffer.height() < mSize->height) {
+        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)", inBuffer.width(), mSize->width,
+              inBuffer.height(), mSize->height);
+        mSignalledError = true;
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+
+    uint32_t width = mSize->width;
+    uint32_t height = mSize->height;
+    if (width > 0x8000 || height > 0x8000) {
+        ALOGE("Image too big: %u x %u", width, height);
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
+    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_RGB:
+        case C2PlanarLayout::TYPE_RGBA: {
+            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
+            {
+                IntfImpl::Lock lock = mIntf->lock();
+                colorAspects = mIntf->getCodedColorAspects_l();
+            }
+            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
+                                  mConversionBuffer.size(), *rView.get(), colorAspects->matrix,
+                                  colorAspects->range);
+            aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+                         mConversionBuffer.data());
+            break;
+        }
+        case C2PlanarLayout::TYPE_YUV: {
+            const bool isYUV420_10bit = IsYUV420_10bit(*rView);
+            if (!IsYUV420(*rView) && !isYUV420_10bit) {
+                ALOGE("input is not YUV420");
+                work->result = C2_BAD_VALUE;
+                return;
+            }
+            if (!isYUV420_10bit) {
+                if (IsI420(*rView)) {
+                    // I420 compatible - though with custom offset and stride
+                    aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, width, height, mStrideAlign,
+                                 (uint8_t*)rView->data()[0]);
+                    raw_frame.planes[1] = (uint8_t*)rView->data()[1];
+                    raw_frame.planes[2] = (uint8_t*)rView->data()[2];
+                    raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
+                    raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
+                    raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
+                } else {
+                    // TODO(kyslov): Add image wrap for NV12
+                    // copy to I420
+                    MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
+                    if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
+                        status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
+                        if (err != OK) {
+                            ALOGE("Buffer conversion failed: %d", err);
+                            work->result = C2_BAD_VALUE;
+                            return;
+                        }
+                        aom_img_wrap(&raw_frame, AOM_IMG_FMT_I420, stride, vstride, mStrideAlign,
+                                     mConversionBuffer.data());
+                        aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+                    } else {
+                        ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+                              mConversionBuffer.size());
+                        work->result = C2_BAD_VALUE;
+                        return;
+                    }
+                }
+            } else {  // 10 bits
+                if (IsP010(*rView)) {
+                    if (mConversionBuffer.size() >= stride * vstride * 3) {
+                        uint16_t *dstY, *dstU, *dstV;
+                        dstY = (uint16_t*)mConversionBuffer.data();
+                        dstU = dstY + stride * vstride;
+                        dstV = dstU + (stride * vstride) / 4;
+                        convertP010ToYUV420Planar16(dstY, dstU, dstV, (uint16_t*)(rView->data()[0]),
+                                                    (uint16_t*)(rView->data()[1]),
+                                                    layout.planes[layout.PLANE_Y].rowInc / 2,
+                                                    layout.planes[layout.PLANE_U].rowInc / 2,
+                                                    stride, stride / 2, stride / 2, stride,
+                                                    vstride);
+                        aom_img_wrap(&raw_frame, AOM_IMG_FMT_I42016, stride, vstride, mStrideAlign,
+                                     mConversionBuffer.data());
+                        aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+                    } else {
+                        ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+                              mConversionBuffer.size());
+                        work->result = C2_BAD_VALUE;
+                        return;
+                    }
+                } else {
+                    ALOGE("Image format conversion is not supported.");
+                    work->result = C2_BAD_VALUE;
+                    return;
+                }
+            }
+            break;
+        }
+        case C2PlanarLayout::TYPE_YUVA: {
+            if (mConversionBuffer.size() >= stride * vstride * 3) {
+                uint16_t *dstY, *dstU, *dstV;
+                dstY = (uint16_t*)mConversionBuffer.data();
+                dstU = dstY + stride * vstride;
+                dstV = dstU + (stride * vstride) / 4;
+                convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(rView->data()[0]),
+                                                   layout.planes[layout.PLANE_Y].rowInc / 4, stride,
+                                                   vstride, mColorAspects->matrix,
+                                                   mColorAspects->range);
+                aom_img_wrap(&raw_frame, AOM_IMG_FMT_I42016, stride, vstride, mStrideAlign,
+                                mConversionBuffer.data());
+                aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+            } else {
+                ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+                        mConversionBuffer.size());
+                work->result = C2_BAD_VALUE;
+                return;
+            }
+            break;
+        }
+
+        default:
+            ALOGE("Unrecognized plane type: %d", layout.type);
+            work->result = C2_BAD_VALUE;
+            return;
+    }
+
+    aom_enc_frame_flags_t flags = 0;
+    // handle dynamic config parameters
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh =
+                mIntf->getIntraRefresh_l();
+        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync =
+                mIntf->getRequestSync_l();
+        lock.unlock();
+
+        if (intraRefresh != mIntraRefresh) {
+            mIntraRefresh = intraRefresh;
+            ALOGV("Got mIntraRefresh request");
+        }
+
+        if (requestSync != mRequestSync) {
+            // we can handle IDR immediately
+            if (requestSync->value) {
+                // unset request
+                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+                std::vector<std::unique_ptr<C2SettingResult>> failures;
+                mIntf->config({&clearSync}, C2_MAY_BLOCK, &failures);
+                ALOGV("Got sync request");
+                flags |= AOM_EFLAG_FORCE_KF;
+            }
+            mRequestSync = requestSync;
+        }
+
+        if (bitrate != mBitrate) {
+            mBitrate = bitrate;
+            mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
+            aom_codec_err_t res = aom_codec_enc_config_set(mCodecContext, mCodecConfiguration);
+            if (res != AOM_CODEC_OK) {
+                ALOGE("aom encoder failed to update bitrate: %s", aom_codec_err_to_string(res));
+                mSignalledError = true;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+        }
+    }
+
+    uint64_t input_timestamp = work->input.ordinal.timestamp.peekull();
+    uint32_t frame_duration;
+    if (input_timestamp > mLastTimestamp) {
+        frame_duration = (uint32_t)(input_timestamp - mLastTimestamp);
+    } else {
+        // Use default of 30 fps in case of 0 frame rate.
+        float frame_rate = mFrameRate->value;
+        if (frame_rate < 0.001) {
+            frame_rate = 30.0;
+        }
+        frame_duration = (uint32_t)(1000000 / frame_rate + 0.5);
+    }
+    mLastTimestamp = input_timestamp;
+
+    aom_codec_err_t codec_return =
+            aom_codec_encode(mCodecContext, &raw_frame, input_timestamp, frame_duration, flags);
+    if (codec_return != AOM_CODEC_OK) {
+        ALOGE("aom encoder failed to encode frame");
+        mSignalledError = true;
+        work->result = C2_CORRUPTED;
+        return;
+    }
+
+    bool populated = false;
+    aom_codec_iter_t encoded_packet_iterator = nullptr;
+    const aom_codec_cx_pkt_t* encoded_packet;
+    while ((encoded_packet = aom_codec_get_cx_data(mCodecContext, &encoded_packet_iterator))) {
+        if (encoded_packet->kind == AOM_CODEC_CX_FRAME_PKT) {
+            std::shared_ptr<C2LinearBlock> block;
+            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
+            if (err != C2_OK) {
+                ALOGE("fetchLinearBlock for Output failed with status %d", err);
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            C2WriteView wView = block->map().get();
+            if (wView.error()) {
+                ALOGE("write view map failed %d", wView.error());
+                work->result = C2_CORRUPTED;
+                return;
+            }
+
+            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
+            ++mNumInputFrames;
+
+            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+            uint32_t flags = 0;
+            if (end_of_stream) {
+                flags |= C2FrameData::FLAG_END_OF_STREAM;
+            }
+
+            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+            work->worklets.front()->output.buffers.clear();
+            std::shared_ptr<C2Buffer> buffer =
+                    createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
+            if (encoded_packet->data.frame.flags & AOM_FRAME_IS_KEY) {
+                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+                        0u /* stream id */, C2Config::SYNC_FRAME));
+            }
+            work->worklets.front()->output.buffers.push_back(buffer);
+            work->worklets.front()->output.ordinal = work->input.ordinal;
+            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
+            work->workletsProcessed = 1u;
+            populated = true;
+            if (end_of_stream) {
+                mSignalledOutputEos = true;
+                ALOGV("signalled End Of Stream");
+            }
+        }
+    }
+    if (!populated) {
+        work->workletsProcessed = 0u;
+    }
+}
+
+c2_status_t C2SoftAomEnc::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    (void)pool;
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    return C2_OK;
+}
+
+class C2SoftAomEncFactory : public C2ComponentFactory {
+  public:
+    C2SoftAomEncFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftAomEnc(COMPONENT_NAME, id,
+                                 std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftAomEnc::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftAomEnc::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftAomEncFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftAomEncFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
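The two exported symbols above are the entry points a Codec2 component loader resolves at runtime. Below is a minimal, hedged sketch of such a lookup; the dlopen-based loading and the library name "libcodec2_soft_av1enc.so" are assumptions for illustration only, not the framework's actual loader.

    // Illustrative sketch only: resolving the factory symbols exported above.
    #include <dlfcn.h>
    #include <C2Component.h>

    typedef ::C2ComponentFactory* (*CreateFactoryFn)();
    typedef void (*DestroyFactoryFn)(::C2ComponentFactory*);

    static void probeAomEncFactory() {
        // Assumed library name for this sketch.
        void* handle = dlopen("libcodec2_soft_av1enc.so", RTLD_NOW | RTLD_LOCAL);
        if (handle == nullptr) return;
        auto create = (CreateFactoryFn)dlsym(handle, "CreateCodec2Factory");
        auto destroy = (DestroyFactoryFn)dlsym(handle, "DestroyCodec2Factory");
        if (create != nullptr && destroy != nullptr) {
            ::C2ComponentFactory* factory = create();
            // ... factory->createComponent() / factory->createInterface() ...
            destroy(factory);
        }
        dlclose(handle);
    }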
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
new file mode 100644
index 0000000..3067735
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AV1_ENC_H_
+#define ANDROID_C2_SOFT_AV1_ENC_H_
+
+#include <inttypes.h>
+
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Component.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "aom/aom_encoder.h"
+#include "aom/aomcx.h"
+#include "common/av1_config.h"
+
+namespace android {
+struct C2SoftAomEnc : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftAomEnc(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+
+    // From SimpleC2Component
+    c2_status_t onInit() override final;
+    c2_status_t onStop() override final;
+    void onReset() override final;
+    void onRelease() override final;
+    c2_status_t onFlush_sm() override final;
+
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override final;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override final;
+
+  protected:
+    virtual ~C2SoftAomEnc();
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    // Initializes aom encoder with available settings.
+    status_t initEncoder();
+
+    // aom specific opaque data structure that
+    // stores encoder state
+    aom_codec_ctx_t* mCodecContext;
+
+    // aom specific data structure that
+    // stores encoder configuration
+    aom_codec_enc_cfg_t* mCodecConfiguration;
+
+    // aom specific read-only data structure
+    // that specifies algorithm interface
+    aom_codec_iface_t* mCodecInterface;
+
+    // align stride to a power of 2
+    int32_t mStrideAlign;
+
+    aom_rc_mode mBitrateControlMode;
+
+    // Minimum (best quality) quantizer
+    uint32_t mMinQuantizer;
+
+    // Maximum (worst quality) quantizer
+    uint32_t mMaxQuantizer;
+
+    // Last input buffer timestamp
+    uint64_t mLastTimestamp;
+
+    // Number of input frames
+    int64_t mNumInputFrames;
+
+    // Conversion buffer used to convert the input
+    // to yuv420 planar format.
+    MemoryBlock mConversionBuffer;
+
+    // Signalled End Of Stream
+    bool mSignalledOutputEos;
+
+    // Signalled Error
+    bool mSignalledError;
+
+    bool mHeadersReceived;
+
+    bool mIs10Bit;
+
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+
+    aom_codec_err_t setupCodecParameters();
+};
+
+class C2SoftAomEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper);
+
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me);
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::input>& me);
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me);
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+        return mIntraRefresh;
+    }
+    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+    std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
+    std::shared_ptr<C2StreamComplexityTuning::output> getComplexity_l() const {
+      return mComplexity;
+    }
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
+    }
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+        return mRequestSync;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
+    std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+    uint32_t getSyncFramePeriod() const;
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                       const C2P<C2StreamColorAspectsInfo::input>& coded);
+
+  private:
+    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+    std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+    std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+    std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
+    std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
+    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+
+};
+
+}  // namespace android
+#endif  // ANDROID_C2_SOFT_AV1_ENC_H_
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 953afc5..96a4c4a 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -671,6 +671,9 @@
 void C2SoftAvcDec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftAvcDec::deleteDecoder() {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 8b46d3f..e424860 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -334,7 +334,10 @@
         // By default needsUpdate = false in case the supplied level does meet
         // the requirements. For Level 1b, we want to update the level anyway,
         // so we set it to true in that case.
-        bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
+        bool needsUpdate = false;
+        if (me.v.level == LEVEL_AVC_1B || !me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
         for (const LevelLimits &limit : kLimits) {
             if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
                     bitrate.v.value <= limit.bitrate) {
@@ -356,7 +359,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_AVC_5) {
             // We set to the highest supported level.
             me.set().level = LEVEL_AVC_5;
         }
@@ -392,9 +395,9 @@
     static C2R PictureQuantizationSetter(bool mayBlock,
                                          C2P<C2StreamPictureQuantizationTuning::output> &me) {
         (void)mayBlock;
-        (void)me;
 
-        // TODO: refactor with same algorithm in the SetQp()
+        // these are the values we are going to set, so default them
+        // to the codec's DEFAULT values
         int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
         int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
 
@@ -419,13 +422,14 @@
         ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
               iMin, iMax, pMin, pMax, bMin, bMax);
 
-        // ensure we have legal values
-        iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
-        iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
-        pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
-        pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
-        bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
-        bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+        // min is clamped to [AVC_QP_MIN, max] to avoid error
+        // cases where layer.min > layer.max
+        iMax = std::clamp(iMax, AVC_QP_MIN, AVC_QP_MAX);
+        iMin = std::clamp(iMin, AVC_QP_MIN, iMax);
+        pMax = std::clamp(pMax, AVC_QP_MIN, AVC_QP_MAX);
+        pMin = std::clamp(pMin, AVC_QP_MIN, pMax);
+        bMax = std::clamp(bMax, AVC_QP_MIN, AVC_QP_MAX);
+        bMin = std::clamp(bMin, AVC_QP_MIN, bMax);
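        // A worked check of the clamping order above (illustrative): if a caller
        // supplies iMin = 60 and iMax = 20, iMax is clamped to [4, 51] and stays
        // 20, then iMin is clamped to [AVC_QP_MIN, iMax] = [4, 20] and becomes 20,
        // so the resulting range is never inverted (iMin <= iMax always holds).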
 
         // put them back into the structure
         for (size_t i = 0; i < me.v.flexCount(); ++i) {
@@ -819,7 +823,8 @@
     s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
     s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
 
-    // TODO: refactor with same algorithm in the PictureQuantizationSetter()
+    // we resolved out-of-bound and unspecified values in PictureQuantizationSetter()
+    // so we can start with defaults that are overridden as needed.
     int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
     int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
 
@@ -1766,17 +1771,20 @@
     //         }
     //     }
     // }
-    std::shared_ptr<const C2GraphicView> view;
+    std::shared_ptr<C2GraphicView> view;
     std::shared_ptr<C2Buffer> inputBuffer;
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
-        view = std::make_shared<const C2GraphicView>(
+        view = std::make_shared<C2GraphicView>(
                 inputBuffer->data().graphicBlocks().front().map().get());
         if (view->error() != C2_OK) {
             ALOGE("graphic view map err = %d", view->error());
             work->workletsProcessed = 1u;
             return;
         }
+        // (b/232396154)
+        // Workaround for incorrect crop size in view when using surface mode.
+        view->setCrop_be(C2Rect(mSize->width, mSize->height));
     }
 
     do {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 293867d..cde6604 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -103,8 +103,8 @@
 /** limits as specified by h264
  *  (QP_MIN==4 is actually a limitation of this SW codec, not the H.264 standard)
  **/
-#define CODEC_QP_MIN                4
-#define CODEC_QP_MAX                51
+#define AVC_QP_MIN                4
+#define AVC_QP_MAX                51
 
 
 #define MIN(a, b) ((a) < (b))? (a) : (b)
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 9d4f049..55a1164 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -38,8 +38,8 @@
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
-                                size_t dstUVStride, uint32_t width, uint32_t height,
-                                bool isMonochrome) {
+                                size_t dstUStride, size_t dstVStride, uint32_t width,
+                                uint32_t height, bool isMonochrome) {
     for (size_t i = 0; i < height; ++i) {
         memcpy(dstY, srcY, width);
         srcY += srcYStride;
@@ -51,8 +51,8 @@
         for (size_t i = 0; i < (height + 1) / 2; ++i) {
             memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
             memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
-            dstV += dstUVStride;
-            dstU += dstUVStride;
+            dstV += dstVStride;
+            dstU += dstUStride;
         }
         return;
     }
@@ -60,13 +60,13 @@
     for (size_t i = 0; i < (height + 1) / 2; ++i) {
         memcpy(dstV, srcV, (width + 1) / 2);
         srcV += srcVStride;
-        dstV += dstUVStride;
+        dstV += dstVStride;
     }
 
     for (size_t i = 0; i < (height + 1) / 2; ++i) {
         memcpy(dstU, srcU, (width + 1) / 2);
         srcU += srcUStride;
-        dstU += dstUVStride;
+        dstU += dstUStride;
     }
 }
 
@@ -190,7 +190,7 @@
 // matrix conversion coefficients
 // (see media/libstagefright/colorconverter/ColorConverter.cpp for more details)
 struct Coeffs {
-    int32_t _y, _b_u, _g_u, _g_v, _r_v, _c16;
+    int32_t _y, _r_v, _g_u, _g_v, _b_u, _c16;
 };
 
 static const struct Coeffs GetCoeffsForAspects(const C2ColorAspectsStruct &aspects) {
@@ -414,6 +414,98 @@
         dstUV += dstUVStride;
     }
 }
+
+void convertP010ToYUV420Planar16(uint16_t *dstY, uint16_t *dstU, uint16_t *dstV,
+                                 const uint16_t *srcY, const uint16_t *srcUV,
+                                 size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+                                 size_t dstUStride, size_t dstVStride, size_t width,
+                                 size_t height, bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] >> 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            for (size_t x = 0; x < (width + 1) / 2; ++x) {
+                dstU[x] = kNeutralUVBitDepth10;
+                dstV[x] = kNeutralUVBitDepth10;
+            }
+            dstU += dstUStride;
+            dstV += dstVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstU[x] = srcUV[2 * x] >> 6;
+            dstV[x] = srcUV[2 * x + 1] >> 6;
+        }
+        dstU += dstUStride;
+        dstV += dstVStride;
+        srcUV += srcUVStride;
+    }
+}
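// A brief check of the ">> 6" above (illustrative): P010 stores each 10-bit sample
// in the upper bits of a 16-bit word, i.e. stored = value << 6, so a 10-bit luma of
// 512 arrives as 512 << 6 = 32768 (0x8000) and 32768 >> 6 recovers 512 for the
// planar 16-bit output.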
+
+static const int16_t bt709Matrix_10bit[2][3][3] = {
+    { { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } }, /* RANGE_FULL */
+    { { 186, 627, 63 }, { -103, -345, 448 }, { 448, -407, -41 } }, /* RANGE_LIMITED */
+};
+
+static const int16_t bt2020Matrix_10bit[2][3][3] = {
+    { { 269, 694, 61 }, { -143, -369, 512 }, { 512, -471, -41 } }, /* RANGE_FULL */
+    { { 230, 594, 52 }, { -125, -323, 448 }, { 448, -412, -36 } }, /* RANGE_LIMITED */
+};
+
+void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
+                                        const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
+                                        size_t height, C2Color::matrix_t colorMatrix,
+                                        C2Color::range_t colorRange) {
+    uint16_t r, g, b;
+    int32_t i32Y, i32U, i32V;
+    uint16_t zeroLvl =  colorRange == C2Color::RANGE_FULL ? 0 : 64;
+    uint16_t maxLvlLuma =  colorRange == C2Color::RANGE_FULL ? 1023 : 940;
+    uint16_t maxLvlChroma =  colorRange == C2Color::RANGE_FULL ? 1023 : 960;
+    // set default range as limited
+    if (colorRange != C2Color::RANGE_FULL) {
+        colorRange = C2Color::RANGE_LIMITED;
+    }
+    const int16_t(*weights)[3] = (colorMatrix == C2Color::MATRIX_BT709)
+                                         ? bt709Matrix_10bit[colorRange - 1]
+                                         : bt2020Matrix_10bit[colorRange - 1];
+
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            b = (srcRGBA[x]  >> 20) & 0x3FF;
+            g = (srcRGBA[x]  >> 10) & 0x3FF;
+            r = srcRGBA[x] & 0x3FF;
+
+            i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2] + 512) >> 10) +
+                   zeroLvl;
+            dstY[x] = CLIP3(zeroLvl, i32Y, maxLvlLuma);
+            if (y % 2 == 0 && x % 2 == 0) {
+                i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2] + 512) >> 10) +
+                       512;
+                i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2] + 512) >> 10) +
+                       512;
+                dstU[x >> 1] = CLIP3(zeroLvl, i32U, maxLvlChroma);
+                dstV[x >> 1] = CLIP3(zeroLvl, i32V, maxLvlChroma);
+            }
+        }
+        srcRGBA += srcRGBStride;
+        dstY += width;
+        if (y % 2 == 0) {
+            dstU += width / 2;
+            dstV += width / 2;
+        }
+    }
+}
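// A sanity check of the arithmetic above (illustrative): for a full-range BT.709
// white pixel, r = g = b = 1023 and zeroLvl = 0, so
//   i32Y = ((1023 * 218 + 1023 * 732 + 1023 * 74 + 512) >> 10) + 0
//        = ((1023 * 1024 + 512) >> 10) = 1023,
// while each chroma row of the matrix sums to 0, giving i32U = i32V = 512:
// peak luma with neutral chroma, as expected.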
+
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 7600c5b..bc27474 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -21,6 +21,7 @@
 #include <unordered_map>
 
 #include <C2Component.h>
+#include <C2Config.h>
 
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/ALooper.h>
@@ -33,8 +34,8 @@
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
-                                size_t dstUVStride, uint32_t width, uint32_t height,
-                                bool isMonochrome = false);
+                                size_t dstUStride, size_t dstVStride, uint32_t width,
+                                uint32_t height, bool isMonochrome = false);
 
 void convertYUV420Planar16ToY410OrRGBA1010102(
         uint32_t *dst, const uint16_t *srcY,
@@ -55,6 +56,17 @@
                                  size_t dstUVStride, size_t width, size_t height,
                                  bool isMonochrome = false);
 
+void convertP010ToYUV420Planar16(uint16_t *dstY, uint16_t *dstU, uint16_t *dstV,
+                                 const uint16_t *srcY, const uint16_t *srcUV,
+                                 size_t srcYStride, size_t srcUVStride, size_t dstYStride,
+                                 size_t dstUStride, size_t dstVStride, size_t width,
+                                 size_t height, bool isMonochrome = false);
+
+void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
+                                        const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
+                                        size_t height, C2Color::matrix_t colorMatrix,
+                                        C2Color::range_t colorRange);
+
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
 public:
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 162339f..9781b6d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -21,7 +21,10 @@
     ],
 
     srcs: ["C2SoftGav1Dec.cpp"],
-    static_libs: ["libgav1"],
+    static_libs: [
+        "libgav1",
+        "libyuv_static",
+    ],
 
     apex_available: [
         "//apex_available:platform",
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 4dec57f..3e4247b 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -24,10 +24,19 @@
 #include <Codec2CommonUtils.h>
 #include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
+#include <libyuv.h>
 #include <log/log.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
 namespace android {
 
 // codecname set and passed in as a compile flag from Android.bp
@@ -102,6 +111,29 @@
             .withSetter(Hdr10PlusInfoOutputSetter)
             .build());
 
+    // default static info
+    C2HdrStaticMetadataStruct defaultStaticInfo{};
+    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+    addParameter(
+        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+            .withFields({
+                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
+            })
+            .withSetter(HdrStaticInfoSetter)
+            .build());
+
     addParameter(
         DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
@@ -331,6 +363,47 @@
   // unsafe getters
   std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }
 
+  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
+    (void)mayBlock;
+    if (me.v.mastering.red.x > 1) {
+      me.set().mastering.red.x = 1;
+    }
+    if (me.v.mastering.red.y > 1) {
+      me.set().mastering.red.y = 1;
+    }
+    if (me.v.mastering.green.x > 1) {
+      me.set().mastering.green.x = 1;
+    }
+    if (me.v.mastering.green.y > 1) {
+      me.set().mastering.green.y = 1;
+    }
+    if (me.v.mastering.blue.x > 1) {
+      me.set().mastering.blue.x = 1;
+    }
+    if (me.v.mastering.blue.y > 1) {
+      me.set().mastering.blue.y = 1;
+    }
+    if (me.v.mastering.white.x > 1) {
+      me.set().mastering.white.x = 1;
+    }
+    if (me.v.mastering.white.y > 1) {
+      me.set().mastering.white.y = 1;
+    }
+    if (me.v.mastering.maxLuminance > 65535.0) {
+      me.set().mastering.maxLuminance = 65535.0;
+    }
+    if (me.v.mastering.minLuminance > 6.5535) {
+      me.set().mastering.minLuminance = 6.5535;
+    }
+    if (me.v.maxCll > 65535.0) {
+      me.set().maxCll = 65535.0;
+    }
+    if (me.v.maxFall > 65535.0) {
+      me.set().maxFall = 65535.0;
+    }
+    return C2R::Ok();
+  }
+
  private:
   std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
   std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -343,6 +416,7 @@
   std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
   std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
   std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
 };
 
 C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
@@ -558,6 +632,76 @@
   }
 }
 
+void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
+                                       const std::unique_ptr<C2Work> &work) {
+  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+  bool infoPresent = false;
+  if (buffer->has_hdr_mdcv) {
+    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
+    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
+    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;
+
+    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
+    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;
+
+    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
+    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;
+
+    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
+    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
+    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;
+
+    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
+    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
+    // hdr_mdcv.luminance_min is in 18.14 format.
+    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
+    infoPresent = true;
+  }
+
+  if (buffer->has_hdr_cll) {
+    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
+    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
+    infoPresent = true;
+  }
+  // config if static info has changed
+  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
+  }
+}
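// A worked example of the fixed-point scaling above (illustrative, with assumed
// input values): a D65 white-point x of ~0.3127 arrives as round(0.3127 * 65536)
// = 20493, and 20493 / 65536.0 ~= 0.3127; a 1000 cd/m^2 luminance_max arrives as
// 1000 * 256 = 256000 in 24.8 format and maps back to 1000.0; a 0.005 cd/m^2
// luminance_min arrives as ~82 in 18.14 format (0.005 * 16384 ~= 82) and maps back
// to ~0.005.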
+
+void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
+                                         const std::unique_ptr<C2Work> &work) {
+  if (buffer->has_itut_t35) {
+    std::vector<uint8_t> payload;
+    size_t payloadSize = buffer->itut_t35.payload_size;
+    if (payloadSize > 0) {
+      payload.push_back(buffer->itut_t35.country_code);
+      if (buffer->itut_t35.country_code == 0xFF) {
+        payload.push_back(buffer->itut_t35.country_code_extension_byte);
+      }
+      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
+                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
+    }
+
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+            C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+    if (!hdr10PlusInfo) {
+      ALOGE("Hdr10PlusInfo allocation failed");
+      mSignalledError = true;
+      work->result = C2_NO_MEMORY;
+      return;
+    }
+    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+    // config if hdr10Plus info has changed
+    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+      mHdr10PlusInfo = std::move(hdr10PlusInfo);
+      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+    }
+  }
+}
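// Layout of the payload assembled above (descriptive only):
//   [country_code][country_code_extension_byte, only if country_code == 0xFF][payload_bytes...]
// i.e. a registered country code contributes one prefix byte, the 0xFF escape code
// contributes two, and the raw ITU-T T.35 payload bytes from the bitstream follow.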
+
 void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
     VuiColorAspects vuiColorAspects;
     vuiColorAspects.primaries = buffer->color_primary;
@@ -590,6 +734,24 @@
     }
 }
 
+void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -633,14 +795,21 @@
   }
 
   getVuiParams(buffer);
-  if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+  getHDRStaticParams(buffer, work);
+  getHDR10PlusInfoData(buffer, work);
+
+#if LIBYUV_VERSION < 1779
+  if (buffer->bitdepth == 10 &&
+      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
         buffer->image_format == libgav1::kImageFormatMonochrome400)) {
-    ALOGE("image_format %d not supported", buffer->image_format);
+    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
     mSignalledError = true;
     work->workletsProcessed = 1u;
     work->result = C2_CORRUPTED;
     return false;
   }
+#endif
+
   const bool isMonochrome =
       buffer->image_format == libgav1::kImageFormatMonochrome400;
 
@@ -657,6 +826,7 @@
       allowRGBA1010102 = true;
     }
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
         (buffer->image_format != libgav1::kImageFormatYuv420)) {
         ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
@@ -665,6 +835,7 @@
       work->workletsProcessed = 1u;
       return false;
     }
+#endif
   }
 
   if (mHalPixelFormat != format) {
@@ -713,40 +884,154 @@
   uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
   uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
   uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
-  size_t srcYStride = buffer->stride[0];
-  size_t srcUStride = buffer->stride[1];
-  size_t srcVStride = buffer->stride[2];
 
   C2PlanarLayout layout = wView.layout();
   size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-  size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
   if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+    size_t srcYStride = buffer->stride[0] / 2;
+    size_t srcUStride = buffer->stride[1] / 2;
+    size_t srcVStride = buffer->stride[2] / 2;
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-        convertYUV420Planar16ToY410OrRGBA1010102(
-                (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
-                srcUStride / 2, srcVStride / 2,
-                dstYStride / sizeof(uint32_t), mWidth, mHeight,
-                std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+        bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+        if (buffer->image_format == libgav1::kImageFormatYuv444) {
+            libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                     mWidth, mHeight);
+            processed = true;
+        } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+            libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                     dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                     mWidth, mHeight);
+            processed = true;
+        }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+        if (!processed) {
+            if (isMonochrome) {
+                const size_t tmpSize = mWidth;
+                const bool needFill = tmpSize > mTmpFrameBufferSize;
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                srcU = srcV = mTmpFrameBuffer.get();
+                srcUStride = srcVStride = 0;
+                if (needFill) {
+                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                }
+            }
+            convertYUV420Planar16ToY410OrRGBA1010102(
+                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
+                    srcUStride, srcVStride,
+                    dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+        }
     } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+        dstYStride /= 2;
+        dstUStride /= 2;
+        dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+        if (buffer->image_format == libgav1::kImageFormatYuv444 ||
+            buffer->image_format == libgav1::kImageFormatYuv422) {
+            // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
+            // libyuv::I210ToP010 when they are available.
+            // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
+            // guarantees.
+            const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+            if (!allocTmpFrameBuffer(tmpSize)) {
+                ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                setError(work, C2_NO_MEMORY);
+                return false;
+            }
+            uint16_t *const tmpY = mTmpFrameBuffer.get();
+            uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+            uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+            if (buffer->image_format == libgav1::kImageFormatYuv444) {
+                libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+                                   mWidth, mHeight);
+            } else {
+                libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+                                   mWidth, mHeight);
+            }
+            libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+                               (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
+                               mWidth, mHeight);
+        } else {
+            convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+                                        srcYStride, srcUStride, srcVStride, dstYStride,
+                                        dstUStride, mWidth, mHeight, isMonochrome);
+        }
+#else  // LIBYUV_VERSION < 1779
         convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
-                                    srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
-                                    dstUVStride / 2, mWidth, mHeight, isMonochrome);
+                                    srcYStride, srcUStride, srcVStride, dstYStride,
+                                    dstUStride, mWidth, mHeight, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
     } else {
-        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride, mWidth,
-                                    mHeight, isMonochrome);
+#if LIBYUV_VERSION >= 1779
+        if (buffer->image_format == libgav1::kImageFormatYuv444) {
+            // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
+            // it's available.
+            const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+            if (!allocTmpFrameBuffer(tmpSize)) {
+                ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                setError(work, C2_NO_MEMORY);
+                return false;
+            }
+            uint16_t *const tmpY = mTmpFrameBuffer.get();
+            uint16_t *const tmpU = tmpY + dstYStride * mHeight;
+            uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+            libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                               tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+                               mWidth, mHeight);
+            libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
+                               dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                               mWidth, mHeight);
+        } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+            libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                               dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                               mWidth, mHeight);
+        } else {
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                        srcUStride, srcVStride, dstYStride, dstUStride,
+                                        mWidth, mHeight, isMonochrome);
+        }
+#else  // LIBYUV_VERSION < 1779
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                    srcUStride, srcVStride, dstYStride, dstUStride,
+                                    mWidth, mHeight, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                               srcVStride, dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
+    size_t srcYStride = buffer->stride[0];
+    size_t srcUStride = buffer->stride[1];
+    size_t srcVStride = buffer->stride[2];
+
+    if (buffer->image_format == libgav1::kImageFormatYuv444) {
+        libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                           mWidth, mHeight);
+    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
+        libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                           mWidth, mHeight);
+    } else {
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
+                                   isMonochrome);
+    }
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 3d4db55..c3b27ea 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -19,12 +19,14 @@
 
 #include <inttypes.h>
 
+#include <memory>
+
 #include <media/stagefright/foundation/ColorUtils.h>
 
 #include <SimpleC2Component.h>
 #include <C2Config.h>
-#include "libgav1/src/gav1/decoder.h"
-#include "libgav1/src/gav1/decoder_settings.h"
+#include <gav1/decoder.h>
+#include <gav1/decoder_settings.h>
 
 namespace android {
 
@@ -60,6 +62,12 @@
   uint32_t mHeight;
   bool mSignalledOutputEos;
   bool mSignalledError;
+  // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+  std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+  size_t mTmpFrameBufferSize = 0;
+
+  C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+  std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
 
   // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
   // converting them to C2 values for each frame
@@ -86,10 +94,17 @@
   nsecs_t mTimeEnd = 0;    // Time at the end of decode()
 
   bool initDecoder();
+  void getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
+                  const std::unique_ptr<C2Work> &work);
+  void getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
+                  const std::unique_ptr<C2Work> &work);
   void getVuiParams(const libgav1::DecoderBuffer *buffer);
   void destroyDecoder();
   void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                   const std::shared_ptr<C2GraphicBlock>& block);
+  // Sets |work->result| to |error| and sets mSignalledError.
+  void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
+  bool allocTmpFrameBuffer(size_t size);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 5a660c5..15d6dcd 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -664,6 +664,9 @@
 void C2SoftHevcDec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftHevcDec::deleteDecoder() {
@@ -917,7 +920,8 @@
         if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN128(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
+                mStride = ALIGN128(ps_decode_op->u4_pic_wd);
+                setParams(mStride, IVD_DECODE_FRAME);
             }
             if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
                 mWidth = ps_decode_op->u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 60d5875..ec1dd14 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -341,6 +341,9 @@
         // By default needsUpdate = false in case the supplied level does meet
         // the requirements.
         bool needsUpdate = false;
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
         for (const LevelLimits &limit : kLimits) {
             if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
                     bitrate.v.value <= limit.bitrate) {
@@ -362,7 +365,7 @@
                 needsUpdate = true;
             }
         }
-        if (!found) {
+        if (!found || me.v.level > LEVEL_HEVC_MAIN_5_2) {
             // We set to the highest supported level.
             me.set().level = LEVEL_HEVC_MAIN_5_2;
         }
@@ -1109,14 +1112,14 @@
         }
     }
 
-    std::shared_ptr<const C2GraphicView> view;
+    std::shared_ptr<C2GraphicView> view;
     std::shared_ptr<C2Buffer> inputBuffer = nullptr;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
     if (eos) mSignalledEos = true;
 
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
-        view = std::make_shared<const C2GraphicView>(
+        view = std::make_shared<C2GraphicView>(
             inputBuffer->data().graphicBlocks().front().map().get());
         if (view->error() != C2_OK) {
             ALOGE("graphic view map err = %d", view->error());
@@ -1125,6 +1128,9 @@
             work->workletsProcessed = 1u;
             return;
         }
+        // (b/232396154)
+        // Workaround for incorrect crop size in view when using surface mode.
+        view->setCrop_be(C2Rect(mSize->width, mSize->height));
     }
     IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
 
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 9a41910..439323c 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -732,6 +732,9 @@
 void C2SoftMpeg2Dec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftMpeg2Dec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 54a1d0e..2137964 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -256,7 +256,9 @@
     mFramesConfigured = false;
     mSignalledOutputEos = false;
     mSignalledError = false;
-
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
     return C2_OK;
 }
 
@@ -601,7 +603,8 @@
 
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-        size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+        size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+        size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
         size_t srcYStride = align(mWidth, 16);
         size_t srcUStride = srcYStride / 2;
         size_t srcVStride = srcYStride / 2;
@@ -611,8 +614,8 @@
         const uint8_t *srcV = (const uint8_t *)srcY + vStride * srcYStride * 5 / 4;
 
         convertYUV420Planar8ToYV12(outputBufferY, outputBufferU, outputBufferV, srcY, srcU, srcV,
-                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUVStride,
-                                   mWidth, mHeight);
+                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
+                                   dstVStride, mWidth, mHeight);
 
         inPos += inSize - (size_t)tmpInSize;
         finishWork(workIndex, work);
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3bfec66..acc42e9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -49,6 +49,8 @@
 const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
 #endif
 
+constexpr float VBV_DELAY = 5.0f;
+
 } // namepsace
 
 class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -131,7 +133,7 @@
                             C2Config::LEVEL_MP4V_1,
                             C2Config::LEVEL_MP4V_2})
                 })
-                .withSetter(ProfileLevelSetter)
+                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                 .build());
 #else
         addParameter(
@@ -148,7 +150,7 @@
                             C2Config::LEVEL_H263_40,
                             C2Config::LEVEL_H263_45})
                 })
-                .withSetter(ProfileLevelSetter)
+                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                 .build());
 #endif
     }
@@ -179,7 +181,10 @@
 
     static C2R ProfileLevelSetter(
             bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me) {
+            C2P<C2StreamProfileLevelInfo::output> &me,
+            const C2P<C2StreamPictureSizeInfo::input> &size,
+            const C2P<C2StreamFrameRateInfo::output> &frameRate,
+            const C2P<C2StreamBitrateInfo::output> &bitrate) {
         (void)mayBlock;
         if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
 #ifdef MPEG4
@@ -188,13 +193,91 @@
             me.set().profile = PROFILE_H263_BASELINE;
 #endif
         }
-        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+
+        struct LevelLimits {
+            C2Config::level_t level;
+            uint32_t sampleRate;
+            uint32_t width;
+            uint32_t height;
+            uint32_t frameRate;
+            uint32_t bitrate;
+            uint32_t vbvSize;
+        };
+
+        constexpr LevelLimits kLimits[] = {
 #ifdef MPEG4
-            me.set().level = LEVEL_MP4V_2;
+            { LEVEL_MP4V_0, 380160, 176, 144, 15, 64000, 163840 },
+            // { LEVEL_MP4V_0B, 380160, 176, 144, 15, 128000, 163840 },
+            { LEVEL_MP4V_1, 380160, 176, 144, 30, 64000, 163840 },
+            { LEVEL_MP4V_2, 1520640, 352, 288, 30, 128000, 655360 },
 #else
-            me.set().level = LEVEL_H263_45;
+            // HRD Buffer Size = (B + BPPmaxKb * 1024) bits,
+            // where (BPPmaxKb * 1024) is the maximum number of bits per picture
+            // negotiated for use in the bitstream (Sec 3.6 of T-Rec-H.263) and
+            // B = 4 * Rmax / PCF, with Rmax the maximum bit rate and PCF the
+            // picture clock frequency (see the worked check after this setter).
+            { LEVEL_H263_10, 380160, 176, 144, 15, 64000, 74077 },
+            { LEVEL_H263_45, 380160, 176, 144, 15, 128000, 82619 },
+            { LEVEL_H263_20, 1520640, 352, 288, 30, 128000, 279227 },
+            { LEVEL_H263_30, 3041280, 352, 288, 30, 384000, 313395 },
+            { LEVEL_H263_40, 3041280, 352, 288, 30, 2048000, 535483 },
+            // { LEVEL_H263_50, 5068800, 352, 288, 60, 4096000, 808823 },
 #endif
+        };
+
+        auto mbs = ((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
+        auto sampleRate = mbs * frameRate.v.value * 16 * 16;
+        auto vbvSize = bitrate.v.value * VBV_DELAY;
+
+        // Check if the supplied level meets the MB / bitrate requirements. If
+        // not, update the level with the lowest level meeting the requirements.
+        bool found = false;
+
+        // By default needsUpdate = false in case the supplied level does meet
+        // the requirements.
+        bool needsUpdate = false;
+#ifdef MPEG4
+        // For Level 0b, we want to update the level anyway, as the library does
+        // not seem to accept this value.
+        if (me.v.level == LEVEL_MP4V_0B) {
+            needsUpdate = true;
         }
+#endif
+        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+            needsUpdate = true;
+        }
+        for (const LevelLimits &limit : kLimits) {
+            if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
+                    vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
+                    bitrate.v.value <= limit.bitrate && frameRate.v.value <= limit.frameRate) {
+                // This is the lowest level that meets the requirements, and if
+                // we haven't seen the supplied level yet, that means we don't
+                // need the update.
+                if (needsUpdate) {
+                    ALOGD("Given level %x does not cover current configuration: "
+                          "adjusting to %x", me.v.level, limit.level);
+                    me.set().level = limit.level;
+                }
+                found = true;
+                break;
+            }
+            if (me.v.level == limit.level) {
+                // We break out of the loop when the lowest feasible level is
+                // found. The fact that we're here means that our level doesn't
+                // meet the requirement and needs to be updated.
+                needsUpdate = true;
+            }
+        }
+        // If not found or exceeds max level, set to the highest supported level.
+#ifdef MPEG4
+        if (!found || me.v.level > LEVEL_MP4V_2) {
+            me.set().level = LEVEL_MP4V_2;
+        }
+#else
+        if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
+            me.set().level = LEVEL_H263_40;
+        }
+#endif
         return C2R::Ok();
     }
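As a cross-check of the H.263 vbvSize entries in the table above (a worked example; BPPmaxKb = 64 for the QCIF-size levels and 256 for the CIF-size levels are assumed from T-Rec-H.263, with PCF = 30000/1001 ~= 29.97 Hz):

    LEVEL_H263_10: B = 4 * 64000  / 29.97 ~=  8541;  8541 +  64 * 1024 =  74077
    LEVEL_H263_45: B = 4 * 128000 / 29.97 ~= 17083; 17083 +  64 * 1024 =  82619
    LEVEL_H263_20: B = 4 * 128000 / 29.97 ~= 17083; 17083 + 256 * 1024 = 279227

These reproduce the vbvSize values listed in kLimits.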
 
@@ -325,7 +408,7 @@
     mEncParams->encHeight[0] = mSize->height;
     mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
     mEncParams->rcType = VBR_1;
-    mEncParams->vbvDelay = 5.0f;
+    mEncParams->vbvDelay = VBV_DELAY;
     mEncParams->profile_level = CORE_PROFILE_LEVEL2;
     mEncParams->packetSize = 32;
     mEncParams->rvlcEnable = PV_OFF;
@@ -464,18 +547,21 @@
         }
     }
 
-    std::shared_ptr<const C2GraphicView> rView;
+    std::shared_ptr<C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
-        rView = std::make_shared<const C2GraphicView>(
+        rView = std::make_shared<C2GraphicView>(
                 inputBuffer->data().graphicBlocks().front().map().get());
         if (rView->error() != C2_OK) {
             ALOGE("graphic view map err = %d", rView->error());
             work->result = rView->error();
             return;
         }
+        // (b/232396154)
+        // Workaround for incorrect crop size in view when using surface mode.
+        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
     } else {
         fillEmptyWork(work);
         if (eos) {
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 8087396..dab7b89 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -69,8 +69,8 @@
                 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                 .withFields({
-                    C2F(mSize, width).inRange(2, 2048, 2),
-                    C2F(mSize, height).inRange(2, 2048, 2),
+                    C2F(mSize, width).inRange(2, 2048),
+                    C2F(mSize, height).inRange(2, 2048),
                 })
                 .withSetter(SizeSetter)
                 .build());
@@ -734,7 +734,12 @@
     }
 
     C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-    c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
+    c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
+                                              align(mHeight, 2), format, usage,
+                                              &block);
     if (err != C2_OK) {
         ALOGE("fetchGraphicBlock for Output failed with status %d", err);
         work->result = err;
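
A quick aside on the alignment applied here (an editorial sketch; alignUp below is the usual power-of-two round-up, the same formula C2FUZZER_ALIGN uses later in this patch, and not necessarily the exact align() helper the component calls): the block is over-allocated to a 16-aligned width and an even height, and the crop communicates the true picture size.

    #include <cassert>
    #include <cstdint>

    // Assumes the alignment value is a power of two.
    static constexpr uint32_t alignUp(uint32_t v, uint32_t a) {
        return (v + (a - 1)) & ~(a - 1);
    }

    int main() {
        // e.g. a 426x239 picture would be backed by a 432x240 block,
        // with the crop keeping only 426x239 visible.
        assert(alignUp(426, 16) == 432);
        assert(alignUp(239, 2) == 240);  // even heights keep the subsampled chroma planes whole
        return 0;
    }
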
@@ -761,7 +766,8 @@
     size_t srcVStride = img->stride[VPX_PLANE_V];
     C2PlanarLayout layout = wView.layout();
     size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
     if (img->fmt == VPX_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
@@ -799,10 +805,10 @@
         } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
             convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                         srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                        dstYStride / 2, dstUVStride / 2, mWidth, mHeight);
+                                        dstYStride / 2, dstUStride / 2, mWidth, mHeight);
         } else {
             convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUStride,
                                         mWidth, mHeight);
         }
     } else {
@@ -811,7 +817,7 @@
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
 
         convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
+                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight);
     }
     finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
     return OK;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index f99ee24..e903069 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -59,7 +59,7 @@
 
     addParameter(
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-            .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+            .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
             .withFields({
                 C2F(mSize, width).inRange(2, 2048, 2),
                 C2F(mSize, height).inRange(2, 2048, 2),
@@ -81,7 +81,7 @@
 
     addParameter(
         DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-            .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+            .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
             // TODO: More restriction?
             .withFields({C2F(mFrameRate, value).greaterThan(0.)})
             .withSetter(
@@ -127,10 +127,18 @@
                 C2F(mProfileLevel, profile).equalTo(
                     PROFILE_VP9_0
                 ),
-                C2F(mProfileLevel, level).equalTo(
-                    LEVEL_VP9_4_1),
+                C2F(mProfileLevel, level).oneOf({
+                        C2Config::LEVEL_VP9_1,
+                        C2Config::LEVEL_VP9_1_1,
+                        C2Config::LEVEL_VP9_2,
+                        C2Config::LEVEL_VP9_2_1,
+                        C2Config::LEVEL_VP9_3,
+                        C2Config::LEVEL_VP9_3_1,
+                        C2Config::LEVEL_VP9_4,
+                        C2Config::LEVEL_VP9_4_1,
+                }),
             })
-            .withSetter(ProfileLevelSetter)
+            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
             .build());
 #else
     addParameter(
@@ -144,7 +152,7 @@
                 C2F(mProfileLevel, level).equalTo(
                     LEVEL_UNUSED),
             })
-            .withSetter(ProfileLevelSetter)
+            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
             .build());
 #endif
     addParameter(
@@ -217,14 +225,84 @@
 }
 
 C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
-                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+                                               C2P<C2StreamProfileLevelInfo::output>& me,
+                                               const C2P<C2StreamPictureSizeInfo::input>& size,
+                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
     (void)mayBlock;
+#ifdef VP9
     if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
         me.set().profile = PROFILE_VP9_0;
     }
+    struct LevelLimits {
+        C2Config::level_t level;
+        float samplesPerSec;
+        uint64_t samples;
+        uint32_t bitrate;
+        size_t dimension;
+    };
+    constexpr LevelLimits kLimits[] = {
+            {LEVEL_VP9_1, 829440, 36864, 200000, 512},
+            {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
+            {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
+            {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
+            {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
+            {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
+            {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
+            {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
+    };
+
+    uint64_t samples = size.v.width * size.v.height;
+    float samplesPerSec = float(samples) * frameRate.v.value;
+    size_t dimension = std::max(size.v.width, size.v.height);
+
+    // Check if the supplied level meets the samples / bitrate requirements.
+    // If not, update the level with the lowest level meeting the requirements.
+    bool found = false;
+
+    // By default needsUpdate = false in case the supplied level does meet
+    // the requirements.
+    bool needsUpdate = false;
     if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        needsUpdate = true;
+    }
+    for (const LevelLimits& limit : kLimits) {
+        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+            bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
+            // This is the lowest level that meets the requirements, and if
+            // we haven't seen the supplied level yet, that means we don't
+            // need the update.
+            if (needsUpdate) {
+                ALOGD("Given level %x does not cover current configuration: "
+                        "adjusting to %x",
+                        me.v.level, limit.level);
+                me.set().level = limit.level;
+            }
+            found = true;
+            break;
+        }
+        if (me.v.level == limit.level) {
+            // We break out of the loop when the lowest feasible level is
+            // found. The fact that we're here means that our level doesn't
+            // meet the requirement and needs to be updated.
+            needsUpdate = true;
+        }
+    }
+    if (!found) {
+        // We set to the highest supported level.
         me.set().level = LEVEL_VP9_4_1;
     }
+#else
+    (void)size;
+    (void)frameRate;
+    (void)bitrate;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_VP8_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_UNUSED;
+    }
+#endif
     return C2R::Ok();
 }
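
To make the VP9 walk above concrete (an editorial example, with figures taken from the kLimits rows): a 1920x1080 clip at 30 fps and 10 Mbps has 2,073,600 samples per frame, which already exceeds the 983,040-sample bound of LEVEL_VP9_3_1, while LEVEL_VP9_4 passes every check (2,073,600 <= 2,228,224 samples, 62,208,000 <= 83,558,400 samples/s, 10,000,000 <= 18,000,000 bps, 1920 <= 4160), so the setter lands on LEVEL_VP9_4 as the lowest adequate level.
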
 
@@ -683,17 +761,20 @@
         return;
     }
 
-    std::shared_ptr<const C2GraphicView> rView;
+    std::shared_ptr<C2GraphicView> rView;
     std::shared_ptr<C2Buffer> inputBuffer;
     if (!work->input.buffers.empty()) {
         inputBuffer = work->input.buffers[0];
-        rView = std::make_shared<const C2GraphicView>(
+        rView = std::make_shared<C2GraphicView>(
                     inputBuffer->data().graphicBlocks().front().map().get());
         if (rView->error() != C2_OK) {
             ALOGE("graphic view map err = %d", rView->error());
             work->result = C2_CORRUPTED;
             return;
         }
+        // (b/232396154)
+        // Workaround for incorrect crop size in view when using surface mode.
+        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
     } else {
         ALOGV("Empty input Buffer");
         uint32_t flags = 0;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 714fadb..bfb4444 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -243,9 +243,10 @@
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me);
 
-    static C2R ProfileLevelSetter(
-            bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me);
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+                                  const C2P<C2StreamPictureSizeInfo::input>& size,
+                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                  const C2P<C2StreamBitrateInfo::output>& bitrate);
 
     static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 6ff3dbc..9a3399d 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2016,7 +2016,8 @@
 constexpr char C2_PARAMKEY_MAX_CODED_CHANNEL_COUNT[] = "coded.max-channel-count";
 
 /**
- * Audio channel mask. Used by decoder to express audio channel mask of decoded content.
+ * Audio channel mask. Used by the decoder to express the audio channel mask of decoded
+ * content, or by the encoder to indicate the channel mask of the encoded content once decoded.
  * Channel representation is specified according to the Java android.media.AudioFormat
  * CHANNEL_OUT_* constants.
  */
@@ -2503,7 +2504,8 @@
  * Note: This parameter allows a decoder to ignore the video peek machinery and
  * to revert to its preferred behavior.
  */
-typedef C2StreamParam<C2Tuning, C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>,
+typedef C2StreamParam<C2Tuning,
+        C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>>,
         kParamIndexTunnelPeekMode> C2StreamTunnelPeekModeTuning;
 constexpr char C2_PARAMKEY_TUNNEL_PEEK_MODE[] =
         "output.tunnel-peek-mode";
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index 147a52e..dd68e7e 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -145,6 +145,21 @@
 }
 
 cc_fuzz {
+    name: "C2FuzzerAV1Dec",
+    defaults: ["C2Fuzzer-defaults"],
+
+    cflags: [
+        "-DC2COMPONENTNAME=\"c2.android.av1.decoder\"",
+    ],
+
+    static_libs: [
+        "libgav1",
+        "libyuv_static",
+        "libcodec2_soft_av1dec_gav1",
+    ],
+}
+
+cc_fuzz {
     name: "C2FuzzerAacDec",
     defaults: ["C2Fuzzer-defaults"],
 
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
index e469d8b..d6793e0 100644
--- a/media/codec2/fuzzer/C2Fuzzer.cpp
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -246,7 +246,8 @@
   bufferSource->parse();
   c2_status_t status = C2_OK;
   size_t numFrames = 0;
-  while (!bufferSource->isEos()) {
+  int32_t iterationCount = 0;
+  while (!bufferSource->isEos() && ++iterationCount <= kMaxIterations) {
     uint8_t* frame = nullptr;
     size_t frameSize = 0;
     FrameData frameData = bufferSource->getFrame();
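
Bounding this loop matters because the parsed stream is fuzzer-controlled: without the kMaxIterations cap (added to C2Fuzzer.h below as 100), an input that keeps yielding frames without ever reaching EOS could keep a single fuzz run going until the harness's per-input time budget kills it.
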
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
index da76885..4e3e3ea 100644
--- a/media/codec2/fuzzer/C2Fuzzer.h
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -37,6 +37,7 @@
 #define C2FUZZER_ALIGN(_sz, _align) (((_sz) + ((_align)-1)) & ~((_align)-1))
 
 constexpr std::chrono::milliseconds kC2FuzzerTimeOut = 5000ms;
+constexpr int32_t kMaxIterations = 100;
 constexpr int32_t kNumberOfC2WorkItems = 8;
 constexpr uint32_t kWidthOfVideo = 3840;
 constexpr uint32_t kHeightOfVideo = 2160;
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hal/client/Android.bp
similarity index 98%
rename from media/codec2/hidl/client/Android.bp
rename to media/codec2/hal/client/Android.bp
index f32711d..31244fe 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -45,6 +45,7 @@
         "android.hardware.media.c2@1.2",
         "libbase",
         "libbinder",
+        "libbinder_ndk",
         "libcodec2",
         "libcodec2_hidl_client@1.0",
         "libcodec2_hidl_client@1.1",
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hal/client/client.cpp
similarity index 98%
rename from media/codec2/hidl/client/client.cpp
rename to media/codec2/hal/client/client.cpp
index 0acf7d7..97c0806 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -610,16 +610,9 @@
 
 // Codec2Client
 Codec2Client::Codec2Client(sp<Base> const& base,
+                           sp<IConfigurable> const& configurable,
                            size_t serviceIndex)
-      : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
-                        base->getConfigurable();
-                return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
-                        nullptr;
-            }()
-        },
+      : Configurable{configurable},
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
         mBase1_2{Base1_2::castFrom(base)},
@@ -1003,7 +996,11 @@
     CHECK(baseStore) << "Codec2 service \"" << name << "\""
                         " inaccessible for unknown reasons.";
     LOG(VERBOSE) << "Client to Codec2 service \"" << name << "\" created";
-    return std::make_shared<Codec2Client>(baseStore, index);
+    Return<sp<IConfigurable>> transResult = baseStore->getConfigurable();
+    CHECK(transResult.isOk()) << "Codec2 service \"" << name << "\""
+                                " does not have IConfigurable.";
+    sp<IConfigurable> configurable = static_cast<sp<IConfigurable>>(transResult);
+    return std::make_shared<Codec2Client>(baseStore, configurable, index);
 }
 
 c2_status_t Codec2Client::ForAllServices(
@@ -1523,8 +1520,8 @@
     uint64_t consumerUsage = kDefaultConsumerUsage;
     {
         if (surface) {
-            int usage = 0;
-            status_t err = surface->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+            uint64_t usage = 0;
+            status_t err = surface->getConsumerUsage(&usage);
             if (err != NO_ERROR) {
                 ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
                         err, asString(err));
@@ -1537,8 +1534,7 @@
                 // they do not exist inside of C2 scope. Any buffer usage shall be communicated
                 // through the sideband channel.
 
-                // do an unsigned conversion as bit-31 may be 1
-                consumerUsage = (uint32_t)usage | kDefaultConsumerUsage;
+                consumerUsage = usage | kDefaultConsumerUsage;
             }
         }
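
The type change removes the old sign-extension hazard: the 32-bit query could hand back an int with bit 31 set, and promoting that int straight to 64 bits sign-extends, which is exactly what the deleted (uint32_t) cast guarded against; getConsumerUsage() returns a uint64_t, so no guard is needed. A minimal standalone sketch of that hazard (values are illustrative, not from the patch):

    #include <cassert>
    #include <cstdint>

    int main() {
        int32_t usage32 = INT32_MIN;                          // only bit 31 set, as the old query could return
        uint64_t sloppy = static_cast<uint64_t>(usage32);     // sign-extends to 0xFFFFFFFF80000000
        uint64_t guarded =
                static_cast<uint64_t>(static_cast<uint32_t>(usage32));  // 0x0000000080000000
        assert(sloppy != guarded);
        uint64_t usage64 = 0x80000000u;                       // what a 64-bit query returns directly
        assert(usage64 == guarded);
        return 0;
    }
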
 
@@ -1562,6 +1558,8 @@
                     static_cast<uint64_t>(blockPoolId),
                     bqId == 0 ? nullHgbp : igbp);
 
+    mOutputBufferQueue->expireOldWaiters();
+
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
@@ -1582,6 +1580,10 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    mOutputBufferQueue->pollForRenderedFrames(delta);
+}
+
 void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
         int maxDequeueCount) {
     mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
@@ -1603,6 +1605,7 @@
                        << status << ".";
         }
     }
+    mOutputBufferQueue->expireOldWaiters();
 }
 
 c2_status_t Codec2Client::Component::connectToInputSurface(
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
similarity index 97%
rename from media/codec2/hidl/client/include/codec2/hidl/client.h
rename to media/codec2/hal/client/include/codec2/hidl/client.h
index 49d9b28..5267394 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -23,6 +23,7 @@
 #include <C2Param.h>
 #include <C2.h>
 
+#include <gui/FrameTimestamps.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <hidl/HidlSupport.h>
 #include <utils/StrongPointer.h>
@@ -145,6 +146,8 @@
     typedef ::android::hardware::media::c2::V1_2::IComponentStore Base1_2;
     typedef Base1_0 Base;
 
+    typedef ::android::hardware::media::c2::V1_0::IConfigurable IConfigurable;
+
     struct Listener;
 
     typedef Codec2ConfigurableClient Configurable;
@@ -229,8 +232,11 @@
     static std::shared_ptr<InputSurface> CreateInputSurface(
             char const* serviceName = nullptr);
 
-    // base cannot be null.
-    Codec2Client(sp<Base> const& base, size_t serviceIndex);
+    // base and configurable must both be non-null.
+    Codec2Client(
+            sp<Base> const& base,
+            sp<IConfigurable> const& configurable,
+            size_t serviceIndex);
 
 protected:
     sp<Base1_0> mBase1_0;
@@ -408,6 +414,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Retrieve frame event history from the output surface.
+    void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
     // Set max dequeue count for output surface.
     void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
 
@@ -508,4 +517,3 @@
 }  // namespace android
 
 #endif  // CODEC2_HIDL_CLIENT_H
-
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
similarity index 90%
rename from media/codec2/hidl/client/include/codec2/hidl/output.h
rename to media/codec2/hal/client/include/codec2/hidl/output.h
index a13edf3..2e89c3b 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -17,6 +17,7 @@
 #ifndef CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
 #define CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
 
+#include <gui/FrameTimestamps.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <codec2/hidl/1.0/types.h>
 #include <codec2/hidl/1.2/types.h>
@@ -50,6 +51,10 @@
                    int maxDequeueBufferCount,
                    std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
+    // If there are waiters blocked on allocations from the old surface, wake
+    // them up and expire them.
+    void expireOldWaiters();
+
     // Stop using the current output surface. Pending buffer operations will
     // no longer be performed.
     void stop();
@@ -60,6 +65,9 @@
             const BnGraphicBufferProducer::QueueBufferInput& input,
             BnGraphicBufferProducer::QueueBufferOutput* output);
 
+    // Retrieve frame event history from the output surface.
+    void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
     // Call holdBufferQueueBlock() on output blocks in the given workList.
     // The OutputBufferQueue will take the ownership of output blocks.
     //
@@ -86,6 +94,8 @@
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     bool mStopped;
+    std::mutex mOldMutex;
+    std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
 
     bool registerBuffer(const C2ConstGraphicBlock& block);
 };
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hal/client/output.cpp
similarity index 96%
rename from media/codec2/hidl/client/output.cpp
rename to media/codec2/hal/client/output.cpp
index f789030..4eebd1c 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -217,6 +217,7 @@
     sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
     std::weak_ptr<_C2BlockPoolData>
             poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
+    std::shared_ptr<C2SurfaceSyncMemory> oldMem;
     {
         std::scoped_lock<std::mutex> l(mMutex);
         bool stopped = mStopped;
@@ -238,7 +239,7 @@
             }
             return false;
         }
-        std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
+        oldMem = mSyncMem;
         C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
         if (oldSync) {
             oldSync->lock();
@@ -314,11 +315,26 @@
             newSync->unlock();
         }
     }
+    {
+        std::scoped_lock<std::mutex> l(mOldMutex);
+        mOldMem = oldMem;
+    }
     ALOGD("remote graphic buffer migration %zu/%zu",
           success, tryNum);
     return true;
 }
 
+void OutputBufferQueue::expireOldWaiters() {
+    std::scoped_lock<std::mutex> l(mOldMutex);
+    if (mOldMem) {
+        C2SyncVariables *oldSync = mOldMem->mem();
+        if (oldSync) {
+            oldSync->notifyAll();
+        }
+        mOldMem.reset();
+    }
+}
+
 void OutputBufferQueue::stop() {
     std::scoped_lock<std::mutex> l(mMutex);
     mStopped = true;
@@ -476,6 +492,12 @@
     return OK;
 }
 
+void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+    if (mIgbp) {
+        mIgbp->getFrameTimestamps(delta);
+    }
+}
+
 void OutputBufferQueue::holdBufferQueueBlocks(
         const std::list<std::unique_ptr<C2Work>>& workList) {
     forEachBlock(workList,
@@ -500,4 +522,3 @@
 }  // namespace media
 }  // namespace hardware
 }  // namespace android
-
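
A short note on why the old sync memory is kept around (editorial, inferred from the hunks above): allocators can be blocked waiting on the C2SyncVariables of the C2SurfaceSyncMemory tied to the previous surface, so after a surface switch or teardown they would otherwise sleep indefinitely; retaining that memory in mOldMem lets expireOldWaiters() call notifyAll() on exactly that memory once the client has finished switching. pollForRenderedFrames() simply forwards to IGraphicBufferProducer::getFrameTimestamps() on the current producer, so callers can translate the returned FrameEventHistoryDelta into render-time notifications.
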
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hal/hidl/1.0/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Android.bp
rename to media/codec2/hal/hidl/1.0/utils/Android.bp
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Component.cpp
rename to media/codec2/hal/hidl/1.0/utils/Component.cpp
diff --git a/media/codec2/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.0/utils/Configurable.cpp b/media/codec2/hal/hidl/1.0/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.0/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.0/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.0/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
similarity index 99%
rename from media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
index 7c2e014..d3fdd6b 100644
--- a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
@@ -145,7 +145,7 @@
         //         C2AndroidMemoryUsage(C2MemoryUsage(usage.value)).
         //         asGrallocUsage();
 
-        uint32_t grallocUsage =
+        uint64_t grallocUsage =
                 mSinkName.compare(0, 11, "c2.android.") == 0 ?
                 GRALLOC_USAGE_SW_READ_OFTEN :
                 GRALLOC_USAGE_HW_VIDEO_ENCODER;
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Configurable.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
similarity index 100%
rename from media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
rename to media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hal/hidl/1.0/utils/types.cpp
similarity index 99%
rename from media/codec2/hidl/1.0/utils/types.cpp
rename to media/codec2/hal/hidl/1.0/utils/types.cpp
index 5c24bd7..319ba62 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/types.cpp
@@ -1447,6 +1447,10 @@
 bool objcpy(C2BaseBlock* d, const BaseBlock& s) {
     switch (s.getDiscriminator()) {
     case BaseBlock::hidl_discriminator::nativeBlock: {
+            if (s.nativeBlock() == nullptr) {
+                LOG(ERROR) << "Null BaseBlock::nativeBlock handle";
+                return false;
+            }
             native_handle_t* sHandle =
                     native_handle_clone(s.nativeBlock());
             if (sHandle == nullptr) {
@@ -1609,6 +1613,7 @@
     // assuming blob is const here
     size_t size = blob.size();
     size_t ix = 0;
+    size_t old_ix = 0;
     const uint8_t *data = blob.data();
     C2Param *p = nullptr;
 
@@ -1616,8 +1621,13 @@
         p = C2ParamUtils::ParseFirst(data + ix, size - ix);
         if (p) {
             params->emplace_back(p);
+            old_ix = ix;
             ix += p->size();
             ix = align(ix, PARAMS_ALIGNMENT);
+            if (ix <= old_ix || ix > size) {
+                android_errorWriteLog(0x534e4554, "238083570");
+                break;
+            }
         }
     } while (p);
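
The added check stops parsing as soon as the index fails to make forward progress or runs past the blob, which is the standard defense against a parameter whose declared size is zero or overflows the index. Below is a standalone sketch of the same pattern over an invented 4-byte-size-prefixed toy format (host byte order; not the real C2Param layout):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    int main() {
        // Each toy entry: a 4-byte size (including the prefix itself) followed by payload.
        std::vector<uint8_t> blob = { 8, 0, 0, 0, 1, 2, 3, 4,  0, 0, 0, 0 /* bogus zero size */ };
        size_t ix = 0;
        while (ix + 4 <= blob.size()) {
            uint32_t entrySize;
            std::memcpy(&entrySize, blob.data() + ix, sizeof(entrySize));
            size_t oldIx = ix;
            ix += entrySize;
            if (ix <= oldIx || ix > blob.size()) {  // no progress or out of bounds: stop
                std::printf("malformed entry at offset %zu, aborting parse\n", oldIx);
                break;
            }
            std::printf("parsed entry of %u bytes\n", entrySize);
        }
        return 0;
    }
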
 
diff --git a/media/codec2/hidl/1.0/vts/OWNERS b/media/codec2/hal/hidl/1.0/vts/OWNERS
similarity index 100%
rename from media/codec2/hidl/1.0/vts/OWNERS
rename to media/codec2/hal/hidl/1.0/vts/OWNERS
diff --git a/media/codec2/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
similarity index 99%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 5409b66..222c3d2 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -333,7 +333,7 @@
             ASSERT_TRUE(false) << "Wait for generating C2Work exceeded timeout";
         }
         int64_t timestamp = (*Info)[frameID].timestamp;
-        if ((*Info)[frameID].flags) flags = 1u << ((*Info)[frameID].flags - 1);
+        flags = ((*Info)[frameID].flags == FLAG_CONFIG_DATA) ? C2FrameData::FLAG_CODEC_CONFIG : 0;
         if (signalEOS && ((frameID == (int)Info->size() - 1) || (frameID == (offset + range - 1))))
             flags |= C2FrameData::FLAG_END_OF_STREAM;
 
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
similarity index 99%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index bd7ec0d..327717b 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -372,9 +372,8 @@
             ULock l(queueLock);
             flushedIndices.emplace_back(frameID);
         }
-        char* data = (char*)malloc(bytesCount);
-        ASSERT_NE(data, nullptr);
-        eleStream.read(data, bytesCount);
+        std::vector<char> eleData(bytesCount);
+        eleStream.read(eleData.data(), bytesCount);
         // if we have reached the end of the input stream, signal eos
         if (eleStream.gcount() < bytesCount) {
             bytesCount = eleStream.gcount();
@@ -396,12 +395,11 @@
         ASSERT_EQ(0u, view.offset());
         ASSERT_EQ((size_t)bytesCount, view.size());
 
-        memcpy(view.base(), data, bytesCount);
+        memcpy(view.base(), eleData.data(), bytesCount);
         work->input.buffers.clear();
         work->input.buffers.emplace_back(new LinearBuffer(block));
         work->worklets.clear();
         work->worklets.emplace_back(new C2Worklet);
-        free(data);
 
         std::list<std::unique_ptr<C2Work>> items;
         items.push_back(std::move(work));
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/common/README.md b/media/codec2/hal/hidl/1.0/vts/functional/common/README.md
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/README.md
rename to media/codec2/hal/hidl/1.0/vts/functional/common/README.md
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
similarity index 99%
rename from media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
rename to media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 2222aaf..ecab0cb 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -28,6 +28,8 @@
 #include <fstream>
 
 #define FLAG_NON_DISPLAY_FRAME (1 << 4)
+#define FLAG_CONFIG_DATA (1 << 5)
+
 #define MAX_RETRY 20
 #define TIME_OUT 400ms
 #define MAX_INPUT_BUFFERS 8
diff --git a/media/codec2/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/component/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/master/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_352x288_420p_30fps_32frames.yuv
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.aac
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_aac_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.amrwb
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_amrwb_1ch_14kbps_16000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.av1
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_176_144_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.av1
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_av1_640_360_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.h264
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_176x144_300kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.h264
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_avc_640x360_768kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.flac
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_flac_stereo_680kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711alaw_1ch_8khz.raw
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_g711mulaw_1ch_8khz.raw
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_gsm_1ch_8khz_13kbps.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.h263
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_h263_352x288_300kbps_12fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.hevc
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_176x144_176kbps_60fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.hevc
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_hevc_640x360_1600kbps_30fps_chksum.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mp3_stereo_192kbps_48000hz.mp3
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_176x144_105kbps_25fps.m2v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg2_352x288_1mbps_60fps.m2v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_mpeg4_352x288_512kbps_30fps.m4v
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_opus_stereo_128kbps_48000hz.opus
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_16khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_2ch_48khz_s16le.raw
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vorbis_stereo_128kbps_48000hz.vorbis
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_176x144_240kbps_60fps.vp8
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps.vp8
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp8_640x360_2mbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_176x144_285kbps_60fps.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_640x360_1600kbps_30fps_chksm.md5
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.info
diff --git a/media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9 b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
rename to media/codec2/hal/hidl/1.0/vts/functional/res/bbb_vp9_704x480_280kbps_24fps_altref_2.vp9
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb b/media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
rename to media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.amrnb
Binary files differ
diff --git a/media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info b/media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
rename to media/codec2/hal/hidl/1.0/vts/functional/res/sine_amrnb_1ch_12kbps_8000hz.info
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/Android.bp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
similarity index 98%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 67873fa..386e81b 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -214,10 +214,8 @@
             calc_md5_cksum(uPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_u_chksum);
             calc_md5_cksum(vPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_v_chksum);
         } else if (bitDepth == 8 && layoutType == C2PlanarLayout::TYPE_YUV && colInc == 2) {
-            uint8_t* cbPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
-            uint8_t* crPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
-            ASSERT_NE(cbPlane, nullptr);
-            ASSERT_NE(crPlane, nullptr);
+            std::vector<uint8_t> cbPlane(cropWidth * cropHeight / 4);
+            std::vector<uint8_t> crPlane(cropWidth * cropHeight / 4);
             size_t count = 0;
             for (size_t k = 0; k < (cropHeight / 2); k++) {
                 for (size_t l = 0; l < (cropWidth); l = l + 2) {
@@ -227,10 +225,10 @@
                 }
             }
             calc_md5_cksum(yPlane, yStride, cropWidth, cropHeight, au1_y_chksum);
-            calc_md5_cksum(cbPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_u_chksum);
-            calc_md5_cksum(crPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_v_chksum);
-            free(cbPlane);
-            free(crPlane);
+            calc_md5_cksum(cbPlane.data(), cropWidth / 2, cropWidth / 2, cropHeight / 2,
+                           au1_u_chksum);
+            calc_md5_cksum(crPlane.data(), cropWidth / 2, cropWidth / 2, cropHeight / 2,
+                           au1_v_chksum);
         } else {
             mMd5Enable = false;
             ALOGV("Disabling MD5 chksm flag");
@@ -461,7 +459,8 @@
             ASSERT_TRUE(false) << "Wait for generating C2Work exceeded timeout";
         }
         int64_t timestamp = (*Info)[frameID].timestamp;
-        if ((*Info)[frameID].flags) flags = (1 << ((*Info)[frameID].flags - 1));
+
+        flags = ((*Info)[frameID].flags == FLAG_CONFIG_DATA) ? C2FrameData::FLAG_CODEC_CONFIG : 0;
         if (signalEOS && ((frameID == (int)Info->size() - 1) || (frameID == (offset + range - 1))))
             flags |= C2FrameData::FLAG_END_OF_STREAM;
 
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
rename to media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.xml
diff --git a/media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h b/media/codec2/hal/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
similarity index 100%
rename from media/codec2/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
rename to media/codec2/hal/hidl/1.0/vts/functional/video/media_c2_video_hidl_test_common.h
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hal/hidl/1.1/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Android.bp
rename to media/codec2/hal/hidl/1.1/utils/Android.bp
diff --git a/media/codec2/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Component.cpp
rename to media/codec2/hal/hidl/1.1/utils/Component.cpp
diff --git a/media/codec2/hidl/1.1/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.1/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.1/utils/Configurable.cpp b/media/codec2/hal/hidl/1.1/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.1/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.1/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.1/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.1/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.1/utils/InputSurfaceConnection.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.1/utils/InputSurfaceConnection.cpp
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentInterface.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Configurable.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputBufferManager.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurface.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/types.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
similarity index 100%
rename from media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
rename to media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/types.h
diff --git a/media/codec2/hidl/1.1/utils/types.cpp b/media/codec2/hal/hidl/1.1/utils/types.cpp
similarity index 100%
rename from media/codec2/hidl/1.1/utils/types.cpp
rename to media/codec2/hal/hidl/1.1/utils/types.cpp
diff --git a/media/codec2/hidl/1.2/utils/Android.bp b/media/codec2/hal/hidl/1.2/utils/Android.bp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Android.bp
rename to media/codec2/hal/hidl/1.2/utils/Android.bp
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Component.cpp
rename to media/codec2/hal/hidl/1.2/utils/Component.cpp
diff --git a/media/codec2/hidl/1.2/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentInterface.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/ComponentInterface.cpp
rename to media/codec2/hal/hidl/1.2/utils/ComponentInterface.cpp
diff --git a/media/codec2/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/ComponentStore.cpp
rename to media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
diff --git a/media/codec2/hidl/1.2/utils/Configurable.cpp b/media/codec2/hal/hidl/1.2/utils/Configurable.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/Configurable.cpp
rename to media/codec2/hal/hidl/1.2/utils/Configurable.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputBufferManager.cpp b/media/codec2/hal/hidl/1.2/utils/InputBufferManager.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputBufferManager.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputBufferManager.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputSurface.cpp b/media/codec2/hal/hidl/1.2/utils/InputSurface.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputSurface.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputSurface.cpp
diff --git a/media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.2/utils/InputSurfaceConnection.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/InputSurfaceConnection.cpp
rename to media/codec2/hal/hidl/1.2/utils/InputSurfaceConnection.cpp
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentInterface.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Configurable.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputBufferManager.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurface.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/InputSurfaceConnection.h
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
similarity index 100%
rename from media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
rename to media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/types.h
diff --git a/media/codec2/hidl/1.2/utils/types.cpp b/media/codec2/hal/hidl/1.2/utils/types.cpp
similarity index 100%
rename from media/codec2/hidl/1.2/utils/types.cpp
rename to media/codec2/hal/hidl/1.2/utils/types.cpp
diff --git a/media/codec2/hidl/plugin/Android.bp b/media/codec2/hal/plugin/Android.bp
similarity index 100%
rename from media/codec2/hidl/plugin/Android.bp
rename to media/codec2/hal/plugin/Android.bp
diff --git a/media/codec2/hidl/plugin/DefaultFilterPlugin.cpp b/media/codec2/hal/plugin/DefaultFilterPlugin.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/DefaultFilterPlugin.cpp
rename to media/codec2/hal/plugin/DefaultFilterPlugin.cpp
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/FilterWrapper.cpp
rename to media/codec2/hal/plugin/FilterWrapper.cpp
diff --git a/media/codec2/hidl/plugin/FilterWrapperStub.cpp b/media/codec2/hal/plugin/FilterWrapperStub.cpp
similarity index 100%
rename from media/codec2/hidl/plugin/FilterWrapperStub.cpp
rename to media/codec2/hal/plugin/FilterWrapperStub.cpp
diff --git a/media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h b/media/codec2/hal/plugin/include/codec2/hidl/plugin/FilterPlugin.h
similarity index 100%
rename from media/codec2/hidl/plugin/include/codec2/hidl/plugin/FilterPlugin.h
rename to media/codec2/hal/plugin/include/codec2/hidl/plugin/FilterPlugin.h
diff --git a/media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h b/media/codec2/hal/plugin/internal/DefaultFilterPlugin.h
similarity index 100%
rename from media/codec2/hidl/plugin/internal/DefaultFilterPlugin.h
rename to media/codec2/hal/plugin/internal/DefaultFilterPlugin.h
diff --git a/media/codec2/hidl/plugin/internal/FilterWrapper.h b/media/codec2/hal/plugin/internal/FilterWrapper.h
similarity index 100%
rename from media/codec2/hidl/plugin/internal/FilterWrapper.h
rename to media/codec2/hal/plugin/internal/FilterWrapper.h
diff --git a/media/codec2/hidl/plugin/samples/Android.bp b/media/codec2/hal/plugin/samples/Android.bp
similarity index 97%
rename from media/codec2/hidl/plugin/samples/Android.bp
rename to media/codec2/hal/plugin/samples/Android.bp
index 32b760d..e0f8280 100644
--- a/media/codec2/hidl/plugin/samples/Android.bp
+++ b/media/codec2/hal/plugin/samples/Android.bp
@@ -28,6 +28,7 @@
         "libGLESv1_CM",
         "libGLESv2",
         "libGLESv3",
+        "libvulkan",
         "libbase",
         "libcodec2",
         "libcutils",
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
similarity index 98%
rename from media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
rename to media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index 5c13b0e..b5383ad 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -417,6 +417,7 @@
         }
         std::unique_lock lock(mQueueMutex);
         mQueue.splice(mQueue.end(), *items);
+        mQueueCondition.notify_all();
         return C2_OK;
     }
 
@@ -665,6 +666,7 @@
                         grallocHandle, GraphicBuffer::CLONE_HANDLE,
                         width, height, format, 1, usage, stride);
 
+                native_handle_delete(grallocHandle);
                 std::shared_ptr<C2GraphicBlock> dstBlock;
                 C2BlockPool::local_id_t poolId = mIntf->getPoolId();
                 std::shared_ptr<C2BlockPool> pool;
@@ -683,6 +685,7 @@
                         grallocHandle, GraphicBuffer::CLONE_HANDLE,
                         width, height, format, 1, usage, stride);
 
+                native_handle_delete(grallocHandle);
                 Rect sourceCrop(0, 0, width, height);
 
                 renderengine::DisplaySettings clientCompositionDisplay;
@@ -707,10 +710,6 @@
                 layerSettings.source.buffer.fence = Fence::NO_FENCE;
                 layerSettings.source.buffer.textureName = textureName;
                 layerSettings.source.buffer.usePremultipliedAlpha = false;
-                layerSettings.source.buffer.isY410BT2020 =
-                    (layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
-                     layerSettings.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG) &&
-                    format == HAL_PIXEL_FORMAT_RGBA_1010102;
                 layerSettings.source.buffer.maxMasteringLuminance =
                     (hdrStaticInfo && *hdrStaticInfo &&
                      hdrStaticInfo->mastering.maxLuminance > 0 &&
@@ -807,8 +806,6 @@
     // affectedParams
     {
         C2StreamHdrStaticInfo::output::PARAM_TYPE,
-        C2StreamHdr10PlusInfo::output::PARAM_TYPE,  // will be deprecated
-        C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
         C2StreamColorAspectsInfo::output::PARAM_TYPE,
     },
 };
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hal/services/Android.bp
similarity index 96%
rename from media/codec2/hidl/services/Android.bp
rename to media/codec2/hal/services/Android.bp
index b36e80a..524519c 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hal/services/Android.bp
@@ -85,6 +85,9 @@
         arm64: {
             src: "seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy",
         },
+        riscv64: {
+            src: "seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy",
+        },
         x86: {
             src: "seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy",
         },
@@ -96,3 +99,4 @@
     // This may be removed.
     required: ["crash_dump.policy"],
 }
+
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hal/services/android.hardware.media.c2@1.2-default-service.rc
similarity index 100%
rename from media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
rename to media/codec2/hal/services/android.hardware.media.c2@1.2-default-service.rc
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_0_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_0_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_0_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_0_default.xml
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_1_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_1_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_1_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_1_default.xml
diff --git a/media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml b/media/codec2/hal/services/manifest_media_c2_V1_2_default.xml
similarity index 100%
rename from media/codec2/hidl/services/manifest_media_c2_V1_2_default.xml
rename to media/codec2/hal/services/manifest_media_c2_V1_2_default.xml
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-arm64.policy
diff --git a/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy
new file mode 100644
index 0000000..27f0b95
--- /dev/null
+++ b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-riscv64.policy
@@ -0,0 +1,75 @@
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+futex: 1
+# ioctl calls are filtered via the selinux policy.
+ioctl: 1
+sched_yield: 1
+close: 1
+dup: 1
+ppoll: 1
+mprotect: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+mmap: arg2 in ~PROT_EXEC || arg2 in ~PROT_WRITE
+getuid: 1
+getrlimit: 1
+fstat: 1
+newfstatat: 1
+fstatfs: 1
+memfd_create: 1
+ftruncate: 1
+
+mremap: arg3 == 3 || arg3 == MREMAP_MAYMOVE
+munmap: 1
+prctl: 1
+writev: 1
+sigaltstack: 1
+clone: 1
+exit: 1
+lseek: 1
+rt_sigprocmask: 1
+openat: 1
+write: 1
+nanosleep: 1
+setpriority: 1
+set_tid_address: 1
+getdents64: 1
+readlinkat: 1
+read: 1
+pread64: 1
+gettimeofday: 1
+faccessat: 1
+exit_group: 1
+restart_syscall: 1
+rt_sigreturn: 1
+getrandom: 1
+madvise: 1
+
+# crash dump policy additions
+clock_gettime: 1
+getpid: 1
+gettid: 1
+pipe2: 1
+recvmsg: 1
+process_vm_readv: 1
+tgkill: 1
+rt_sigaction: 1
+rt_tgsigqueueinfo: 1
+#mprotect: arg2 in 0x1|0x2
+munmap: 1
+#mmap: arg2 in 0x1|0x2
+geteuid: 1
+getgid: 1
+getegid: 1
+getgroups: 1
+
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86.policy
diff --git a/media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy b/media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
similarity index 100%
rename from media/codec2/hidl/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
rename to media/codec2/hal/services/seccomp_policy/android.hardware.media.c2@1.2-default-x86_64.policy
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hal/services/vendor.cpp
similarity index 100%
rename from media/codec2/hidl/services/vendor.cpp
rename to media/codec2/hal/services/vendor.cpp
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 5a652a3..e4daf5c 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -44,7 +44,7 @@
     ],
 
     static_libs: [
-        "SurfaceFlingerProperties",
+        "libSurfaceFlingerProperties",
     ],
 
     shared_libs: [
@@ -71,10 +71,11 @@
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx",
-	"libstagefright_surface_utils",
+        "libstagefright_surface_utils",
         "libstagefright_xmlparser",
         "libui",
         "libutils",
+        "server_configurable_flags",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ed7d69c..92cfe31 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -230,6 +230,12 @@
             err = OK;
             break;
         }
+        case OMX_IndexParamConsumerUsageBits64: {
+            OMX_U64 *usage = (OMX_U64 *)params;
+            *usage = mUsage;
+            err = OK;
+            break;
+        }
         case OMX_IndexParamPortDefinition: {
             if (size < sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
                 return BAD_VALUE;
@@ -293,6 +299,13 @@
             }
             mUsage = *((OMX_U32 *)params);
             return OK;
+
+        case OMX_IndexParamConsumerUsageBits64:
+            if (size != sizeof(OMX_U64)) {
+                return BAD_VALUE;
+            }
+            mUsage = *((OMX_U64 *)params);
+            return OK;
     }
     return ERROR_UNSUPPORTED;
 }
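
For context on the new OMX_IndexParamConsumerUsageBits64 index handled above: it lets a caller round-trip the full 64-bit consumer usage through C2OMXNode instead of truncating to 32 bits. Below is a minimal sketch of such a query, assuming an already-created sp<C2OMXNode> named `node`; the 32-bit fallback mirrors what the CCodec.cpp hunk further down does on the setParameter side. This is an illustration only, not part of the patch.

    // Sketch: read back the 64-bit consumer usage, falling back to the
    // legacy 32-bit index when the 64-bit one is not understood.
    // `node` (sp<C2OMXNode>) is assumed to exist; names outside the patch
    // are hypothetical.
    OMX_U64 usage64 = 0;
    status_t err = node->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
            &usage64, sizeof(usage64));
    if (err != OK) {
        OMX_U32 usage32 = 0;
        err = node->getParameter(
                (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                &usage32, sizeof(usage32));
        usage64 = usage32;
    }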
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 296d7ed..5e53acc 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -206,12 +206,19 @@
         mNode = new C2OMXNode(comp);
         mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(mNode);
         mNode->setFrameSize(mWidth, mHeight);
-
         // Usage is queried during configure(), so setting it beforehand.
-        OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
-        (void)mNode->setParameter(
-                (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
-                &usage, sizeof(usage));
+        // The 64-bit setParameter() exists only in C2OMXNode.
+        OMX_U64 usage64 = mConfig.mUsage;
+        status_t res = mNode->setParameter(
+                (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
+                &usage64, sizeof(usage64));
+
+        if (res != OK) {
+            OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
+            (void)mNode->setParameter(
+                    (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+                    &usage, sizeof(usage));
+        }
 
         return GetStatus(mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
@@ -877,6 +884,16 @@
                     if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
                         config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
                     }
+                    // Secure components and protected content default to the
+                    // "push-blank-buffers-on-shutdown" flag
+                    if (!config->mPushBlankBuffersOnStop) {
+                        int32_t usageProtected;
+                        if (comp->getName().find(".secure") != std::string::npos) {
+                            config->mPushBlankBuffersOnStop = true;
+                        } else if (msg->findInt32("protected", &usageProtected) && usageProtected) {
+                            config->mPushBlankBuffersOnStop = true;
+                        }
+                    }
                 }
             }
             setSurface(surface);
@@ -1014,7 +1031,7 @@
             C2StoreFlexiblePixelFormatDescriptorsInfo *pixelFormatInfo = nullptr;
             int vendorSdkVersion = base::GetIntProperty(
                     "ro.vendor.build.version.sdk", android_get_device_api_level());
-            if (vendorSdkVersion >= __ANDROID_API_S__ && mClient->query(
+            if (mClient->query(
                         {},
                         {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
                         C2_MAY_BLOCK,
@@ -1075,8 +1092,7 @@
             } else {
                 if ((config->mDomain & Config::IS_ENCODER) || !surface) {
                     if (vendorSdkVersion < __ANDROID_API_S__ &&
-                            (format == COLOR_FormatYUV420Flexible ||
-                             format == COLOR_FormatYUV420Planar ||
+                            (format == COLOR_FormatYUV420Planar ||
                              format == COLOR_FormatYUV420PackedPlanar ||
                              format == COLOR_FormatYUV420SemiPlanar ||
                              format == COLOR_FormatYUV420PackedSemiPlanar)) {
@@ -1812,6 +1828,7 @@
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
+
     err2 = mChannel->start(inputFormat, outputFormat, buffersBoundToCodec);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -1869,18 +1886,14 @@
         }
         state->set(STOPPING);
     }
-    {
-        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-        const std::unique_ptr<Config> &config = *configLocked;
-        if (config->mPushBlankBuffersOnStop) {
-            mChannel->pushBlankBufferToOutputSurface();
-        }
-    }
     mChannel->reset();
-    (new AMessage(kWhatStop, this))->post();
+    bool pushBlankBuffer = mConfig.lock().get()->mPushBlankBuffersOnStop;
+    sp<AMessage> stopMessage(new AMessage(kWhatStop, this));
+    stopMessage->setInt32("pushBlankBuffer", pushBlankBuffer);
+    stopMessage->post();
 }
 
-void CCodec::stop() {
+void CCodec::stop(bool pushBlankBuffer) {
     std::shared_ptr<Codec2Client::Component> comp;
     {
         Mutexed<State>::Locked state(mState);
@@ -1899,6 +1912,7 @@
         comp = state->comp;
     }
     status_t err = comp->stop();
+    mChannel->stopUseOutputSurface(pushBlankBuffer);
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -1963,21 +1977,16 @@
             config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
-    {
-        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-        const std::unique_ptr<Config> &config = *configLocked;
-        if (config->mPushBlankBuffersOnStop) {
-            mChannel->pushBlankBufferToOutputSurface();
-        }
-    }
 
     mChannel->reset();
+    bool pushBlankBuffer = mConfig.lock().get()->mPushBlankBuffersOnStop;
     // thiz holds strong ref to this while the thread is running.
     sp<CCodec> thiz(this);
-    std::thread([thiz, sendCallback] { thiz->release(sendCallback); }).detach();
+    std::thread([thiz, sendCallback, pushBlankBuffer]
+                { thiz->release(sendCallback, pushBlankBuffer); }).detach();
 }
 
-void CCodec::release(bool sendCallback) {
+void CCodec::release(bool sendCallback, bool pushBlankBuffer) {
     std::shared_ptr<Codec2Client::Component> comp;
     {
         Mutexed<State>::Locked state(mState);
@@ -1992,6 +2001,7 @@
         comp = state->comp;
     }
     comp->release();
+    mChannel->stopUseOutputSurface(pushBlankBuffer);
 
     {
         Mutexed<State>::Locked state(mState);
@@ -2005,6 +2015,7 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
+    bool pushBlankBuffer = false;
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
@@ -2030,8 +2041,9 @@
                 return err;
             }
         }
+        pushBlankBuffer = config->mPushBlankBuffersOnStop;
     }
-    return mChannel->setSurface(surface);
+    return mChannel->setSurface(surface, pushBlankBuffer);
 }
 
 void CCodec::signalFlush() {
@@ -2120,6 +2132,25 @@
         RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
     }
 
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    if (err != OK) {
+        if (err == NO_MEMORY) {
+            // NO_MEMORY happens here when all the buffers are still
+            // with the codec. That is not an error, as it is momentary
+            // and the buffers are sent to the client as soon as the codec
+            // releases them.
+            ALOGI("Resuming with all input buffers still with codec");
+        } else {
+            ALOGE("Resume request for Input Buffers failed");
+            mCallback->onError(err, ACTION_CODE_FATAL);
+            return;
+        }
+    }
+
+    // Channel start should be called after prepareInitialInputBuffers().
+    // Calling it earlier can cause a failure during preparation, because
+    // onWorkDone may send buffers to the client before preparation completes.
     (void)mChannel->start(nullptr, nullptr, [&]{
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
@@ -2137,13 +2168,6 @@
         state->set(RUNNING);
     }
 
-    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
-    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
-    if (err != OK) {
-        ALOGE("Resume request for Input Buffers failed");
-        mCallback->onError(err, ACTION_CODE_FATAL);
-        return;
-    }
     mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
@@ -2329,7 +2353,11 @@
         case kWhatStop: {
             // C2Component::stop() should return within 500ms.
             setDeadline(now, 1500ms, "stop");
-            stop();
+            int32_t pushBlankBuffer;
+            if (!msg->findInt32("pushBlankBuffer", &pushBlankBuffer)) {
+                pushBlankBuffer = 0;
+            }
+            stop(static_cast<bool>(pushBlankBuffer));
             break;
         }
         case kWhatFlush: {
@@ -2552,53 +2580,6 @@
             pendingDeadline = true;
         }
     }
-    bool tunneled = false;
-    bool isMediaTypeKnown = false;
-    {
-        static const std::set<std::string> kKnownMediaTypes{
-            MIMETYPE_VIDEO_VP8,
-            MIMETYPE_VIDEO_VP9,
-            MIMETYPE_VIDEO_AV1,
-            MIMETYPE_VIDEO_AVC,
-            MIMETYPE_VIDEO_HEVC,
-            MIMETYPE_VIDEO_MPEG4,
-            MIMETYPE_VIDEO_H263,
-            MIMETYPE_VIDEO_MPEG2,
-            MIMETYPE_VIDEO_RAW,
-            MIMETYPE_VIDEO_DOLBY_VISION,
-
-            MIMETYPE_AUDIO_AMR_NB,
-            MIMETYPE_AUDIO_AMR_WB,
-            MIMETYPE_AUDIO_MPEG,
-            MIMETYPE_AUDIO_AAC,
-            MIMETYPE_AUDIO_QCELP,
-            MIMETYPE_AUDIO_VORBIS,
-            MIMETYPE_AUDIO_OPUS,
-            MIMETYPE_AUDIO_G711_ALAW,
-            MIMETYPE_AUDIO_G711_MLAW,
-            MIMETYPE_AUDIO_RAW,
-            MIMETYPE_AUDIO_FLAC,
-            MIMETYPE_AUDIO_MSGSM,
-            MIMETYPE_AUDIO_AC3,
-            MIMETYPE_AUDIO_EAC3,
-
-            MIMETYPE_IMAGE_ANDROID_HEIC,
-        };
-        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
-        const std::unique_ptr<Config> &config = *configLocked;
-        tunneled = config->mTunneled;
-        isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
-    }
-    if (!tunneled && isMediaTypeKnown && name.empty()) {
-        constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
-        std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
-        if (elapsed >= kWorkDurationThreshold) {
-            name = "queue";
-        }
-        if (elapsed > 0s) {
-            pendingDeadline = true;
-        }
-    }
     if (name.empty()) {
         // We're not stuck.
         if (pendingDeadline) {
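
The push-blank-buffer handling in the CCodec.cpp hunks above is driven by the "push-blank-buffers-on-shutdown" format key (KEY_PUSH_BLANK_BUFFERS_ON_STOP), with secure components and protected content now defaulting to it. A hypothetical client-side sketch of opting in explicitly through the NDK AMediaFormat API follows; the key string is taken from the comment above, and the snippet is an illustration, not part of the patch.

    // Sketch: request blank-buffer push on stop when configuring a decoder.
    #include <media/NdkMediaFormat.h>

    AMediaFormat *format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(format, "push-blank-buffers-on-shutdown", 1);
    // ... then pass `format` to AMediaCodec_configure() along with the
    // output surface as usual ...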
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 5ecb130..d72d228 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -34,6 +34,7 @@
 
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android-base/parseint.h>
 #include <android-base/properties.h>
 #include <android-base/stringprintf.h>
 #include <binder/MemoryBase.h>
@@ -52,6 +53,7 @@
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/MediaCodecBuffer.h>
 #include <mediadrm/ICrypto.h>
+#include <server_configurable_flags/get_flags.h>
 #include <system/window.h>
 
 #include "CCodecBufferChannel.h"
@@ -75,7 +77,6 @@
 namespace {
 
 constexpr size_t kSmoothnessFactor = 4;
-constexpr size_t kRenderingDepth = 3;
 
 // This is for keeping IGBP's buffer dropping logic in legacy mode other
 // than making it non-blocking. Do not change this value.
@@ -147,10 +148,12 @@
       mCCodecCallback(callback),
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
+      mIsSurfaceToDisplay(false),
+      mHasPresentFenceTimes(false),
+      mRenderingDepth(3u),
       mMetaMode(MODE_NONE),
       mInputMetEos(false),
       mSendEncryptedInfoBuffer(false) {
-    mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
     {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
@@ -165,11 +168,16 @@
         Mutexed<Output>::Locked output(mOutput);
         output->outputDelay = 0u;
         output->numSlots = kSmoothnessFactor;
+        output->bounded = false;
     }
     {
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
+    std::string value = server_configurable_flags::GetServerConfigurableFlag(
+            "media_native", "ccodec_rendering_depth", "3");
+    android::base::ParseInt(value, &mRenderingDepth);
+    mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
 
 CCodecBufferChannel::~CCodecBufferChannel() {
@@ -226,6 +234,9 @@
     if (buffer->meta()->findInt32("tunnel-first-frame", &tmp) && tmp) {
         tunnelFirstFrame = true;
     }
+    if (buffer->meta()->findInt32("decode-only", &tmp) && tmp) {
+        flags |= C2FrameData::FLAG_DROP_FRAME;
+    }
     ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
     std::list<std::unique_ptr<C2Work>> items;
     std::unique_ptr<C2Work> work(new C2Work);
@@ -423,7 +434,8 @@
         size_t offset,
         const CryptoPlugin::SubSample *subSamples,
         size_t numSubSamples,
-        const sp<MediaCodecBuffer> &buffer) {
+        const sp<MediaCodecBuffer> &buffer,
+        AString* errorDetailMsg) {
     static const C2MemoryUsage kSecureUsage{C2MemoryUsage::READ_PROTECTED, 0};
     static const C2MemoryUsage kDefaultReadWriteUsage{
         C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -453,7 +465,6 @@
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (mCrypto) {
-        AString errorDetailMsg;
         int32_t heapSeqNum = getHeapSeqNum(memory);
         hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
         hardware::drm::V1_0::DestinationBuffer dst;
@@ -467,7 +478,7 @@
         }
         result = mCrypto->decrypt(
                 key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
-                dst, &errorDetailMsg);
+                dst, errorDetailMsg);
         if (result < 0) {
             ALOGI("[%s] attachEncryptedBuffer: decrypt failed: result = %zd", mName, result);
             return result;
@@ -512,7 +523,9 @@
                     result = (ssize_t)_bytesWritten;
                     detailedError = _detailedError;
                 });
-
+        if (errorDetailMsg) {
+            errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+        }
         if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
             ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
                     mName, returnVoid.description().c_str(), status, result);
@@ -720,7 +733,7 @@
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers ||
                 output->buffers->hasPending() ||
-                output->buffers->numActiveSlots() >= output->numSlots) {
+                (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
             return;
         }
     }
@@ -899,7 +912,7 @@
     }
 
     // TODO: revisit this after C2Fence implementation.
-    android::IGraphicBufferProducer::QueueBufferInput qbi(
+    IGraphicBufferProducer::QueueBufferInput qbi(
             timestampNs,
             false, // droppable
             dataSpace,
@@ -960,12 +973,12 @@
                     hdrDynamicInfo->m.data + hdrDynamicInfo->flexCount());
         }
         qbi.setHdrMetadata(hdr);
-
-        SetHdrMetadataToGralloc4Handle(hdrStaticInfo, hdrDynamicInfo, block.handle());
     }
-    // we don't have dirty regions
-    qbi.setSurfaceDamage(Region::INVALID_REGION);
-    android::IGraphicBufferProducer::QueueBufferOutput qbo;
+    SetMetadataToGralloc4Handle(dataSpace, hdrStaticInfo, hdrDynamicInfo, block.handle());
+
+    qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
+    qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
+    IGraphicBufferProducer::QueueBufferOutput qbo;
     status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
     if (result != OK) {
         ALOGI("[%s] queueBuffer failed: %d", mName, result);
@@ -983,11 +996,123 @@
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-    mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+    if (mIsSurfaceToDisplay) {
+        trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+        processRenderedFrames(qbo.frameTimestamps);
+    } else {
+        // When the surface is an intermediate (non-display) surface, onFrameRendered
+        // is triggered immediately when the frame is queued to it.
+        mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+    }
 
     return OK;
 }
 
+void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+    mTrackedFrames.clear();
+
+    int isSurfaceToDisplay = 0;
+    window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &isSurfaceToDisplay);
+    mIsSurfaceToDisplay = isSurfaceToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsSurfaceToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (requiring permissions)
+        return;
+    }
+
+    int hasPresentFenceTimes = 0;
+    window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
+    mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+    if (!mHasPresentFenceTimes) {
+        ALOGI("Using latch times for frame rendered signals - present fences not supported");
+    }
+}
+
+void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+                                             int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+    // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+    // so track the frame as if the desired render time is now.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (desiredRenderTimeNs < nowNs) {
+        desiredRenderTimeNs = nowNs;
+    }
+    // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+    // actually rendered.
+    TrackedFrame frame;
+    frame.number = qbo.nextFrameNumber - 1;
+    frame.mediaTimeUs = mediaTimeUs;
+    frame.desiredRenderTimeNs = desiredRenderTimeNs;
+    frame.latchTime = -1;
+    frame.presentFence = nullptr;
+    mTrackedFrames.push_back(frame);
+}
+
+void CCodecBufferChannel::processRenderedFrames(const FrameEventHistoryDelta& deltas) {
+    // Grab the latch times and present fences from the frame event deltas
+    for (const auto& delta : deltas) {
+        for (auto& frame : mTrackedFrames) {
+            if (delta.getFrameNumber() == frame.number) {
+                delta.getLatchTime(&frame.latchTime);
+                delta.getDisplayPresentFence(&frame.presentFence);
+            }
+        }
+    }
+
+    // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+    // in fact, been rendered.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    while (!mTrackedFrames.empty()) {
+        TrackedFrame & frame = mTrackedFrames.front();
+        // Frames that should have been rendered at least 100ms in the past are checked
+        if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+            break;
+        }
+
+        // If we don't have a render time by now, then consider the frame as dropped
+        int64_t renderTimeNs = getRenderTimeNs(frame);
+        if (renderTimeNs != -1) {
+            mCCodecCallback->onOutputFramesRendered(frame.mediaTimeUs, renderTimeNs);
+        }
+        mTrackedFrames.pop_front();
+    }
+}
+
+int64_t CCodecBufferChannel::getRenderTimeNs(const TrackedFrame& frame) {
+    // If the device doesn't have accurate present fence times, then use the latch time as a proxy
+    if (!mHasPresentFenceTimes) {
+        if (frame.latchTime == -1) {
+            ALOGD("no latch time for frame %d", (int) frame.number);
+            return -1;
+        }
+        return frame.latchTime;
+    }
+
+    if (frame.presentFence == nullptr) {
+        ALOGW("no present fence for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    nsecs_t actualRenderTimeNs = frame.presentFence->getSignalTime();
+
+    if (actualRenderTimeNs == Fence::SIGNAL_TIME_INVALID) {
+        ALOGW("invalid signal time for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    if (actualRenderTimeNs == Fence::SIGNAL_TIME_PENDING) {
+        ALOGD("present fence has not fired for frame %d", (int) frame.number);
+        return -1;
+    }
+
+    return actualRenderTimeNs;
+}
+
+void CCodecBufferChannel::pollForRenderedBuffers() {
+    FrameEventHistoryDelta delta;
+    mComponent->pollForRenderedFrames(&delta);
+    processRenderedFrames(delta);
+}
+
 status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
     bool released = false;
@@ -1267,7 +1392,7 @@
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
             maxDequeueCount = output->maxDequeueBuffers = numOutputSlots +
-                    reorderDepth.value + kRenderingDepth;
+                    reorderDepth.value + mRenderingDepth;
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
             if (outputSurface) {
@@ -1390,6 +1515,7 @@
         Mutexed<Output>::Locked output(mOutput);
         output->outputDelay = outputDelayValue;
         output->numSlots = numOutputSlots;
+        output->bounded = bool(outputSurface);
         if (graphic) {
             if (outputSurface || !buffersBoundToCodec) {
                 output->buffers.reset(new GraphicOutputBuffers(mName));
@@ -1578,6 +1704,25 @@
     mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
 }
 
+void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
+    sp<Surface> surface = mOutputSurface.lock()->surface;
+    if (surface) {
+        C2BlockPool::local_id_t outputPoolId;
+        {
+            Mutexed<BlockPools>::Locked pools(mBlockPools);
+            outputPoolId = pools->outputPoolId;
+        }
+        if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+
+        if (pushBlankBuffer) {
+            sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
+            if (anw) {
+                pushBlankBuffersToNativeWindow(anw.get());
+            }
+        }
+    }
+}
+
 void CCodecBufferChannel::reset() {
     stop();
     if (mInputSurface != nullptr) {
@@ -1593,14 +1738,8 @@
         Mutexed<Output>::Locked output(mOutput);
         output->buffers.reset();
     }
-    if (mOutputSurface.lock()->surface) {
-        C2BlockPool::local_id_t outputPoolId;
-        {
-            Mutexed<BlockPools>::Locked pools(mBlockPools);
-            outputPoolId = pools->outputPoolId;
-        }
-        mComponent->stopUsingOutputSurface(outputPoolId);
-    }
+    // reset the frames that are being tracked for onFrameRendered callbacks
+    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::release() {
@@ -1921,7 +2060,7 @@
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
             maxDequeueCount = output->maxDequeueBuffers =
-                    numOutputSlots + reorderDepth + kRenderingDepth;
+                    numOutputSlots + reorderDepth + mRenderingDepth;
             if (output->surface) {
                 output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
             }
@@ -1962,7 +2101,10 @@
     // csd cannot be re-ordered and will always arrive first.
     if (initData != nullptr) {
         Mutexed<Output>::Locked output(mOutput);
-        if (output->buffers && outputFormat) {
+        if (!output->buffers) {
+            return false;
+        }
+        if (outputFormat) {
             output->buffers->updateSkipCutBuffer(outputFormat);
             output->buffers->setFormat(outputFormat);
         }
@@ -1971,12 +2113,15 @@
         }
         size_t index;
         sp<MediaCodecBuffer> outBuffer;
-        if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
+        if (output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
             outBuffer->meta()->setInt64("timeUs", timestamp.peek());
             outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
             ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
 
-            output.unlock();
+            // TRICKY: we want popped buffers reported in order, so we send
+            // the callback while holding the lock here. This assumes that
+            // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
+            // callbacks are always sent with the Output lock held.
             mCallback->onOutputBufferAvailable(index, outBuffer);
         } else {
             ALOGD("[%s] onWorkDone: unable to register csd", mName);
@@ -1992,6 +2137,12 @@
         drop = true;
     }
 
+    // Workaround: if C2FrameData::FLAG_DROP_FRAME is not implemented in
+    // HAL, the flag is then removed in the corresponding output buffer.
+    if (work->input.flags & C2FrameData::FLAG_DROP_FRAME) {
+        flags |= BUFFER_FLAG_DECODE_ONLY;
+    }
+
     if (notifyClient && !buffer && !flags) {
         if (mTunneled && drop && outputFormat) {
             ALOGV("[%s] onWorkDone: Keep tunneled, drop frame with format change (%lld)",
@@ -2060,7 +2211,10 @@
         case OutputBuffers::DISCARD:
             break;
         case OutputBuffers::NOTIFY_CLIENT:
-            output.unlock();
+            // TRICKY: we want popped buffers reported in order, so we send
+            // the callback while holding the lock here. This assumes that
+            // onOutputBufferAvailable() does not block. onOutputBufferAvailable()
+            // callbacks are always sent with the Output lock held.
             mCallback->onOutputBufferAvailable(index, outBuffer);
             break;
         case OutputBuffers::REALLOCATE:
@@ -2094,14 +2248,20 @@
     }
 }
 
-status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
+status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface, bool pushBlankBuffer) {
     static std::atomic_uint32_t surfaceGeneration{0};
     uint32_t generation = (getpid() << 10) |
             ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1)
                 & ((1 << 10) - 1));
 
     sp<IGraphicBufferProducer> producer;
-    int maxDequeueCount = mOutputSurface.lock()->maxDequeueBuffers;
+    int maxDequeueCount;
+    sp<Surface> oldSurface;
+    {
+        Mutexed<OutputSurface>::Locked outputSurface(mOutputSurface);
+        maxDequeueCount = outputSurface->maxDequeueBuffers;
+        oldSurface = outputSurface->surface;
+    }
     if (newSurface) {
         newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
         newSurface->setDequeueTimeout(kDequeueTimeoutNs);
@@ -2136,6 +2296,16 @@
         Mutexed<OutputSurface>::Locked output(mOutputSurface);
         output->surface = newSurface;
         output->generation = generation;
+        initializeFrameTrackingFor(static_cast<ANativeWindow *>(newSurface.get()));
+    }
+
+    if (oldSurface && pushBlankBuffer) {
+        // When ReleaseSurface was set from MediaCodec,
+        // pushing a blank buffer at the end might be necessary.
+        sp<ANativeWindow> anw = static_cast<ANativeWindow *>(oldSurface.get());
+        if (anw) {
+            pushBlankBuffersToNativeWindow(anw.get());
+        }
     }
 
     return OK;
@@ -2227,13 +2397,4 @@
     }
 }
 
-status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
-  Mutexed<OutputSurface>::Locked output(mOutputSurface);
-  sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
-  if (nativeWindow == nullptr) {
-      return INVALID_OPERATION;
-  }
-  return pushBlankBuffersToNativeWindow(nativeWindow.get());
-}
-
 }  // namespace android
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index f29a225..2d87aa9 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -18,6 +18,7 @@
 
 #define CCODEC_BUFFER_CHANNEL_H_
 
+#include <deque>
 #include <map>
 #include <memory>
 #include <vector>
@@ -85,9 +86,11 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) override;
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual void pollForRenderedBuffers() override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
@@ -102,7 +105,7 @@
     /**
      * Set output graphic surface for rendering.
      */
-    status_t setSurface(const sp<Surface> &surface);
+    status_t setSurface(const sp<Surface> &surface, bool pushBlankBuffer);
 
     /**
      * Set GraphicBufferSource object from which the component extracts input
@@ -149,6 +152,14 @@
             std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers);
 
     /**
+     * Stop using buffers of the current output surface so that other Codec
+     * instances can use the surface safely.
+     *
+     * \param[in] pushBlankBuffer    push a blank buffer at the end if true
+     */
+    void stopUseOutputSurface(bool pushBlankBuffer);
+
+    /**
      * Stop queueing buffers to the component. This object should never queue
      * buffers after this call, until start() is called.
      */
@@ -195,11 +206,6 @@
 
     void setMetaMode(MetaMode mode);
 
-    /**
-     * Push a blank buffer to the configured native output surface.
-     */
-    status_t pushBlankBufferToOutputSurface();
-
 private:
     class QueueGuard;
 
@@ -257,6 +263,14 @@
         bool mRunning;
     };
 
+    struct TrackedFrame {
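+        // Bookkeeping for a frame queued to the output surface; these entries
+        // are tracked for onFrameRendered callbacks (see mTrackedFrames).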
+        uint64_t number;
+        int64_t mediaTimeUs;
+        int64_t desiredRenderTimeNs;
+        nsecs_t latchTime;
+        sp<Fence> presentFence;
+    };
+
     void feedInputBufferIfAvailable();
     void feedInputBufferIfAvailableInternal();
     status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer,
@@ -269,6 +283,12 @@
     void ensureDecryptDestination(size_t size);
     int32_t getHeapSeqNum(const sp<hardware::HidlMemory> &memory);
 
+    void initializeFrameTrackingFor(ANativeWindow * window);
+    void trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+                            int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+    void processRenderedFrames(const FrameEventHistoryDelta& delta);
+    int64_t getRenderTimeNs(const TrackedFrame& frame);
+
     QueueSync mSync;
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
@@ -301,6 +321,9 @@
         std::unique_ptr<OutputBuffers> buffers;
         size_t numSlots;
         uint32_t outputDelay;
+        // true iff the underlying block pool is bounded --- for example,
+        // a BufferQueue-based block pool would be bounded by the BufferQueue.
+        bool bounded;
     };
     Mutexed<Output> mOutput;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mFlushedConfigs;
@@ -310,6 +333,10 @@
 
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
+    std::deque<TrackedFrame> mTrackedFrames;
+    bool mIsSurfaceToDisplay;
+    bool mHasPresentFenceTimes;
+
     struct OutputSurface {
         sp<Surface> surface;
         uint32_t generation;
@@ -317,6 +344,7 @@
         std::map<uint64_t, int> rotation;
     };
     Mutexed<OutputSurface> mOutputSurface;
+    int mRenderingDepth;
 
     struct BlockPools {
         C2Allocator::id_t inputAllocatorId;
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index c8e9930..6335f13 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -72,7 +72,7 @@
     virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
 
     /**
-     * Return number of buffers the client owns.
+     * Return number of buffers owned by the client or the component.
      */
     virtual size_t numActiveSlots() const = 0;
 
@@ -595,8 +595,7 @@
     void flush();
 
     /**
-     * Return the number of buffers that are sent to the client but not released
-     * yet.
+     * Return the number of buffers that are sent to the client or the component.
      */
     size_t numActiveSlots() const;
 
@@ -716,8 +715,7 @@
     void grow(size_t newSize, std::function<sp<Codec2Buffer>()> alloc);
 
     /**
-     * Return the number of buffers that are sent to the client but not released
-     * yet.
+     * Return the number of buffers that are sent to the client or the component.
      */
     size_t numActiveSlots() const;
 
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 5208be6..6d49fa8 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -285,6 +285,12 @@
         }
     }
 
+    // Updates or adds the config mappers for an SdkKey
+    void updateConfigMappersForKey(const SdkKey& key,
+            const std::vector<ConfigMapper>& vec_cm) {
+        mConfigMappers.insert_or_assign(key, vec_cm);
+    }
+
     /**
      * Returns all paths for a specific domain.
      *
@@ -389,7 +395,7 @@
             // read back always as int
             float value;
             if (v.get(&value)) {
-                return (int32_t)value;
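+                // round to the nearest integer instead of truncating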
+                return (int32_t) (value + 0.5);
             }
             return C2Value();
         }));
@@ -619,30 +625,30 @@
         .limitTo(D::OUTPUT & D::READ));
 
     deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".type",
             C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "type")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".data",
             C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "data")
-        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".type",
             C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "type")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(
             std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".data",
             C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "data")
-        .limitTo(D::VIDEO & D::OUTPUT));
+        .limitTo(D::VIDEO & D::OUTPUT & D::READ));
 
     add(ConfigMapper(C2_PARAMKEY_TEMPORAL_LAYERING, C2_PARAMKEY_TEMPORAL_LAYERING, "")
         .limitTo(D::ENCODER & D::VIDEO & D::OUTPUT));
@@ -934,6 +940,9 @@
     add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
         .limitTo(D::AUDIO & D::DECODER & D::READ));
 
+    add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
+        .limitTo(D::AUDIO & D::ENCODER & D::CONFIG));
+
     add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
         .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
         .withMapper([](C2Value v) -> C2Value {
@@ -955,7 +964,7 @@
         .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
     add(ConfigMapper(KEY_FLAC_COMPRESSION_LEVEL, C2_PARAMKEY_COMPLEXITY, "value")
         .limitTo(D::AUDIO & D::ENCODER));
-    add(ConfigMapper("complexity", C2_PARAMKEY_COMPLEXITY, "value")
+    add(ConfigMapper(KEY_COMPLEXITY, C2_PARAMKEY_COMPLEXITY, "value")
         .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
 
     add(ConfigMapper(KEY_GRID_COLUMNS, C2_PARAMKEY_TILE_LAYOUT, "columns")
@@ -1914,6 +1923,67 @@
         const sp<AMessage> &sdkParams, Domain configDomain,
         c2_blocking_t blocking,
         std::vector<std::unique_ptr<C2Param>> *configUpdate) const {
+    // Update the mappers if we know more about this format,
+    // e.g. whether an AV1 10-bit or 8-bit encode is requested.
+    AString mime;
+    int32_t requestedSdkProfile = -1;
+    if ((mDomain == (IS_VIDEO | IS_ENCODER)) &&
+            sdkParams->findString(KEY_MIME, &mime) &&
+            mime == MIMETYPE_VIDEO_AV1) {
+
+        sdkParams->findInt32(KEY_PROFILE, &requestedSdkProfile);
+        bool is10bAv1EncodeRequested = (requestedSdkProfile == AV1ProfileMain10);
+
+        int32_t bitDepth = (is10bAv1EncodeRequested) ? 10 : 8;
+        // We always initialize with an 8-bit mapper; update it only if needed.
+        if (bitDepth != 8) {
+            std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+                C2Mapper::GetBitDepthProfileLevelMapper(mCodingMediaType, bitDepth);
+            mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_PROFILE),
+                {
+                    ConfigMapper(KEY_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+                    .limitTo(Domain::CODED)
+                    .withMappers([mapper](C2Value v) -> C2Value {
+                        C2Config::profile_t c2 = PROFILE_UNUSED;
+                        int32_t sdk;
+                        if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+                            return c2;
+                        }
+                        return PROFILE_UNUSED;
+                        }, [mapper](C2Value v) -> C2Value {
+                        C2Config::profile_t c2;
+                        int32_t sdk;
+                        using C2ValueType =
+                            typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+                        if (mapper && v.get((C2ValueType*)&c2) && mapper->mapProfile(c2, &sdk)) {
+                            return sdk;
+                        }
+                        return C2Value();
+                })});
+            mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_LEVEL),
+                {
+                    ConfigMapper(KEY_LEVEL, C2_PARAMKEY_PROFILE_LEVEL, "level")
+                    .limitTo(Domain::CODED)
+                    .withMappers([mapper](C2Value v) -> C2Value {
+                        C2Config::level_t c2 = LEVEL_UNUSED;
+                        int32_t sdk;
+                        if (mapper && v.get(&sdk) && mapper->mapLevel(sdk, &c2)) {
+                            return c2;
+                        }
+                        return LEVEL_UNUSED;
+                        }, [mapper](C2Value v) -> C2Value {
+                        C2Config::level_t c2;
+                        int32_t sdk;
+                        using C2ValueType =
+                            typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+                        if (mapper && v.get((C2ValueType*)&c2) && mapper->mapLevel(c2, &sdk)) {
+                            return sdk;
+                        }
+                        return C2Value();
+                })});
+        }
+    }
+
     ReflectedParamUpdater::Dict params = getReflectedFormat(sdkParams, configDomain);
 
     std::vector<C2Param::Index> indices;
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index cde4c72..82c31a8 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -22,6 +22,7 @@
 
 #include <aidl/android/hardware/graphics/common/Cta861_3.h>
 #include <aidl/android/hardware/graphics/common/Smpte2086.h>
+#include <android-base/no_destructor.h>
 #include <android-base/properties.h>
 #include <android/hardware/cas/native/1.0/types.h>
 #include <android/hardware/drm/1.0/types.h>
@@ -533,7 +534,7 @@
                         * align(mHeight, 64) / plane.rowSampling;
             }
 
-            if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
+            if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
                 // FIXME: this is risky as reading/writing data out of bound results
                 //        in an undefined behavior, but gralloc does assume a
                 //        contiguous mapping
@@ -545,8 +546,7 @@
                     mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
                     mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
                 }
-                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
-                                       maxPtr - minPtr + 1);
+                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
                 ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
             }
         }
@@ -843,6 +843,10 @@
         }
     }
     sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
+    if (aBuffer == nullptr) {
+        ALOGD("%s: failed to allocate buffer", __func__);
+        return nullptr;
+    }
     return new ConstGraphicBlockBuffer(
             format,
             aBuffer,
@@ -998,9 +1002,11 @@
 }
 
 using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Dataspace;
 using ::aidl::android::hardware::graphics::common::Smpte2086;
 
 using ::android::gralloc4::MetadataType_Cta861_3;
+using ::android::gralloc4::MetadataType_Dataspace;
 using ::android::gralloc4::MetadataType_Smpte2086;
 using ::android::gralloc4::MetadataType_Smpte2094_40;
 
@@ -1013,8 +1019,8 @@
 namespace {
 
 sp<IMapper4> GetMapper4() {
-    static sp<IMapper4> sMapper = IMapper4::getService();
-    return sMapper;
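+    // NoDestructor keeps the static mapper alive for the process lifetime
+    // without running the sp<> destructor at exit.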
+    static ::android::base::NoDestructor<sp<IMapper4>> sMapper(IMapper4::getService());
+    return *sMapper;
 }
 
 class Gralloc4Buffer {
@@ -1132,6 +1138,11 @@
             err = C2_CORRUPTED;
         }
     }
+
+    if (err != C2_OK) {
+        staticInfo->reset();
+    }
+
     if (dynamicInfo) {
         ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
         dynamicInfo->reset();
@@ -1156,7 +1167,8 @@
     return err;
 }
 
-c2_status_t SetHdrMetadataToGralloc4Handle(
+c2_status_t SetMetadataToGralloc4Handle(
+        android_dataspace_t dataSpace,
         const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
         const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
         const C2Handle *const handle) {
@@ -1167,6 +1179,17 @@
         // Gralloc4 not supported; nothing to do
         return err;
     }
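+    // Set the dataspace on the buffer through gralloc4 metadata.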
+    {
+        hidl_vec<uint8_t> metadata;
+        if (gralloc4::encodeDataspace(static_cast<Dataspace>(dataSpace), &metadata) == OK) {
+            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Dataspace, metadata);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        }
+    }
     if (staticInfo && *staticInfo) {
         ALOGV("Setting static HDR info as gralloc4 metadata");
         std::optional<Smpte2086> smpte2086 = Smpte2086{
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index b02b042..b73acab 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -410,14 +410,16 @@
         std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
 
 /**
- * Set HDR metadata to Gralloc4 handle.
+ * Set metadata to Gralloc4 handle.
  *
+ * \param[in]   dataSpace   Dataspace to set.
  * \param[in]   staticInfo  HDR static info to set. Ignored if null or invalid.
  * \param[in]   dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
  * \param[out]  handle      handle of the allocation.
  * \return C2_OK if successful
  */
-c2_status_t SetHdrMetadataToGralloc4Handle(
+c2_status_t SetMetadataToGralloc4Handle(
+        const android_dataspace_t dataSpace,
         const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
         const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
         const C2Handle *const handle);
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 1c362ae..453a0d2 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -156,9 +156,10 @@
     // dynamic metadata as that needs to be frame accurate.)
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
 
-    // HDR support implies 10-bit support.
+    // HDR support implies 10-bit support. AV1 codecs are also required to
+    // support 10-bit per CDD.
     // TODO: directly check this from the component interface
-    supports10Bit = (supportsHdr || supportsHdr10Plus);
+    supports10Bit = (supportsHdr || supportsHdr10Plus) || (mediaType == MIMETYPE_VIDEO_AV1);
 
     // If the device doesn't support HDR display, then no codec on the device
     // can advertise support for HDR profiles.
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ec18128..13713bc 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -109,9 +109,9 @@
     void allocate(const sp<MediaCodecInfo> &codecInfo);
     void configure(const sp<AMessage> &msg);
     void start();
-    void stop();
+    void stop(bool pushBlankBuffer);
     void flush();
-    void release(bool sendCallback);
+    void release(bool sendCallback, bool pushBlankBuffer);
 
     /**
      * Creates an input surface for the current device configuration compatible with CCodec.
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 807841e..9004bcf 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -313,6 +313,28 @@
             && layout.planes[layout.PLANE_V].rowSampling == 2);
 }
 
+bool IsYUV420_10bit(const C2GraphicView &view) {
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.numPlanes == 3
+            && layout.type == C2PlanarLayout::TYPE_YUV
+            && layout.planes[layout.PLANE_Y].channel == C2PlaneInfo::CHANNEL_Y
+            && layout.planes[layout.PLANE_Y].allocatedDepth == 16
+            && layout.planes[layout.PLANE_Y].bitDepth == 10
+            && layout.planes[layout.PLANE_Y].colSampling == 1
+            && layout.planes[layout.PLANE_Y].rowSampling == 1
+            && layout.planes[layout.PLANE_U].channel == C2PlaneInfo::CHANNEL_CB
+            && layout.planes[layout.PLANE_U].allocatedDepth == 16
+            && layout.planes[layout.PLANE_U].bitDepth == 10
+            && layout.planes[layout.PLANE_U].colSampling == 2
+            && layout.planes[layout.PLANE_U].rowSampling == 2
+            && layout.planes[layout.PLANE_V].channel == C2PlaneInfo::CHANNEL_CR
+            && layout.planes[layout.PLANE_V].allocatedDepth == 16
+            && layout.planes[layout.PLANE_V].bitDepth == 10
+            && layout.planes[layout.PLANE_V].colSampling == 2
+            && layout.planes[layout.PLANE_V].rowSampling == 2);
+}
+
+
 bool IsNV12(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
@@ -327,6 +349,24 @@
             && layout.planes[layout.PLANE_V].offset == 1);
 }
 
+bool IsP010(const C2GraphicView &view) {
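+    // P010: 10-bit YUV 4:2:0, semi-planar with interleaved Cb/Cr stored in
+    // the upper 10 bits of 16-bit samples.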
+    if (!IsYUV420_10bit(view)) {
+        return false;
+    }
+    const C2PlanarLayout &layout = view.layout();
+    return (layout.rootPlanes == 2
+            && layout.planes[layout.PLANE_U].colInc == 4
+            && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_U].offset == 0
+            && layout.planes[layout.PLANE_V].colInc == 4
+            && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_U
+            && layout.planes[layout.PLANE_V].offset == 2
+            && layout.planes[layout.PLANE_Y].rightShift == 6
+            && layout.planes[layout.PLANE_U].rightShift == 6
+            && layout.planes[layout.PLANE_V].rightShift == 6);
+}
+
+
 bool IsNV21(const C2GraphicView &view) {
     if (!IsYUV420(view)) {
         return false;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 9fa642d..6b0ba7f 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -93,11 +93,21 @@
 bool IsYUV420(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a YUV 420 10-10-10 layout.
+ */
+bool IsYUV420_10bit(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV12 layout.
  */
 bool IsNV12(const C2GraphicView &view);
 
 /**
+ * Returns true iff a view has a P010 layout.
+ */
+bool IsP010(const C2GraphicView &view);
+
+/**
  * Returns true iff a view has a NV21 layout.
  */
 bool IsNV21(const C2GraphicView &view);
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ef5800d..332d3ac 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -38,7 +38,7 @@
            !strcmp(deviceCodeName, "Tiramisu");
 }
 
-bool isVendorApiOrFirstApiAtLeastT() {
+static bool isP010Allowed() {
     // The first SDK the device shipped with.
     static const int32_t kProductFirstApiLevel =
         base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
@@ -47,6 +47,17 @@
     // to signal which VSR requirements they conform to even if the first device SDK was higher.
     static const int32_t kBoardFirstApiLevel =
         base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+
+    // Some devices that launched prior to Android S may not support P010 correctly, even
+    // though they may advertise it as supported.
+    if (kProductFirstApiLevel != 0 && kProductFirstApiLevel < __ANDROID_API_S__) {
+        return false;
+    }
+
+    if (kBoardFirstApiLevel != 0 && kBoardFirstApiLevel < __ANDROID_API_S__) {
+        return false;
+    }
+
     static const int32_t kBoardApiLevel =
         base::GetIntProperty<int32_t>("ro.board.api_level", 0);
 
@@ -67,7 +78,7 @@
     // API alone. For now limit P010 to devices that launched with Android T or known to conform
     // to Android T VSR (as opposed to simply limiting to a T vendor image).
     if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
-            !isVendorApiOrFirstApiAtLeastT()) {
+            !isP010Allowed()) {
         return false;
     }
 
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index c606d6f..9297520 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -119,8 +119,8 @@
     { C2Color::PRIMARIES_BT601_525,    ColorAspects::PrimariesBT601_6_525 },
     { C2Color::PRIMARIES_GENERIC_FILM, ColorAspects::PrimariesGenericFilm },
     { C2Color::PRIMARIES_BT2020,       ColorAspects::PrimariesBT2020 },
-//    { C2Color::PRIMARIES_RP431,        ColorAspects::Primaries... },
-//    { C2Color::PRIMARIES_EG432,        ColorAspects::Primaries... },
+    { C2Color::PRIMARIES_RP431,        ColorAspects::PrimariesRP431 },
+    { C2Color::PRIMARIES_EG432,        ColorAspects::PrimariesEG432 },
 //    { C2Color::PRIMARIES_EBU3213,      ColorAspects::Primaries... },
     { C2Color::PRIMARIES_OTHER,        ColorAspects::PrimariesOther },
 };
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index 9c3ba4d..2217235 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -36,6 +36,8 @@
 cc_test {
     name: "codec2_vndk_test",
     test_suites: ["device-tests"],
+    // This test doesn't seem to support isolated execution with its current assumptions.
+    isolated: false,
 
     srcs: [
         "C2_test.cpp",
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 1d8aea3..ba25226 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -80,6 +80,7 @@
         "libbase",
         "libdmabufheap",
         "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.bufferpool2-V1-ndk",
     ],
 
     local_include_dirs: [
@@ -94,8 +95,12 @@
     shared_libs: [
         "android.hardware.graphics.bufferqueue@2.0",
         "android.hardware.graphics.common@1.2",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
         "android.hardware.media.bufferpool@2.0",
+        "android.hardware.media.bufferpool2-V1-ndk",
         "libbase",
+        "libbinder_ndk",
         "libcutils",
         "libdl",
         "libdmabufheap",
@@ -107,6 +112,7 @@
         "libnativewindow",
         "libstagefright_foundation",
         "libstagefright_bufferpool@2.0.1",
+        "libstagefright_aidl_bufferpool2",
         "libui",
         "libutils",
     ],
@@ -135,6 +141,7 @@
         "libnativewindow",
         "libcodec2_soft_common",
         "libsfplugin_ccodec_utils",
+        "libstagefright_aidl_bufferpool2",
         "libstagefright_foundation",
         "libstagefright_bufferpool@2.0.1",
         "libgralloctypes",
@@ -144,9 +151,13 @@
         "android.hardware.graphics.allocator@2.0",
         "android.hardware.graphics.allocator@3.0",
         "android.hardware.graphics.bufferqueue@2.0",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.media.bufferpool2-V1-ndk",
     ],
 
     shared_libs: [
+        "libbinder_ndk",
         "libui",
         "libdl",
         "libvndksupport",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index bc4053d..f272499 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -54,6 +54,10 @@
     static_assert((~C2MemoryUsage::PLATFORM_MASK & PASSTHROUGH_USAGE_MASK) == 0, "");
 } // unnamed
 
+static bool isAtLeastT() {
+    return android_get_device_api_level() >= __ANDROID_API_T__;
+}
+
 C2MemoryUsage C2AndroidMemoryUsage::FromGrallocUsage(uint64_t usage) {
     // gralloc does not support WRITE_PROTECTED
     return C2MemoryUsage(
@@ -702,6 +706,14 @@
         }
 
         case static_cast<uint32_t>(PixelFormat4::YCBCR_P010): {
+            // In Android T, P010 is relaxed to allow arbitrary strides for the Y and UV
+            // planes, so try locking with the gralloc4 mapper first.
+            c2_status_t status = Gralloc4Mapper_lock(
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+            if (status == C2_OK) {
+                break;
+            }
+
             void *pointer = nullptr;
             status_t err = GraphicBufferMapper::get().lock(
                     const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
@@ -760,10 +772,12 @@
         default: {
             // We don't know what it is, let's try to lock it with gralloc4
             android_ycbcr ycbcrLayout;
-            c2_status_t status = Gralloc4Mapper_lock(
-                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
-            if (status == C2_OK) {
-                break;
+            if (isAtLeastT()) {
+                c2_status_t status = Gralloc4Mapper_lock(
+                        const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+                if (status == C2_OK) {
+                    break;
+                }
             }
 
             // fallback to lockYCbCr
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 143355f..018e269 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -29,19 +29,21 @@
 #include <C2BlockInternal.h>
 #include <C2PlatformSupport.h>
 #include <bufferpool/ClientManager.h>
+#include <bufferpool2/ClientManager.h>
 
 namespace {
 
 using android::C2AllocatorBlob;
 using android::C2AllocatorGralloc;
 using android::C2AllocatorIon;
-using android::hardware::media::bufferpool::BufferPoolData;
+
+namespace bufferpool = android::hardware::media::bufferpool;
+namespace bufferpool_impl = android::hardware::media::bufferpool::V2_0::implementation;
 using android::hardware::media::bufferpool::V2_0::ResultStatus;
-using android::hardware::media::bufferpool::V2_0::implementation::BufferPoolAllocation;
-using android::hardware::media::bufferpool::V2_0::implementation::BufferPoolAllocator;
-using android::hardware::media::bufferpool::V2_0::implementation::ClientManager;
-using android::hardware::media::bufferpool::V2_0::implementation::ConnectionId;
-using android::hardware::media::bufferpool::V2_0::implementation::INVALID_CONNECTIONID;
+
+namespace bufferpool2 = aidl::android::hardware::media::bufferpool2;
+namespace bufferpool2_impl = aidl::android::hardware::media::bufferpool2::implementation;
+using ResultStatus2 = aidl::android::hardware::media::bufferpool2::ResultStatus;
 
 // This anonymous namespace contains the helper classes that allow our implementation to create
 // block/buffer objects.
@@ -354,21 +356,21 @@
         return TYPE_BUFFERPOOL;
     }
 
-    void getBufferPoolData(std::shared_ptr<BufferPoolData> *data) const {
+    void getBufferPoolData(std::shared_ptr<bufferpool::BufferPoolData> *data) const {
         *data = mData;
     }
 
-    C2PooledBlockPoolData(const std::shared_ptr<BufferPoolData> &data) : mData(data) {}
+    C2PooledBlockPoolData(const std::shared_ptr<bufferpool::BufferPoolData> &data) : mData(data) {}
 
     virtual ~C2PooledBlockPoolData() override {}
 
 private:
-    std::shared_ptr<BufferPoolData> mData;
+    std::shared_ptr<bufferpool::BufferPoolData> mData;
 };
 
 bool _C2BlockFactory::GetBufferPoolData(
         const std::shared_ptr<const _C2BlockPoolData> &data,
-        std::shared_ptr<BufferPoolData> *bufferPoolData) {
+        std::shared_ptr<bufferpool::BufferPoolData> *bufferPoolData) {
     if (data && data->getType() == _C2BlockPoolData::TYPE_BUFFERPOOL) {
         const std::shared_ptr<const C2PooledBlockPoolData> poolData =
                 std::static_pointer_cast<const C2PooledBlockPoolData>(data);
@@ -378,6 +380,37 @@
     return false;
 }
 
+struct C2_HIDE C2PooledBlockPoolData2 : _C2BlockPoolData { // AIDL BufferPool(bufferpool2)
+
+    type_t getType() const override {
+        return TYPE_BUFFERPOOL2;
+    }
+
+    void getBufferPoolData(std::shared_ptr<bufferpool2::BufferPoolData> *data) const {
+        *data = mData;
+    }
+
+    C2PooledBlockPoolData2(const std::shared_ptr<bufferpool2::BufferPoolData> &data)
+            : mData(data) {}
+
+    virtual ~C2PooledBlockPoolData2() override {}
+
+private:
+    std::shared_ptr<bufferpool2::BufferPoolData> mData;
+};
+
+bool _C2BlockFactory::GetBufferPoolData(
+        const std::shared_ptr<const _C2BlockPoolData> &data,
+        std::shared_ptr<bufferpool2::BufferPoolData> *bufferPoolData) {
+    if (data && data->getType() == _C2BlockPoolData::TYPE_BUFFERPOOL2) {
+        const std::shared_ptr<const C2PooledBlockPoolData2> poolData =
+                std::static_pointer_cast<const C2PooledBlockPoolData2>(data);
+        poolData->getBufferPoolData(bufferPoolData);
+        return true;
+    }
+    return false;
+}
+
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
         const std::shared_ptr<C2LinearAllocation> &alloc,
         const std::shared_ptr<_C2BlockPoolData> &data, size_t offset, size_t size) {
@@ -422,7 +455,7 @@
 }
 
 std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
-        const C2Handle *cHandle, const std::shared_ptr<BufferPoolData> &data) {
+        const C2Handle *cHandle, const std::shared_ptr<bufferpool::BufferPoolData> &data) {
     // TODO: get proper allocator? and mutex?
     static std::shared_ptr<C2Allocator> sAllocator = []{
         std::shared_ptr<C2Allocator> allocator;
@@ -452,11 +485,43 @@
     return nullptr;
 };
 
+std::shared_ptr<C2LinearBlock> _C2BlockFactory::CreateLinearBlock(
+        const C2Handle *cHandle, const std::shared_ptr<bufferpool2::BufferPoolData> &data) {
+    // TODO: get proper allocator? and mutex?
+    static std::shared_ptr<C2Allocator> sAllocator = []{
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2AllocatorStore> allocatorStore =
+                android::GetCodec2PlatformAllocatorStore();
+        allocatorStore->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator);
+
+        return allocator;
+    }();
+
+    if (sAllocator == nullptr)
+        return nullptr;
+
+    bool isValidHandle = sAllocator->checkHandle(cHandle);
+
+    std::shared_ptr<C2LinearAllocation> alloc;
+    if (isValidHandle) {
+        c2_status_t err = sAllocator->priorLinearAllocation(cHandle, &alloc);
+        const std::shared_ptr<C2PooledBlockPoolData2> poolData =
+                std::make_shared<C2PooledBlockPoolData2>(data);
+        if (err == C2_OK && poolData) {
+            // TODO: config params?
+            std::shared_ptr<C2LinearBlock> block =
+                    _C2BlockFactory::CreateLinearBlock(alloc, poolData);
+            return block;
+        }
+    }
+    return nullptr;
+};
+
 /**
  * Wrapped C2Allocator which is injected to buffer pool on behalf of
  * C2BlockPool.
  */
-class _C2BufferPoolAllocator : public BufferPoolAllocator {
+class _C2BufferPoolAllocator : public bufferpool_impl::BufferPoolAllocator {
 public:
     _C2BufferPoolAllocator(const std::shared_ptr<C2Allocator> &allocator)
         : mAllocator(allocator) {}
@@ -464,7 +529,7 @@
     ~_C2BufferPoolAllocator() override {}
 
     ResultStatus allocate(const std::vector<uint8_t> &params,
-                          std::shared_ptr<BufferPoolAllocation> *alloc,
+                          std::shared_ptr<bufferpool_impl::BufferPoolAllocation> *alloc,
                           size_t *allocSize) override;
 
     bool compatible(const std::vector<uint8_t> &newParams,
@@ -496,7 +561,7 @@
                           std::vector<uint8_t> *params);
 
     /**
-     * Transforms an existing native handle to an C2LinearAllcation.
+     * Transforms an existing native handle to a C2LinearAllocation.
      * Wrapper to C2Allocator#priorLinearAllocation
      */
     c2_status_t priorLinearAllocation(
@@ -504,7 +569,7 @@
             std::shared_ptr<C2LinearAllocation> *c2Allocation);
 
     /**
-     * Transforms an existing native handle to an C2GraphicAllcation.
+     * Transforms an existing native handle to a C2GraphicAllocation.
      * Wrapper to C2Allocator#priorGraphicAllocation
      */
     c2_status_t priorGraphicAllocation(
@@ -545,7 +610,7 @@
     LinearAllocationDtor(const std::shared_ptr<C2LinearAllocation> &alloc)
         : mAllocation(alloc) {}
 
-    void operator()(BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
+    void operator()(bufferpool_impl::BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
 
     const std::shared_ptr<C2LinearAllocation> mAllocation;
 };
@@ -554,14 +619,14 @@
     GraphicAllocationDtor(const std::shared_ptr<C2GraphicAllocation> &alloc)
         : mAllocation(alloc) {}
 
-    void operator()(BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
+    void operator()(bufferpool_impl::BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
 
     const std::shared_ptr<C2GraphicAllocation> mAllocation;
 };
 
 ResultStatus _C2BufferPoolAllocator::allocate(
         const std::vector<uint8_t>  &params,
-        std::shared_ptr<BufferPoolAllocation> *alloc,
+        std::shared_ptr<bufferpool_impl::BufferPoolAllocation> *alloc,
         size_t *allocSize) {
     AllocParams c2Params;
     memcpy(&c2Params, params.data(), std::min(sizeof(AllocParams), params.size()));
@@ -574,9 +639,10 @@
             status = mAllocator->newLinearAllocation(
                     c2Params.data.params[0], c2Params.data.usage, &c2Linear);
             if (status == C2_OK && c2Linear) {
-                BufferPoolAllocation *ptr = new BufferPoolAllocation(c2Linear->handle());
+                bufferpool_impl::BufferPoolAllocation *ptr =
+                        new bufferpool_impl::BufferPoolAllocation(c2Linear->handle());
                 if (ptr) {
-                    *alloc = std::shared_ptr<BufferPoolAllocation>(
+                    *alloc = std::shared_ptr<bufferpool_impl::BufferPoolAllocation>(
                             ptr, LinearAllocationDtor(c2Linear));
                     if (*alloc) {
                         *allocSize = (size_t)c2Params.data.params[0];
@@ -596,9 +662,10 @@
                     c2Params.data.params[2],
                     c2Params.data.usage, &c2Graphic);
             if (status == C2_OK && c2Graphic) {
-                BufferPoolAllocation *ptr = new BufferPoolAllocation(c2Graphic->handle());
+                bufferpool_impl::BufferPoolAllocation *ptr =
+                        new bufferpool_impl::BufferPoolAllocation(c2Graphic->handle());
                 if (ptr) {
-                    *alloc = std::shared_ptr<BufferPoolAllocation>(
+                    *alloc = std::shared_ptr<bufferpool_impl::BufferPoolAllocation>(
                             ptr, GraphicAllocationDtor(c2Graphic));
                     if (*alloc) {
                         *allocSize = c2Params.data.params[0] * c2Params.data.params[1];
@@ -624,7 +691,7 @@
     memcpy(&newAlloc, newParams.data(), std::min(sizeof(AllocParams), newParams.size()));
     memcpy(&oldAlloc, oldParams.data(), std::min(sizeof(AllocParams), oldParams.size()));
 
-    // TODO: support not exact matching. e.g) newCapacity < oldCapacity
+    // TODO: support non-exact matching, e.g. newCapacity < oldCapacity
     if (newAlloc.data.allocType == oldAlloc.data.allocType &&
             newAlloc.data.usage.expected == oldAlloc.data.usage.expected) {
         for (int i = 0; i < kMaxIntParams; ++i) {
@@ -666,7 +733,7 @@
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
             : mInit(C2_OK),
-              mBufferPoolManager(ClientManager::getInstance()),
+              mBufferPoolManager(bufferpool_impl::ClientManager::getInstance()),
               mAllocator(std::make_shared<_C2BufferPoolAllocator>(allocator)) {
         if (mAllocator && mBufferPoolManager) {
             if (mBufferPoolManager->create(
@@ -692,7 +759,7 @@
         }
         std::vector<uint8_t> params;
         mAllocator->getLinearParams(capacity, usage, &params);
-        std::shared_ptr<BufferPoolData> bufferPoolData;
+        std::shared_ptr<bufferpool::BufferPoolData> bufferPoolData;
         native_handle_t *cHandle = nullptr;
         ResultStatus status = mBufferPoolManager->allocate(
                 mConnectionId, params, &cHandle, &bufferPoolData);
@@ -725,7 +792,7 @@
         }
         std::vector<uint8_t> params;
         mAllocator->getGraphicParams(width, height, format, usage, &params);
-        std::shared_ptr<BufferPoolData> bufferPoolData;
+        std::shared_ptr<bufferpool::BufferPoolData> bufferPoolData;
         native_handle_t *cHandle = nullptr;
         ResultStatus status = mBufferPoolManager->allocate(
                 mConnectionId, params, &cHandle, &bufferPoolData);
@@ -750,20 +817,340 @@
         return C2_CORRUPTED;
     }
 
-    ConnectionId getConnectionId() {
-        return mInit != C2_OK ? INVALID_CONNECTIONID : mConnectionId;
+    bufferpool_impl::ConnectionId getConnectionId() {
+        return mInit != C2_OK ? bufferpool_impl::INVALID_CONNECTIONID : mConnectionId;
     }
 
 private:
     c2_status_t mInit;
-    const android::sp<ClientManager> mBufferPoolManager;
-    ConnectionId mConnectionId; // locally
+    const android::sp<bufferpool_impl::ClientManager> mBufferPoolManager;
+    bufferpool_impl::ConnectionId mConnectionId; // locally
     const std::shared_ptr<_C2BufferPoolAllocator> mAllocator;
 };
 
+/**
+ * Wrapped C2Allocator which is injected into the AIDL buffer pool on behalf
+ * of C2BlockPool.
+ */
+class _C2BufferPoolAllocator2 : public bufferpool2_impl::BufferPoolAllocator {
+public:
+    _C2BufferPoolAllocator2(const std::shared_ptr<C2Allocator> &allocator)
+        : mAllocator(allocator) {}
+
+    ~_C2BufferPoolAllocator2() override {}
+
+    bufferpool2_impl::BufferPoolStatus allocate(const std::vector<uint8_t> &params,
+                          std::shared_ptr<bufferpool2_impl::BufferPoolAllocation> *alloc,
+                          size_t *allocSize) override;
+
+    bool compatible(const std::vector<uint8_t> &newParams,
+                    const std::vector<uint8_t> &oldParams) override;
+
+    // Methods for codec2 component (C2BlockPool).
+    /**
+     * Transforms linear allocation parameters for C2Allocator to parameters
+     * for buffer pool.
+     *
+     * @param capacity      size of linear allocation
+     * @param usage         memory usage pattern for linear allocation
+     * @param params        allocation parameters for buffer pool
+     */
+    void getLinearParams(uint32_t capacity, C2MemoryUsage usage,
+                         std::vector<uint8_t> *params);
+
+    /**
+     * Transforms graphic allocation parameters for C2Allocator to parameters
+     * for buffer pool.
+     *
+     * @param width         width of graphic allocation
+     * @param height        height of graphic allocation
+     * @param format        color format of graphic allocation
+     * @param usage         memory usage pattern for graphic allocation
+     * @param params        allocation parameters for buffer pool
+     */
+    void getGraphicParams(uint32_t width, uint32_t height,
+                          uint32_t format, C2MemoryUsage usage,
+                          std::vector<uint8_t> *params);
+
+    /**
+     * Transforms an existing native handle to a C2LinearAllocation.
+     * Wrapper to C2Allocator#priorLinearAllocation
+     */
+    c2_status_t priorLinearAllocation(
+            const C2Handle *handle,
+            std::shared_ptr<C2LinearAllocation> *c2Allocation);
+
+    /**
+     * Transforms an existing native handle to a C2GraphicAllocation.
+     * Wrapper to C2Allocator#priorGraphicAllocation
+     */
+    c2_status_t priorGraphicAllocation(
+            const C2Handle *handle,
+            std::shared_ptr<C2GraphicAllocation> *c2Allocation);
+
+private:
+    static constexpr int kMaxIntParams = 5; // large enough number;
+
+    enum AllocType : uint8_t {
+        ALLOC_NONE = 0,
+
+        ALLOC_LINEAR,
+        ALLOC_GRAPHIC,
+    };
+
+    union AllocParams {
+        struct {
+            AllocType allocType;
+            C2MemoryUsage usage;
+            uint32_t params[kMaxIntParams];
+        } data;
+        uint8_t array[0];
+
+        AllocParams() : data{ALLOC_NONE, {0, 0}, {0}} {}
+        AllocParams(C2MemoryUsage usage, uint32_t capacity)
+            : data{ALLOC_LINEAR, usage, {[0] = capacity}} {}
+        AllocParams(
+                C2MemoryUsage usage,
+                uint32_t width, uint32_t height, uint32_t format)
+                : data{ALLOC_GRAPHIC, usage, {width, height, format}} {}
+    };
+
+    const std::shared_ptr<C2Allocator> mAllocator;
+};
+
+struct LinearAllocationDtor2 {
+    LinearAllocationDtor2(const std::shared_ptr<C2LinearAllocation> &alloc)
+        : mAllocation(alloc) {}
+
+    void operator()(bufferpool2_impl::BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
+
+    const std::shared_ptr<C2LinearAllocation> mAllocation;
+};
+
+struct GraphicAllocationDtor2 {
+    GraphicAllocationDtor2(const std::shared_ptr<C2GraphicAllocation> &alloc)
+        : mAllocation(alloc) {}
+
+    void operator()(bufferpool2_impl::BufferPoolAllocation *poolAlloc) { delete poolAlloc; }
+
+    const std::shared_ptr<C2GraphicAllocation> mAllocation;
+};
+
+bufferpool2_impl::BufferPoolStatus _C2BufferPoolAllocator2::allocate(
+        const std::vector<uint8_t>  &params,
+        std::shared_ptr<bufferpool2_impl::BufferPoolAllocation> *alloc,
+        size_t *allocSize) {
+    AllocParams c2Params;
+    memcpy(&c2Params, params.data(), std::min(sizeof(AllocParams), params.size()));
+    c2_status_t status = C2_BAD_VALUE;
+    switch(c2Params.data.allocType) {
+        case ALLOC_NONE:
+            break;
+        case ALLOC_LINEAR: {
+            std::shared_ptr<C2LinearAllocation> c2Linear;
+            status = mAllocator->newLinearAllocation(
+                    c2Params.data.params[0], c2Params.data.usage, &c2Linear);
+            if (status == C2_OK && c2Linear) {
+                bufferpool2_impl::BufferPoolAllocation *ptr =
+                        new bufferpool2_impl::BufferPoolAllocation(c2Linear->handle());
+                if (ptr) {
+                    *alloc = std::shared_ptr<bufferpool2_impl::BufferPoolAllocation>(
+                            ptr, LinearAllocationDtor2(c2Linear));
+                    if (*alloc) {
+                        *allocSize = (size_t)c2Params.data.params[0];
+                        return ResultStatus2::OK;
+                    }
+                    delete ptr;
+                }
+                return ResultStatus2::NO_MEMORY;
+            }
+            break;
+        }
+        case ALLOC_GRAPHIC: {
+            std::shared_ptr<C2GraphicAllocation> c2Graphic;
+            status = mAllocator->newGraphicAllocation(
+                    c2Params.data.params[0],
+                    c2Params.data.params[1],
+                    c2Params.data.params[2],
+                    c2Params.data.usage, &c2Graphic);
+            if (status == C2_OK && c2Graphic) {
+                bufferpool2_impl::BufferPoolAllocation *ptr =
+                        new bufferpool2_impl::BufferPoolAllocation(c2Graphic->handle());
+                if (ptr) {
+                    *alloc = std::shared_ptr<bufferpool2_impl::BufferPoolAllocation>(
+                            ptr, GraphicAllocationDtor2(c2Graphic));
+                    if (*alloc) {
+                        *allocSize = c2Params.data.params[0] * c2Params.data.params[1];
+                        return ResultStatus2::OK;
+                    }
+                    delete ptr;
+                }
+                return ResultStatus2::NO_MEMORY;
+            }
+            break;
+        }
+        default:
+            break;
+    }
+    return ResultStatus2::CRITICAL_ERROR;
+}
+
+bool _C2BufferPoolAllocator2::compatible(
+        const std::vector<uint8_t>  &newParams,
+        const std::vector<uint8_t>  &oldParams) {
+    AllocParams newAlloc;
+    AllocParams oldAlloc;
+    memcpy(&newAlloc, newParams.data(), std::min(sizeof(AllocParams), newParams.size()));
+    memcpy(&oldAlloc, oldParams.data(), std::min(sizeof(AllocParams), oldParams.size()));
+
+    // TODO: support non-exact matching, e.g. newCapacity < oldCapacity
+    if (newAlloc.data.allocType == oldAlloc.data.allocType &&
+            newAlloc.data.usage.expected == oldAlloc.data.usage.expected) {
+        for (int i = 0; i < kMaxIntParams; ++i) {
+            if (newAlloc.data.params[i] != oldAlloc.data.params[i]) {
+                return false;
+            }
+        }
+        return true;
+    }
+    return false;
+}
+
+void _C2BufferPoolAllocator2::getLinearParams(
+        uint32_t capacity, C2MemoryUsage usage, std::vector<uint8_t> *params) {
+    AllocParams c2Params(usage, capacity);
+    params->assign(c2Params.array, c2Params.array + sizeof(AllocParams));
+}
+
+void _C2BufferPoolAllocator2::getGraphicParams(
+        uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
+        std::vector<uint8_t> *params) {
+    AllocParams c2Params(usage, width, height, format);
+    params->assign(c2Params.array, c2Params.array + sizeof(AllocParams));
+}
+
+c2_status_t _C2BufferPoolAllocator2::priorLinearAllocation(
+        const C2Handle *handle,
+        std::shared_ptr<C2LinearAllocation> *c2Allocation) {
+    return mAllocator->priorLinearAllocation(handle, c2Allocation);
+}
+
+c2_status_t _C2BufferPoolAllocator2::priorGraphicAllocation(
+        const C2Handle *handle,
+        std::shared_ptr<C2GraphicAllocation> *c2Allocation) {
+    return mAllocator->priorGraphicAllocation(handle, c2Allocation);
+}
+
+class C2PooledBlockPool::Impl2 {
+public:
+    Impl2(const std::shared_ptr<C2Allocator> &allocator)
+            : mInit(C2_OK),
+              mBufferPoolManager(bufferpool2_impl::ClientManager::getInstance()),
+              mAllocator(std::make_shared<_C2BufferPoolAllocator2>(allocator)) {
+        if (mAllocator && mBufferPoolManager) {
+            if (mBufferPoolManager->create(
+                    mAllocator, &mConnectionId) == ResultStatus2::OK) {
+                return;
+            }
+        }
+        mInit = C2_NO_INIT;
+    }
+
+    ~Impl2() {
+        if (mInit == C2_OK) {
+            mBufferPoolManager->close(mConnectionId);
+        }
+    }
+
+    c2_status_t fetchLinearBlock(
+            uint32_t capacity, C2MemoryUsage usage,
+            std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+        block->reset();
+        if (mInit != C2_OK) {
+            return mInit;
+        }
+        std::vector<uint8_t> params;
+        mAllocator->getLinearParams(capacity, usage, &params);
+        std::shared_ptr<bufferpool2::BufferPoolData> bufferPoolData;
+        native_handle_t *cHandle = nullptr;
+        bufferpool2_impl::BufferPoolStatus status = mBufferPoolManager->allocate(
+                mConnectionId, params, &cHandle, &bufferPoolData);
+        if (status == ResultStatus2::OK) {
+            std::shared_ptr<C2LinearAllocation> alloc;
+            std::shared_ptr<C2PooledBlockPoolData2> poolData =
+                    std::make_shared<C2PooledBlockPoolData2>(bufferPoolData);
+            c2_status_t err = mAllocator->priorLinearAllocation(cHandle, &alloc);
+            if (err == C2_OK && poolData && alloc) {
+                *block = _C2BlockFactory::CreateLinearBlock(alloc, poolData, 0, capacity);
+                if (*block) {
+                    return C2_OK;
+                }
+            }
+            return C2_NO_MEMORY;
+        }
+        if (status == ResultStatus2::NO_MEMORY) {
+            return C2_NO_MEMORY;
+        }
+        return C2_CORRUPTED;
+    }
+
+    c2_status_t fetchGraphicBlock(
+            uint32_t width, uint32_t height, uint32_t format,
+            C2MemoryUsage usage,
+            std::shared_ptr<C2GraphicBlock> *block) {
+        block->reset();
+        if (mInit != C2_OK) {
+            return mInit;
+        }
+        std::vector<uint8_t> params;
+        mAllocator->getGraphicParams(width, height, format, usage, &params);
+        std::shared_ptr<bufferpool2::BufferPoolData> bufferPoolData;
+        native_handle_t *cHandle = nullptr;
+        bufferpool2_impl::BufferPoolStatus status = mBufferPoolManager->allocate(
+                mConnectionId, params, &cHandle, &bufferPoolData);
+        if (status == ResultStatus2::OK) {
+            std::shared_ptr<C2GraphicAllocation> alloc;
+            std::shared_ptr<C2PooledBlockPoolData2> poolData =
+                std::make_shared<C2PooledBlockPoolData2>(bufferPoolData);
+            c2_status_t err = mAllocator->priorGraphicAllocation(
+                    cHandle, &alloc);
+            if (err == C2_OK && poolData && alloc) {
+                *block = _C2BlockFactory::CreateGraphicBlock(
+                        alloc, poolData, C2Rect(width, height));
+                if (*block) {
+                    return C2_OK;
+                }
+            }
+            return C2_NO_MEMORY;
+        }
+        if (status == ResultStatus2::NO_MEMORY) {
+            return C2_NO_MEMORY;
+        }
+        return C2_CORRUPTED;
+    }
+
+    bufferpool2_impl::ConnectionId getConnectionId() {
+        return mInit != C2_OK ? bufferpool2_impl::INVALID_CONNECTIONID : mConnectionId;
+    }
+
+private:
+    c2_status_t mInit;
+    const std::shared_ptr<bufferpool2_impl::ClientManager> mBufferPoolManager;
+    bufferpool2_impl::ConnectionId mConnectionId; // locally
+    const std::shared_ptr<_C2BufferPoolAllocator2> mAllocator;
+};
+
 C2PooledBlockPool::C2PooledBlockPool(
-        const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId)
-        : mAllocator(allocator), mLocalId(localId), mImpl(new Impl(allocator)) {}
+        const std::shared_ptr<C2Allocator> &allocator,
+        const local_id_t localId,
+        BufferPoolVer ver)
+        : mAllocator(allocator), mLocalId(localId), mBufferPoolVer(ver) {
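+        // VER_HIDL is backed by the HIDL bufferpool implementation (Impl);
+        // VER_AIDL2 is backed by the AIDL bufferpool2 implementation (Impl2).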
+        if (mBufferPoolVer == VER_HIDL) {
+            mImpl = std::make_unique<Impl>(allocator);
+        }
+        if (mBufferPoolVer == VER_AIDL2) {
+            mImpl2 = std::make_unique<Impl2>(allocator);
+        }
+    }
 
 C2PooledBlockPool::~C2PooledBlockPool() {
 }
@@ -772,9 +1159,12 @@
         uint32_t capacity,
         C2MemoryUsage usage,
         std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
-    if (mImpl) {
+    if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchLinearBlock(capacity, usage, block);
     }
+    if (mBufferPoolVer == VER_AIDL2 && mImpl2) {
+        return mImpl2->fetchLinearBlock(capacity, usage, block);
+    }
     return C2_CORRUPTED;
 }
 
@@ -784,16 +1174,22 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block) {
-    if (mImpl) {
+    if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block);
     }
+    if (mBufferPoolVer == VER_AIDL2 && mImpl2) {
+        return mImpl2->fetchGraphicBlock(width, height, format, usage, block);
+    }
     return C2_CORRUPTED;
 }
 
 int64_t C2PooledBlockPool::getConnectionId() {
-    if (mImpl) {
+    if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->getConnectionId();
     }
+    if (mBufferPoolVer == VER_AIDL2 && mImpl2) {
+        return mImpl2->getConnectionId();
+    }
     return 0;
 }
 
@@ -1128,7 +1524,7 @@
 
 std::shared_ptr<C2GraphicBlock> _C2BlockFactory::CreateGraphicBlock(
         const C2Handle *cHandle,
-        const std::shared_ptr<BufferPoolData> &data) {
+        const std::shared_ptr<bufferpool::BufferPoolData> &data) {
     // TODO: get proper allocator? and mutex?
     static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
 
@@ -1147,6 +1543,26 @@
     return nullptr;
 };
 
+std::shared_ptr<C2GraphicBlock> _C2BlockFactory::CreateGraphicBlock(
+        const C2Handle *cHandle,
+        const std::shared_ptr<bufferpool2::BufferPoolData> &data) {
+    // TODO: get proper allocator? and mutex?
+    static std::unique_ptr<C2AllocatorGralloc> sAllocator = std::make_unique<C2AllocatorGralloc>(0);
+
+    std::shared_ptr<C2GraphicAllocation> alloc;
+    if (sAllocator->isValid(cHandle)) {
+        c2_status_t err = sAllocator->priorGraphicAllocation(cHandle, &alloc);
+        const std::shared_ptr<C2PooledBlockPoolData2> poolData =
+                std::make_shared<C2PooledBlockPoolData2>(data);
+        if (err == C2_OK && poolData) {
+            // TODO: config setup?
+            std::shared_ptr<C2GraphicBlock> block =
+                    _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
+            return block;
+        }
+    }
+    return nullptr;
+};
 
 /* ========================================== BUFFER ========================================= */
 
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 0b556aa..aa908a8 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -186,7 +186,7 @@
 class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
 public:
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
-        c2_nsecs_t timeoutMs = timeoutNs / 1000;
+        int64_t timeoutMs = timeoutNs / 1000000;
         if (timeoutMs > INT_MAX) {
             timeoutMs = INT_MAX;
         }
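
For illustration (not part of this patch): the wait() fix above corrects a unit error. Dividing nanoseconds by 1,000 yields microseconds, not milliseconds, so the computed timeout was a thousand times too large before clamping. A standalone sketch of the corrected conversion, with an invented helper name:

    #include <climits>
    #include <cstdint>

    // Sketch: convert a nanosecond timeout to milliseconds, clamped to INT_MAX.
    // 1 ms == 1,000,000 ns; widening to int64_t first avoids truncation.
    static int TimeoutNsToMsClamped(int64_t timeoutNs) {
        int64_t timeoutMs = timeoutNs / 1000000;
        if (timeoutMs > INT_MAX) {
            timeoutMs = INT_MAX;
        }
        return static_cast<int>(timeoutMs);
    }
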
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index dfdd84d..f6f97da 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "C2Store"
-#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #include <C2AllocatorBlob.h>
@@ -1081,6 +1081,7 @@
     emplace("libcodec2_soft_amrwbenc.so");
     //emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
     emplace("libcodec2_soft_av1dec_gav1.so");
+    emplace("libcodec2_soft_av1enc.so");
     emplace("libcodec2_soft_avcdec.so");
     emplace("libcodec2_soft_avcenc.so");
     emplace("libcodec2_soft_flacdec.so");
diff --git a/media/codec2/vndk/include/C2BufferPriv.h b/media/codec2/vndk/include/C2BufferPriv.h
index be5f69c..527845f 100644
--- a/media/codec2/vndk/include/C2BufferPriv.h
+++ b/media/codec2/vndk/include/C2BufferPriv.h
@@ -74,7 +74,14 @@
 
 class C2PooledBlockPool : public C2BlockPool {
 public:
-    C2PooledBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
+    enum BufferPoolVer : int {
+        VER_HIDL = 0,
+        VER_AIDL2
+    };
+    C2PooledBlockPool(
+            const std::shared_ptr<C2Allocator> &allocator,
+            const local_id_t localId,
+            BufferPoolVer ver = VER_HIDL);
 
     virtual ~C2PooledBlockPool() override;
 
@@ -117,9 +124,12 @@
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
+    const BufferPoolVer mBufferPoolVer;
 
-    class Impl;
+    class Impl; // HIDL BufferPool VER_HIDL
     std::unique_ptr<Impl> mImpl;
+    class Impl2; // AIDL BufferPool(bufferpool2) VER_AIDL2
+    std::unique_ptr<Impl2> mImpl2;
 };
 
 #endif // STAGEFRIGHT_CODEC2_BUFFER_PRIV_H_
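
For illustration (not part of this patch): passing the new third constructor argument selects the AIDL bufferpool2 backend, while existing call sites that pass only two arguments keep the previous HIDL behavior through the VER_HIDL default. A minimal sketch, with an invented helper name:

    #include <memory>
    #include <C2BufferPriv.h>

    // Sketch: build a C2PooledBlockPool backed by the AIDL (bufferpool2) client.
    std::shared_ptr<C2BlockPool> MakeAidl2Pool(
            const std::shared_ptr<C2Allocator> &allocator,
            C2BlockPool::local_id_t localId) {
        return std::make_shared<C2PooledBlockPool>(
                allocator, localId, C2PooledBlockPool::VER_AIDL2);
    }
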
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
index ac87fe4..b193b4a 100644
--- a/media/codec2/vndk/include/C2SurfaceSyncObj.h
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -72,12 +72,13 @@
     /**
      * Notify a buffer is queued. Return whether the upcoming dequeue operation
      * is not blocked. if it's blocked and waitId is non-null, waitId is returned
-     * to be used for waiting.
+     * to be used for waiting. Notify (wake up) waiters only when 'notify' is
+     * true.
      *
      * \retval false    dequeue operation is blocked now.
      * \retval true     dequeue operation is possible.
      */
-    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr, bool notify = true);
 
     /**
      * Notify a buffer is dequeued.
@@ -111,6 +112,11 @@
      */
     c2_status_t waitForChange(uint32_t waitId, c2_nsecs_t timeoutNs);
 
+    /**
+     * Wake up and expire all waiters.
+     */
+    void notifyAll();
+
     C2SyncVariables() {}
 
 private:
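
For illustration (not part of this patch): the extra 'notify' flag lets a caller fold a status check into the call instead of wrapping it in an if, which is how the C2BqBuffer.cpp changes later in this patch use it. A minimal sketch; the function name is invented and 'syncVar' is assumed to point at a mapped C2SyncVariables:

    // Sketch: update the shared dequeue count unconditionally, but only wake
    // waiters when the sync object is still active.
    void onBufferReturned(C2SyncVariables *syncVar) {
        syncVar->lock();
        syncVar->notifyQueuedLocked(
                /* waitId = */ nullptr,
                /* notify = */ syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE);
        syncVar->unlock();
    }
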
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index c510fca..1eefd87 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -21,17 +21,22 @@
 
 #include <C2Buffer.h>
 
-namespace android {
-namespace hardware {
-namespace media {
-namespace bufferpool {
+// Note: HIDL-BufferPool and AIDL-BufferPool are not compatible
+namespace android::hardware::media::bufferpool {
 
+// BufferPoolData for HIDL-BufferPool
 struct BufferPoolData;
 
 }
+namespace aidl::android::hardware::media::bufferpool2 {
+
+// BufferPoolData for AIDL-BufferPool
+struct BufferPoolData;
+
 }
-}
-}
+
+using bufferpool_BufferPoolData = android::hardware::media::bufferpool::BufferPoolData;
+using bufferpool2_BufferPoolData = aidl::android::hardware::media::bufferpool2::BufferPoolData;
 
 /**
  * Stores informations from C2BlockPool implementations which are required by C2Block.
@@ -40,6 +45,7 @@
     enum type_t : int {
         TYPE_BUFFERPOOL = 0,
         TYPE_BUFFERQUEUE,
+        TYPE_BUFFERPOOL2, // AIDL-BufferPool
     };
 
     virtual type_t getType() const = 0;
@@ -136,6 +142,7 @@
     std::shared_ptr<C2GraphicBlock> CreateGraphicBlock(
             const C2Handle *handle);
 
+    // HIDL-BufferPool
     /**
      * Create a linear block from the received bufferpool data.
      *
@@ -147,7 +154,7 @@
     static
     std::shared_ptr<C2LinearBlock> CreateLinearBlock(
             const C2Handle *handle,
-            const std::shared_ptr<android::hardware::media::bufferpool::BufferPoolData> &data);
+            const std::shared_ptr<bufferpool_BufferPoolData> &data);
 
     /**
      * Create a graphic block from the received bufferpool data.
@@ -160,7 +167,7 @@
     static
     std::shared_ptr<C2GraphicBlock> CreateGraphicBlock(
             const C2Handle *handle,
-            const std::shared_ptr<android::hardware::media::bufferpool::BufferPoolData> &data);
+            const std::shared_ptr<bufferpool_BufferPoolData> &data);
 
     /**
      * Get bufferpool data from the blockpool data.
@@ -174,7 +181,48 @@
     static
     bool GetBufferPoolData(
             const std::shared_ptr<const _C2BlockPoolData> &poolData,
-            std::shared_ptr<android::hardware::media::bufferpool::BufferPoolData> *bufferPoolData);
+            std::shared_ptr<bufferpool_BufferPoolData> *bufferPoolData);
+
+    // AIDL-BufferPool
+    /**
+     * Create a linear block from the received bufferpool data.
+     *
+     * \param data  bufferpool data to a linear block
+     *
+     * \return shared pointer to the linear block. nullptr if there was not enough memory to
+     *         create this block.
+     */
+    static
+    std::shared_ptr<C2LinearBlock> CreateLinearBlock(
+            const C2Handle *handle,
+            const std::shared_ptr<bufferpool2_BufferPoolData> &data);
+
+    /**
+     * Create a graphic block from the received bufferpool data.
+     *
+     * \param data  bufferpool data to a graphic block
+     *
+     * \return shared pointer to the graphic block. nullptr if there was not enough memory to
+     *         create this block.
+     */
+    static
+    std::shared_ptr<C2GraphicBlock> CreateGraphicBlock(
+            const C2Handle *handle,
+            const std::shared_ptr<bufferpool2_BufferPoolData> &data);
+
+    /**
+     * Get bufferpool data from the blockpool data.
+     *
+     * \param poolData          blockpool data
+     * \param bufferPoolData    pointer to bufferpool data where the bufferpool
+     *                          data is stored.
+     *
+     * \return {\code true} when there is valid bufferpool data, {\code false} otherwise.
+     */
+    static
+    bool GetBufferPoolData(
+            const std::shared_ptr<const _C2BlockPoolData> &poolData,
+            std::shared_ptr<bufferpool2_BufferPoolData> *bufferPoolData);
 
     /*
      * Life Cycle Management of BufferQueue-Based Blocks
@@ -238,7 +286,7 @@
      *   - Local migration on blockpool side will be done automatically by
      *     blockpool.
      *   - Before attachBuffer(), BeginAttachBlockToBufferQueue() should be called
-     *     to test eligiblity.
+     *     to test eligibility.
      *   - After attachBuffer() is called, EndAttachBlockToBufferQueue() should
      *     be called. This will set "held" status to true. If it returned
      *     false, cancelBuffer() should be called.
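
For illustration (not part of this patch): because the HIDL and AIDL BufferPoolData types are unrelated, code holding only a generic _C2BlockPoolData can dispatch on getType() to pick the matching GetBufferPoolData overload declared above. A minimal sketch using those names; the function name is invented:

    // Sketch: route a generic block-pool data object to the right BufferPoolData type.
    bool ExtractPoolData(
            const std::shared_ptr<const _C2BlockPoolData> &poolData,
            std::shared_ptr<bufferpool_BufferPoolData> *hidlData,
            std::shared_ptr<bufferpool2_BufferPoolData> *aidlData) {
        switch (poolData->getType()) {
        case _C2BlockPoolData::TYPE_BUFFERPOOL:   // HIDL-BufferPool
            return _C2BlockFactory::GetBufferPoolData(poolData, hidlData);
        case _C2BlockPoolData::TYPE_BUFFERPOOL2:  // AIDL-BufferPool
            return _C2BlockFactory::GetBufferPoolData(poolData, aidlData);
        default:
            return false;
        }
    }
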
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index e67e42f..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,12 +432,16 @@
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
+            if (bufferNeedsReallocation) {
+                mBuffers[slot].clear();
+            }
+
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     dequeueable = syncVar->notifyQueuedLocked(&waitId);
                     syncVar->unlock();
                     if (c2Fence) {
@@ -452,8 +456,8 @@
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -502,8 +506,8 @@
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -550,8 +554,8 @@
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
             if (syncVar) {
-                syncVar->lock();
                 (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->lock();
                 syncVar->notifyQueuedLocked();
                 syncVar->unlock();
                 if (c2Fence) {
@@ -813,11 +817,10 @@
         if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId && !mOwner.expired()) {
             C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
             if (syncVar) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
                 syncVar->lock();
-                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
-                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                    syncVar->notifyQueuedLocked();
-                }
+                syncVar->notifyQueuedLocked(nullptr,
+                        syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE);
                 syncVar->unlock();
             } else {
                 mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
@@ -826,11 +829,10 @@
     } else if (!mOwner.expired()) {
         C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
         if (syncVar) {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
             syncVar->lock();
-            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
-                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                syncVar->notifyQueuedLocked();
-            }
+            syncVar->notifyQueuedLocked(nullptr,
+                    syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING);
             syncVar->unlock();
         } else {
             mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
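
For illustration (not part of this patch): each changed call site above now issues the producer-side cancelBuffer() before taking the futex-backed sync lock, so a binder/HIDL transaction is never made while waiters are locked out. A shape-only sketch of that ordering; the callable parameter stands in for the producer call:

    // Sketch: perform the (potentially slow) producer call outside the lock,
    // then take the lock only to update the shared dequeue count.
    template <typename CancelFn>
    void cancelThenNotify(CancelFn &&cancelBuffer, C2SyncVariables *syncVar) {
        cancelBuffer();  // e.g. the cancelBuffer(slot, fence) call in the real code
        if (syncVar) {
            syncVar->lock();
            syncVar->notifyQueuedLocked();
            syncVar->unlock();
        }
    }
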
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 2115cc3..d55a3d8 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
     }
 
     HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    if (o->size() < sizeof(C2SyncVariables)) {
+        android_errorWriteLog(0x534e4554, "240140929");
+        return nullptr;
+    }
+
     void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
 
     if (ptr == MAP_FAILED) {
@@ -177,12 +182,14 @@
     return true;
 }
 
-bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId, bool notify) {
     // Note. thundering herds may occur. Edge trigged signalling.
     // But one waiter will guarantee to dequeue. others may wait again.
     // Minimize futex syscall(trap) for the main use case(one waiter case).
     if (mMaxDequeueCount == mCurDequeueCount--) {
-        broadcast();
+        if (notify) {
+            broadcast();
+        }
         return true;
     }
 
@@ -237,6 +244,12 @@
     return C2_BAD_VALUE;
 }
 
+void C2SyncVariables::notifyAll() {
+    this->lock();
+    this->broadcast();
+    this->unlock();
+}
+
 int C2SyncVariables::signal() {
     mCond++;
 
diff --git a/media/janitors/avic_OWNERS b/media/janitors/avic_OWNERS
new file mode 100644
index 0000000..eca9978
--- /dev/null
+++ b/media/janitors/avic_OWNERS
@@ -0,0 +1,6 @@
+# Bug component: 1344
+# gerrit owner/approvers in the AVIC team
+arifdikici@google.com
+dichenzhang@google.com
+kyslov@google.com
+richardxie@google.com
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index add28e0..4b417a7 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -36,6 +36,9 @@
     symbol_file: "src/libaaudio.map.txt",
     first_version: "26",
     unversioned_until: "current",
+    export_header_libs: [
+        "libAAudio_headers",
+    ],
 }
 
 cc_library_headers {
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaaudio/TEST_MAPPING
+++ b/media/libaaudio/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaaudio/examples/loopback/README.md b/media/libaaudio/examples/loopback/README.md
new file mode 100644
index 0000000..0da751f
--- /dev/null
+++ b/media/libaaudio/examples/loopback/README.md
@@ -0,0 +1,7 @@
+# to run the loopback test from the command line
+{cd to top of the repo}
+mmma frameworks/av/media/libaaudio/examples/
+adb root
+adb remount -R
+adb push $OUT/data/nativetest/aaudio_loopback/aaudio_loopback /data/aaudio_loopback
+adb shell /data/aaudio_loopback -?
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 6fff568..4affaed 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -108,7 +108,7 @@
     aaudio_result_t    outputError = AAUDIO_OK;
 
     GlitchAnalyzer     sineAnalyzer;
-    PulseLatencyAnalyzer echoAnalyzer;
+    WhiteNoiseLatencyAnalyzer echoAnalyzer;
     AudioRecording     audioRecording;
     LoopbackProcessor *loopbackProcessor;
 
diff --git a/media/libaaudio/examples/write_sine/README.md b/media/libaaudio/examples/write_sine/README.md
index b150471..73e6fc9 100644
--- a/media/libaaudio/examples/write_sine/README.md
+++ b/media/libaaudio/examples/write_sine/README.md
@@ -1,7 +1,10 @@
-# cd to this directory
-mkdir -p jni/include/aaudio
-ln -s $PLATFORM/frameworks/av/media/liboboe/include/aaudio/*.h jni/include/aaudio
-ln -s $PLATFORM/out/target/product/$TARGET_PRODUCT/symbols/out/soong/ndk/platforms/android-current/arch-arm64/usr/lib/liboboe.so jni
-$NDK/ndk-build
-adb push libs/arm64-v8a/write_sine_threaded /data
-adb shell /data/write_sine_threaded
+# to run write_sine examples from the command line
+{cd to top of the repo}
+mmma frameworks/av/media/libaaudio/examples/
+adb root
+adb remount -R
+adb push $OUT/data/nativetest/write_sine/write_sine /data/write_sine
+adb shell /data/write_sine -?
+
+adb push $OUT/data/nativetest/write_sine_callback/write_sine_callback /data/write_sine_callback
+adb shell /data/write_sine_callback -?
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 2a12191..3393930 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -25,6 +25,9 @@
 
 cc_fuzz {
     name: "libaaudio_fuzzer",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_static",
+    ],
     srcs: [
         "libaaudio_fuzzer.cpp",
     ],
@@ -36,10 +39,10 @@
         "libaudiomanager",
         "libaudiopolicy",
         "libaudioclient_aidl_conversion",
+        "libaudio_aidl_conversion_common_cpp",
         "libutils",
     ],
     static_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "liblog",
         "libcutils",
         "libaaudio",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 2ff9f5a..7648c76 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -40,7 +40,7 @@
 /**
  * This is used to represent a value that has not been specified.
  * For example, an application could use {@link #AAUDIO_UNSPECIFIED} to indicate
- * that is did not not care what the specific value of a parameter was
+ * that it did not care what the specific value of a parameter was
  * and would accept whatever it was given.
  */
 #define AAUDIO_UNSPECIFIED           0
@@ -103,7 +103,23 @@
      *
      * Available since API level 31.
      */
-    AAUDIO_FORMAT_PCM_I32
+    AAUDIO_FORMAT_PCM_I32,
+
+    /**
+     * This format is used for compressed audio wrapped in IEC61937 for HDMI
+     * or S/PDIF passthrough.
+     *
+     * Unlike PCM playback, the Android framework is not able to do format
+     * conversion for IEC61937. Therefore, when IEC61937 is requested, the sampling
+     * rate and the channel count or channel mask must be specified; otherwise, opening
+     * the stream may fail. Apps can query the correct playback configuration
+     * by calling
+     * <a href="/reference/android/media/AudioManager#getDevices(int)">
+     *   AudioManager#getDevices(int)</a>.
+     *
+     * Available since API level 34.
+     */
+    AAUDIO_FORMAT_IEC61937
 
 };
 typedef int32_t aaudio_format_t;
@@ -118,7 +134,11 @@
      * The call was successful.
      */
     AAUDIO_OK,
-    AAUDIO_ERROR_BASE = -900, // TODO review
+
+    /**
+     * Reserved. This should not be returned.
+     */
+    AAUDIO_ERROR_BASE = -900,
 
     /**
      * The audio device was disconnected. This could occur, for example, when headphones
@@ -134,6 +154,10 @@
      */
     AAUDIO_ERROR_ILLEGAL_ARGUMENT,
     // reserved
+
+    /**
+     * An internal error occurred.
+     */
     AAUDIO_ERROR_INTERNAL = AAUDIO_ERROR_ILLEGAL_ARGUMENT + 2,
 
     /**
@@ -142,7 +166,9 @@
     AAUDIO_ERROR_INVALID_STATE,
     // reserved
     // reserved
-    /* The server rejected the handle used to identify the stream.
+
+    /**
+     * The server rejected the handle used to identify the stream.
      */
     AAUDIO_ERROR_INVALID_HANDLE = AAUDIO_ERROR_INVALID_STATE + 3,
     // reserved
@@ -158,6 +184,10 @@
      * or a timestamp is not available.
      */
     AAUDIO_ERROR_UNAVAILABLE,
+
+    /**
+     * Reserved. This should not be returned.
+     */
     AAUDIO_ERROR_NO_FREE_HANDLES,
 
     /**
@@ -175,6 +205,10 @@
      * An operation took longer than expected.
      */
     AAUDIO_ERROR_TIMEOUT,
+
+    /**
+     * A queue is full. The operation would block.
+     */
     AAUDIO_ERROR_WOULD_BLOCK,
 
     /**
@@ -261,6 +295,7 @@
     AAUDIO_STREAM_STATE_CLOSED,
     /**
      * The stream is disconnected from audio device.
+     * @deprecated
      */
     AAUDIO_STREAM_STATE_DISCONNECTED
 };
@@ -741,7 +776,8 @@
  *
  * @return pointer to a text representation of an AAudio result code.
  */
-AAUDIO_API const char * AAudio_convertResultToText(aaudio_result_t returnCode) __INTRODUCED_IN(26);
+AAUDIO_API const char * _Nonnull AAudio_convertResultToText(aaudio_result_t returnCode)
+        __INTRODUCED_IN(26);
 
 /**
  * The text is the ASCII symbol corresponding to the stream state,
@@ -753,7 +789,7 @@
  *
  * @return pointer to a text representation of an AAudio state.
  */
-AAUDIO_API const char * AAudio_convertStreamStateToText(aaudio_stream_state_t state)
+AAUDIO_API const char * _Nonnull AAudio_convertStreamStateToText(aaudio_stream_state_t state)
         __INTRODUCED_IN(26);
 
 // ============================================================
@@ -774,8 +810,8 @@
  *
  * Available since API level 26.
  */
-AAUDIO_API aaudio_result_t AAudio_createStreamBuilder(AAudioStreamBuilder** builder)
-        __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t AAudio_createStreamBuilder(AAudioStreamBuilder* _Nullable* _Nonnull
+                                                      builder) __INTRODUCED_IN(26);
 
 /**
  * Request an audio device identified by an ID.
@@ -797,7 +833,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param deviceId device identifier or {@link #AAUDIO_UNSPECIFIED}
  */
-AAUDIO_API void AAudioStreamBuilder_setDeviceId(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setDeviceId(AAudioStreamBuilder* _Nonnull builder,
                                                 int32_t deviceId) __INTRODUCED_IN(26);
 
 /**
@@ -817,8 +853,8 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param packageName packageName of the calling app.
  */
-AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
-                                                   const char * packageName) __INTRODUCED_IN(31);
+AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* _Nonnull builder,
+        const char * _Nonnull packageName) __INTRODUCED_IN(31);
 
 /**
  * Declare the attribution tag of the context creating the stream.
@@ -832,8 +868,8 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param attributionTag attributionTag of the calling context.
  */
-AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* builder,
-        const char * attributionTag) __INTRODUCED_IN(31);
+AAUDIO_API void AAudioStreamBuilder_setAttributionTag(AAudioStreamBuilder* _Nonnull builder,
+        const char * _Nonnull attributionTag) __INTRODUCED_IN(31);
 
 /**
  * Request a sample rate in Hertz.
@@ -851,7 +887,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param sampleRate frames per second. Common rates include 44100 and 48000 Hz.
  */
-AAUDIO_API void AAudioStreamBuilder_setSampleRate(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setSampleRate(AAudioStreamBuilder* _Nonnull builder,
                                                   int32_t sampleRate) __INTRODUCED_IN(26);
 
 /**
@@ -870,12 +906,18 @@
  * will be respected if both this function and {@link AAudioStreamBuilder_setChannelMask} are
  * called.
  *
+ * Note that if the channel count is two, the stream may be mixed down to mono when the device
+ * only supports one channel. If the channel count is greater than two but the device supports
+ * fewer channels, the channels above the device's channel count will be dropped. If those
+ * higher channels should be mixed or spatialized instead, use
+ * {@link AAudioStreamBuilder_setChannelMask}.
+ *
  * Available since API level 26.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param channelCount Number of channels desired.
  */
-AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* _Nonnull builder,
                                                     int32_t channelCount) __INTRODUCED_IN(26);
 
 /**
@@ -888,7 +930,7 @@
  *
  * @deprecated use {@link AAudioStreamBuilder_setChannelCount}
  */
-AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* _Nonnull builder,
                                                        int32_t samplesPerFrame) __INTRODUCED_IN(26);
 
 /**
@@ -908,7 +950,7 @@
  * @param format common formats are {@link #AAUDIO_FORMAT_PCM_FLOAT} and
  *               {@link #AAUDIO_FORMAT_PCM_I16}.
  */
-AAUDIO_API void AAudioStreamBuilder_setFormat(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setFormat(AAudioStreamBuilder* _Nonnull builder,
                                               aaudio_format_t format) __INTRODUCED_IN(26);
 
 /**
@@ -924,7 +966,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param sharingMode {@link #AAUDIO_SHARING_MODE_SHARED} or {@link #AAUDIO_SHARING_MODE_EXCLUSIVE}
  */
-AAUDIO_API void AAudioStreamBuilder_setSharingMode(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setSharingMode(AAudioStreamBuilder* _Nonnull builder,
         aaudio_sharing_mode_t sharingMode) __INTRODUCED_IN(26);
 
 /**
@@ -937,7 +979,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param direction {@link #AAUDIO_DIRECTION_OUTPUT} or {@link #AAUDIO_DIRECTION_INPUT}
  */
-AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* _Nonnull builder,
         aaudio_direction_t direction) __INTRODUCED_IN(26);
 
 /**
@@ -951,8 +993,8 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param numFrames the desired buffer capacity in frames or {@link #AAUDIO_UNSPECIFIED}
  */
-AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(AAudioStreamBuilder* builder,
-        int32_t numFrames) __INTRODUCED_IN(26);
+AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(
+        AAudioStreamBuilder* _Nonnull builder, int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
  * Set the requested performance mode.
@@ -971,7 +1013,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param mode the desired performance mode, eg. {@link #AAUDIO_PERFORMANCE_MODE_LOW_LATENCY}
  */
-AAUDIO_API void AAudioStreamBuilder_setPerformanceMode(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setPerformanceMode(AAudioStreamBuilder* _Nonnull builder,
         aaudio_performance_mode_t mode) __INTRODUCED_IN(26);
 
 /**
@@ -988,7 +1030,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param usage the desired usage, eg. {@link #AAUDIO_USAGE_GAME}
  */
-AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* _Nonnull builder,
         aaudio_usage_t usage) __INTRODUCED_IN(28);
 
 /**
@@ -1005,7 +1047,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param contentType the type of audio data, eg. {@link #AAUDIO_CONTENT_TYPE_SPEECH}
  */
-AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* _Nonnull builder,
         aaudio_content_type_t contentType) __INTRODUCED_IN(28);
 
 /**
@@ -1020,7 +1062,8 @@
  * @param spatializationBehavior the desired behavior with regards to spatialization, eg.
  *     {@link #AAUDIO_SPATIALIZATION_BEHAVIOR_AUTO}
  */
-AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(
+        AAudioStreamBuilder* _Nonnull builder,
         aaudio_spatialization_behavior_t spatializationBehavior) __INTRODUCED_IN(32);
 
 /**
@@ -1036,7 +1079,7 @@
  * @param isSpatialized true if the content is already processed for binaural or transaural spatial
  *     rendering, false otherwise.
  */
-AAUDIO_API void AAudioStreamBuilder_setIsContentSpatialized(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setIsContentSpatialized(AAudioStreamBuilder* _Nonnull builder,
         bool isSpatialized) __INTRODUCED_IN(32);
 
 /**
@@ -1056,7 +1099,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param inputPreset the desired configuration for recording
  */
-AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* _Nonnull builder,
         aaudio_input_preset_t inputPreset) __INTRODUCED_IN(28);
 
 /**
@@ -1074,7 +1117,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param capturePolicy the desired level of opt-out from being captured.
  */
-AAUDIO_API void AAudioStreamBuilder_setAllowedCapturePolicy(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setAllowedCapturePolicy(AAudioStreamBuilder* _Nonnull builder,
         aaudio_allowed_capture_policy_t capturePolicy) __INTRODUCED_IN(29);
 
 /** Set the requested session ID.
@@ -1104,7 +1147,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param sessionId an allocated sessionID or {@link #AAUDIO_SESSION_ID_ALLOCATE}
  */
-AAUDIO_API void AAudioStreamBuilder_setSessionId(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setSessionId(AAudioStreamBuilder* _Nonnull builder,
         aaudio_session_id_t sessionId) __INTRODUCED_IN(28);
 
 
@@ -1126,7 +1169,7 @@
  * @param privacySensitive true if capture from this stream must be marked as privacy sensitive,
  * false otherwise.
  */
-AAUDIO_API void AAudioStreamBuilder_setPrivacySensitive(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setPrivacySensitive(AAudioStreamBuilder* _Nonnull builder,
         bool privacySensitive) __INTRODUCED_IN(30);
 
 /**
@@ -1157,7 +1200,10 @@
  * in the streams current data format to the audioData buffer.
  *
  * For an input stream, this function should read and process numFrames of data
- * from the audioData buffer.
+ * from the audioData buffer. The data in the audioData buffer must not be modified
+ * directly. Instead, it should be copied to another buffer before doing any modification.
+ * In many cases, writing to the audioData buffer of an input stream will result in a
+ * native exception.
  *
  * The audio data is passed through the buffer. So do NOT call AAudioStream_read() or
  * AAudioStream_write() on the stream that is making the callback.
@@ -1197,9 +1243,9 @@
  * @return AAUDIO_CALLBACK_RESULT_*
  */
 typedef aaudio_data_callback_result_t (*AAudioStream_dataCallback)(
-        AAudioStream *stream,
-        void *userData,
-        void *audioData,
+        AAudioStream* _Nonnull stream,
+        void* _Nullable userData,
+        void* _Nonnull audioData,
         int32_t numFrames);
 
 /**
@@ -1228,8 +1274,9 @@
  * @param userData pointer to an application data structure that will be passed
  *          to the callback functions.
  */
-AAUDIO_API void AAudioStreamBuilder_setDataCallback(AAudioStreamBuilder* builder,
-        AAudioStream_dataCallback callback, void *userData) __INTRODUCED_IN(26);
+AAUDIO_API void AAudioStreamBuilder_setDataCallback(AAudioStreamBuilder* _Nonnull builder,
+        AAudioStream_dataCallback _Nullable callback, void* _Nullable userData)
+        __INTRODUCED_IN(26);
 
 /**
  * Set the requested data callback buffer size in frames.
@@ -1256,8 +1303,8 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param numFrames the desired buffer size in frames or {@link #AAUDIO_UNSPECIFIED}
  */
-AAUDIO_API void AAudioStreamBuilder_setFramesPerDataCallback(AAudioStreamBuilder* builder,
-                                                             int32_t numFrames) __INTRODUCED_IN(26);
+AAUDIO_API void AAudioStreamBuilder_setFramesPerDataCallback(AAudioStreamBuilder* _Nonnull builder,
+        int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
  * Prototype for the callback function that is passed to
@@ -1284,8 +1331,8 @@
  * @param error an AAUDIO_ERROR_* value.
  */
 typedef void (*AAudioStream_errorCallback)(
-        AAudioStream *stream,
-        void *userData,
+        AAudioStream* _Nonnull stream,
+        void* _Nullable userData,
         aaudio_result_t error);
 
 /**
@@ -1311,8 +1358,9 @@
  * @param userData pointer to an application data structure that will be passed
  *          to the callback functions.
  */
-AAUDIO_API void AAudioStreamBuilder_setErrorCallback(AAudioStreamBuilder* builder,
-        AAudioStream_errorCallback callback, void *userData) __INTRODUCED_IN(26);
+AAUDIO_API void AAudioStreamBuilder_setErrorCallback(AAudioStreamBuilder* _Nonnull builder,
+        AAudioStream_errorCallback _Nullable callback, void* _Nullable userData)
+        __INTRODUCED_IN(26);
 
 /**
  * Open a stream based on the options in the StreamBuilder.
@@ -1326,8 +1374,8 @@
  * @param stream pointer to a variable to receive the new stream reference
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStreamBuilder_openStream(AAudioStreamBuilder* builder,
-        AAudioStream** stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStreamBuilder_openStream(AAudioStreamBuilder* _Nonnull builder,
+        AAudioStream* _Nullable* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
  * Delete the resources associated with the StreamBuilder.
@@ -1337,7 +1385,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStreamBuilder_delete(AAudioStreamBuilder* builder)
+AAUDIO_API aaudio_result_t  AAudioStreamBuilder_delete(AAudioStreamBuilder* _Nonnull builder)
     __INTRODUCED_IN(26);
 
 /**
@@ -1363,7 +1411,7 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @param channelMask Audio channel mask desired.
  */
-AAUDIO_API void AAudioStreamBuilder_setChannelMask(AAudioStreamBuilder* builder,
+AAUDIO_API void AAudioStreamBuilder_setChannelMask(AAudioStreamBuilder* _Nonnull builder,
         aaudio_channel_mask_t channelMask) __INTRODUCED_IN(32);
 
 // ============================================================
@@ -1392,7 +1440,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* stream) __INTRODUCED_IN(30);
+AAUDIO_API aaudio_result_t  AAudioStream_release(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(30);
 
 /**
  * Delete the internal data structures associated with the stream created
@@ -1405,7 +1454,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
  * Asynchronously request to start playing the stream. For output streams, one should
@@ -1419,7 +1468,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestStart(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStream_requestStart(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to pause.
@@ -1436,12 +1486,16 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestPause(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStream_requestPause(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to flush.
  * Flushing will discard any pending data.
- * This call only works if the stream is pausing or paused. TODO review
+ * This call only works if the stream is OPEN, PAUSED, STOPPED, or FLUSHED.
+ * Calling this function when in other states,
+ * or calling from an AAudio callback function,
+ * will have no effect and an error will be returned.
  * Frame counters are not reset by a flush. They may be advanced.
  * After this call the state will be in {@link #AAUDIO_STREAM_STATE_FLUSHING} or
  * {@link #AAUDIO_STREAM_STATE_FLUSHED}.
@@ -1453,7 +1507,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestFlush(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStream_requestFlush(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to stop.
@@ -1466,7 +1521,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestStop(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t  AAudioStream_requestStop(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Query the current state of the client, eg. {@link #AAUDIO_STREAM_STATE_PAUSING}
@@ -1480,7 +1536,8 @@
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  */
-AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Wait until the current state no longer matches the input state.
@@ -1506,8 +1563,8 @@
  * @param timeoutNanoseconds Maximum number of nanoseconds to wait for completion.
  * @return {@link #AAUDIO_OK} or a negative error.
  */
-AAUDIO_API aaudio_result_t AAudioStream_waitForStateChange(AAudioStream* stream,
-        aaudio_stream_state_t inputState, aaudio_stream_state_t *nextState,
+AAUDIO_API aaudio_result_t AAudioStream_waitForStateChange(AAudioStream* _Nonnull stream,
+        aaudio_stream_state_t inputState, aaudio_stream_state_t* _Nullable nextState,
         int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
 
 // ============================================================
@@ -1536,8 +1593,8 @@
  * @param timeoutNanoseconds Maximum number of nanoseconds to wait for completion.
  * @return The number of frames actually read or a negative error.
  */
-AAUDIO_API aaudio_result_t AAudioStream_read(AAudioStream* stream,
-        void *buffer, int32_t numFrames, int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t AAudioStream_read(AAudioStream* _Nonnull stream,
+        void* _Nonnull buffer, int32_t numFrames, int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
 
 /**
  * Write data to the stream.
@@ -1561,8 +1618,9 @@
  * @param timeoutNanoseconds Maximum number of nanoseconds to wait for completion.
  * @return The number of frames actually written or a negative error.
  */
-AAUDIO_API aaudio_result_t AAudioStream_write(AAudioStream* stream,
-        const void *buffer, int32_t numFrames, int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t AAudioStream_write(AAudioStream* _Nonnull stream,
+        const void* _Nonnull buffer, int32_t numFrames, int64_t timeoutNanoseconds)
+        __INTRODUCED_IN(26);
 
 // ============================================================
 // Stream - queries
@@ -1586,7 +1644,7 @@
  * @param numFrames requested number of frames that can be filled without blocking
  * @return actual buffer size in frames or a negative error
  */
-AAUDIO_API aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* stream,
+AAUDIO_API aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* _Nonnull stream,
         int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
@@ -1597,7 +1655,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return buffer size in frames.
  */
-AAUDIO_API int32_t AAudioStream_getBufferSizeInFrames(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getBufferSizeInFrames(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Query the number of frames that the application should read or write at
@@ -1614,7 +1673,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return burst size
  */
-AAUDIO_API int32_t AAudioStream_getFramesPerBurst(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getFramesPerBurst(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Query maximum buffer capacity in frames.
@@ -1624,7 +1684,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return  buffer capacity in frames
  */
-AAUDIO_API int32_t AAudioStream_getBufferCapacityInFrames(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getBufferCapacityInFrames(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Query the size of the buffer that will be passed to the dataProc callback
@@ -1647,7 +1708,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return callback buffer size in frames or {@link #AAUDIO_UNSPECIFIED}
  */
-AAUDIO_API int32_t AAudioStream_getFramesPerDataCallback(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getFramesPerDataCallback(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * An XRun is an Underrun or an Overrun.
@@ -1666,15 +1728,31 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return the underrun or overrun count
  */
-AAUDIO_API int32_t AAudioStream_getXRunCount(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getXRunCount(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual sample rate
+ * @return actual sample rate of the stream
  */
-AAUDIO_API int32_t AAudioStream_getSampleRate(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getSampleRate(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
+
+/**
+ * There may be sample rate conversions in the Audio framework.
+ * The sample rate set in the stream builder may not be the one actually used by the hardware.
+ *
+ * This returns the sample rate used by the hardware in Hertz.
+ *
+ * If AAudioStreamBuilder_openStream() returned AAUDIO_OK, the result should always be valid.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual sample rate of the underlying hardware
+ */
+AAUDIO_API int32_t AAudioStream_getHardwareSampleRate(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
 
 /**
  * A stream has one or more channels of data.
@@ -1683,9 +1761,26 @@
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual number of channels
+ * @return actual number of channels of the stream
  */
-AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
+
+/**
+ * There may be channel conversions in the Audio framework.
+ * The channel count or channel mask set in the stream builder may not be the actual number of
+ * channels used by the hardware.
+ *
+ * This returns the channel count used by the hardware.
+ *
+ * If AAudioStreamBuilder_openStream() returned AAUDIO_OK, the result should always be valid.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual number of channels of the underlying hardware
+ */
+AAUDIO_API int32_t AAudioStream_getHardwareChannelCount(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
 
 /**
  * Identical to AAudioStream_getChannelCount().
@@ -1695,7 +1790,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual number of samples frame
  */
-AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Available since API level 26.
@@ -1703,15 +1799,39 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual device ID
  */
-AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
  * Available since API level 26.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual data format
+ * @return actual data format of the stream
  */
-AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
+
+/**
+ * There may be data format conversions in the Audio framework.
+ * The data format set in the stream builder may not be the actual format used by the hardware.
+ *
+ * This returns the audio format used by the hardware.
+ *
+ * If AAudioStreamBuilder_openStream() returned AAUDIO_OK, this should always return an
+ * aaudio_format_t.
+ *
+ * AUDIO_FORMAT_PCM_8_24_BIT is currently not supported in AAudio, but the hardware may use it.
+ * If the hardware uses AUDIO_FORMAT_PCM_8_24_BIT, this returns AAUDIO_FORMAT_PCM_I24_PACKED.
+ *
+ * If any other format used by the hardware is not supported by AAudio, this will return
+ * AAUDIO_FORMAT_INVALID.
+ *
+ * Available since API level 34.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return actual data format of the underlying hardware.
+ */
+AAUDIO_API aaudio_format_t AAudioStream_getHardwareFormat(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(__ANDROID_API_U__);
 
 /**
  * Provide actual sharing mode.
@@ -1721,7 +1841,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return  actual sharing mode
  */
-AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream)
+AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(26);
 
 /**
@@ -1731,7 +1851,7 @@
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  */
-AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* stream)
+AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(26);
 
 /**
@@ -1740,7 +1860,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return direction
  */
-AAUDIO_API aaudio_direction_t AAudioStream_getDirection(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_direction_t AAudioStream_getDirection(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Passes back the number of frames that have been written since the stream was created.
@@ -1755,7 +1876,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames written
  */
-AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Passes back the number of frames that have been read since the stream was created.
@@ -1770,7 +1892,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames read
  */
-AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream) __INTRODUCED_IN(26);
+AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
 
 /**
  * Passes back the session ID associated with this stream.
@@ -1795,7 +1917,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return session ID or {@link #AAUDIO_SESSION_ID_NONE}
  */
-AAUDIO_API aaudio_session_id_t AAudioStream_getSessionId(AAudioStream* stream) __INTRODUCED_IN(28);
+AAUDIO_API aaudio_session_id_t AAudioStream_getSessionId(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(28);
 
 /**
  * Passes back the time at which a particular frame was presented.
@@ -1821,8 +1944,9 @@
  * @param timeNanoseconds pointer to a variable to receive the time
  * @return {@link #AAUDIO_OK} or a negative error
  */
-AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream,
-        clockid_t clockid, int64_t *framePosition, int64_t *timeNanoseconds) __INTRODUCED_IN(26);
+AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* _Nonnull stream,
+        clockid_t clockid, int64_t* _Nonnull framePosition, int64_t* _Nonnull timeNanoseconds)
+        __INTRODUCED_IN(26);
 
 /**
  * Return the use case for the stream.
@@ -1832,7 +1956,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames read
  */
-AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream) __INTRODUCED_IN(28);
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* _Nonnull stream) __INTRODUCED_IN(28);
 
 /**
  * Return the content type for the stream.
@@ -1842,7 +1966,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return content type, for example {@link #AAUDIO_CONTENT_TYPE_MUSIC}
  */
-AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream)
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(28);
 
 /**
@@ -1857,7 +1981,7 @@
  * @return spatialization behavior, for example {@link #AAUDIO_SPATIALIZATION_BEHAVIOR_AUTO}
  */
 AAUDIO_API aaudio_spatialization_behavior_t AAudioStream_getSpatializationBehavior(
-        AAudioStream* stream) __INTRODUCED_IN(32);
+        AAudioStream* _Nonnull stream) __INTRODUCED_IN(32);
 
 /**
  * Return whether the content of the stream is spatialized.
@@ -1867,7 +1991,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return true if the content is spatialized
  */
-AAUDIO_API bool AAudioStream_isContentSpatialized(AAudioStream* stream) __INTRODUCED_IN(32);
+AAUDIO_API bool AAudioStream_isContentSpatialized(AAudioStream* _Nonnull stream)
+        __INTRODUCED_IN(32);
 
 
 /**
@@ -1878,7 +2003,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return input preset, for example {@link #AAUDIO_INPUT_PRESET_CAMCORDER}
  */
-AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream)
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(28);
 
 /**
@@ -1891,7 +2016,7 @@
  * @return the allowed capture policy, for example {@link #AAUDIO_ALLOW_CAPTURE_BY_ALL}
  */
 AAUDIO_API aaudio_allowed_capture_policy_t AAudioStream_getAllowedCapturePolicy(
-        AAudioStream* stream) __INTRODUCED_IN(29);
+        AAudioStream* _Nonnull stream) __INTRODUCED_IN(29);
 
 
 /**
@@ -1904,7 +2029,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return true if privacy sensitive, false otherwise
  */
-AAUDIO_API bool AAudioStream_isPrivacySensitive(AAudioStream* stream)
+AAUDIO_API bool AAudioStream_isPrivacySensitive(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(30);
 
 /**
@@ -1916,7 +2041,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual channel mask
  */
-AAUDIO_API aaudio_channel_mask_t AAudioStream_getChannelMask(AAudioStream* stream)
+AAUDIO_API aaudio_channel_mask_t AAudioStream_getChannelMask(AAudioStream* _Nonnull stream)
         __INTRODUCED_IN(32);
 
 #ifdef __cplusplus
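
For illustration (not part of this patch): a minimal sketch that opens an output stream and reads back the new API 34 hardware getters next to the stream-level values. Error handling is trimmed and the builder settings are arbitrary for the example:

    #include <aaudio/AAudio.h>
    #include <cstdio>

    // Sketch: compare the stream's negotiated configuration with what the
    // hardware actually uses, via the *_getHardware* getters added above.
    static aaudio_result_t openAndQueryHardware() {
        AAudioStreamBuilder *builder = nullptr;
        aaudio_result_t result = AAudio_createStreamBuilder(&builder);
        if (result != AAUDIO_OK) return result;

        AAudioStreamBuilder_setDirection(builder, AAUDIO_DIRECTION_OUTPUT);
        AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_FLOAT);

        AAudioStream *stream = nullptr;
        result = AAudioStreamBuilder_openStream(builder, &stream);
        AAudioStreamBuilder_delete(builder);
        if (result != AAUDIO_OK) return result;

        printf("sample rate: stream %d, hardware %d\n",
               AAudioStream_getSampleRate(stream),
               AAudioStream_getHardwareSampleRate(stream));
        printf("channels:    stream %d, hardware %d\n",
               AAudioStream_getChannelCount(stream),
               AAudioStream_getHardwareChannelCount(stream));
        printf("format:      stream %d, hardware %d\n",
               AAudioStream_getFormat(stream),
               AAudioStream_getHardwareFormat(stream));

        AAudioStream_close(stream);
        return AAUDIO_OK;
    }
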
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 0f2d7a2..01d97b6 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -87,7 +87,7 @@
  * @note This is only for testing. Do not use this in an application.
  * It may change or be removed at any time.
  *
- * @return true if the stream uses ther MMAP data path
+ * @return true if the stream uses the MMAP data path
  */
 AAUDIO_API bool AAudioStream_isMMapUsed(AAudioStream* stream);
 
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 363d219..30f451a 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -134,6 +134,10 @@
 cc_library {
     name: "libaaudio_internal",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     local_include_dirs: [
         "binding",
         "client",
@@ -167,7 +171,6 @@
         "libbinder",
         "framework-permission-aidl-cpp",
         "aaudio-aidl-cpp",
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
     ],
@@ -209,6 +212,7 @@
         "flowgraph/ChannelCountConverter.cpp",
         "flowgraph/ClipToRange.cpp",
         "flowgraph/FlowGraphNode.cpp",
+        "flowgraph/Limiter.cpp",
         "flowgraph/ManyToMultiConverter.cpp",
         "flowgraph/MonoBlend.cpp",
         "flowgraph/MonoToMultiConverter.cpp",
@@ -260,7 +264,7 @@
         "binding/aidl/aaudio/IAAudioService.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V1",
+        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "shared-file-region-aidl",
         "framework-permission-aidl",
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
index 42d81ca..ee7480b 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -23,15 +23,16 @@
 using android::aidl_utils::statusTFromBinderStatus;
 using android::binder::Status;
 
-AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate)
-        : mDelegate(delegate) {}
+AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate,
+                                         int32_t serviceLifetimeId)
+        : mDelegate(delegate), mServiceLifetimeId(serviceLifetimeId) {}
 
 void AAudioBinderAdapter::registerClient(const android::sp<IAAudioClient>& client) {
     mDelegate->registerClient(client);
 }
 
-aaudio_handle_t AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
-                                                AAudioStreamConfiguration& config) {
+AAudioHandleInfo AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
+                                                 AAudioStreamConfiguration& config) {
     aaudio_handle_t result;
     StreamParameters params;
     Status status = mDelegate->openStream(request.parcelable(),
@@ -41,23 +42,29 @@
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     config = params;
-    return result;
+    return {mServiceLifetimeId, result};
 }
 
-aaudio_result_t AAudioBinderAdapter::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::closeStream(const AAudioHandleInfo& streamHandleInfo) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->closeStream(streamHandle, &result);
+    Status status = mDelegate->closeStream(streamHandleInfo.getHandle(), &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                                           AudioEndpointParcelable& endpointOut) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
     Endpoint endpoint;
-    Status status = mDelegate->getStreamDescription(streamHandle,
+    Status status = mDelegate->getStreamDescription(streamHandleInfo.getHandle(),
                                                     &endpoint,
                                                     &result);
     if (!status.isOk()) {
@@ -67,68 +74,91 @@
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::startStream(const AAudioHandleInfo& streamHandleInfo) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->startStream(streamHandle, &result);
+    Status status = mDelegate->startStream(streamHandleInfo.getHandle(), &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->pauseStream(streamHandle, &result);
+    Status status = mDelegate->pauseStream(streamHandleInfo.getHandle(), &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::stopStream(const AAudioHandleInfo& streamHandleInfo) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->stopStream(streamHandle, &result);
+    Status status = mDelegate->stopStream(streamHandleInfo.getHandle(), &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::flushStream(const AAudioHandleInfo& streamHandleInfo) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->flushStream(streamHandle, &result);
+    Status status = mDelegate->flushStream(streamHandleInfo.getHandle(), &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                          pid_t clientThreadId,
                                                          int64_t periodNanoseconds) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds, &result);
+    Status status = mDelegate->registerAudioThread(
+            streamHandleInfo.getHandle(), clientThreadId, periodNanoseconds, &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                            pid_t clientThreadId) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
-    Status status = mDelegate->unregisterAudioThread(streamHandle, clientThreadId, &result);
+    Status status = mDelegate->unregisterAudioThread(
+            streamHandleInfo.getHandle(), clientThreadId, &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
     return result;
 }
 
-aaudio_result_t AAudioBinderAdapter::exitStandby(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::exitStandby(const AAudioHandleInfo& streamHandleInfo,
                                                  AudioEndpointParcelable &endpointOut) {
+    if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     aaudio_result_t result;
     Endpoint endpoint;
-    Status status = mDelegate->exitStandby(streamHandle, &endpoint, &result);
+    Status status = mDelegate->exitStandby(streamHandleInfo.getHandle(), &endpoint, &result);
     if (!status.isOk()) {
         result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
     }
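
Every adapter method above now applies the same guard: if the service lifetime id recorded in the handle info does not match the id the adapter was constructed with, the call is rejected with AAUDIO_ERROR_DISCONNECTED instead of being forwarded to a service instance that never issued the handle. A self-contained sketch of that guard, using simplified stand-in types and a placeholder error value rather than the real binder plumbing:

// Simplified stand-ins; the real types are AAudioHandleInfo and AAudioBinderAdapter.
// The numeric error value below is a placeholder, not the real AAudio constant.
#include <cstdint>
#include <cstdio>

constexpr int32_t ERROR_DISCONNECTED = -10;  // placeholder for AAUDIO_ERROR_DISCONNECTED
constexpr int32_t OK = 0;

struct HandleInfo {
    int32_t serviceLifetimeId;
    int32_t handle;
};

class AdapterSketch {
public:
    explicit AdapterSketch(int32_t serviceLifetimeId) : mServiceLifetimeId(serviceLifetimeId) {}

    int32_t startStream(const HandleInfo& info) const {
        // Same guard as above: a handle minted against an older service instance is
        // reported as disconnected instead of being forwarded to the new service.
        if (info.serviceLifetimeId != mServiceLifetimeId) {
            return ERROR_DISCONNECTED;
        }
        // ... would forward info.handle over binder here ...
        return OK;
    }

private:
    const int32_t mServiceLifetimeId;
};

int main() {
    AdapterSketch adapterForRestartedService(/*serviceLifetimeId=*/1);
    HandleInfo staleHandle{/*serviceLifetimeId=*/0, /*handle=*/42};
    printf("stale start -> %d\n", adapterForRestartedService.startStream(staleHandle));
    return 0;
}
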
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
index d170783..301150f 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.h
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -30,38 +30,40 @@
  */
 class AAudioBinderAdapter : public AAudioServiceInterface {
 public:
-    explicit AAudioBinderAdapter(IAAudioService* delegate);
+    AAudioBinderAdapter(IAAudioService* delegate, int32_t serviceLifetimeId);
 
     void registerClient(const android::sp<IAAudioClient>& client) override;
 
-    aaudio_handle_t openStream(const AAudioStreamRequest& request,
-                               AAudioStreamConfiguration& configuration) override;
+    AAudioHandleInfo openStream(const AAudioStreamRequest& request,
+                                AAudioStreamConfiguration& configuration) override;
 
-    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+    aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                          AudioEndpointParcelable& endpoint) override;
 
-    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+    aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                         pid_t clientThreadId,
                                         int64_t periodNanoseconds) override;
 
-    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+    aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                           pid_t clientThreadId) override;
 
-    aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+    aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
                                 AudioEndpointParcelable &parcelable) override;
 
 private:
     IAAudioService* const mDelegate;
+    // A unique id identifying the service instance that this adapter is connected to.
+    const int32_t mServiceLifetimeId;
 };
 
 }  // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 8e5facc..5f34a75 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -90,7 +90,8 @@
                     ALOGE("%s() - linkToDeath() returned %d", __func__, status);
                 }
                 aaudioService = interface_cast<IAAudioService>(binder);
-                mAdapter = std::make_shared<Adapter>(aaudioService, mAAudioClient);
+                mAdapter = std::make_shared<Adapter>(
+                        aaudioService, mAAudioClient, mAAudioClient->getServiceLifetimeId());
                 needToRegister = true;
                 // Make sure callbacks can be received by mAAudioClient
                 ProcessState::self()->startThreadPool();
@@ -115,97 +116,101 @@
 /**
 * @param request info needed to create the stream
 * @param configuration contains information about the created stream
-* @return handle to the stream or a negative error
+* @return an object carrying aaudio handle information: the lifetime id of the
+*         connected aaudio service, used to recognize which service instance issued
+*         the stream, and the aaudio handle that identifies the stream. If an error
+*         occurs, the aaudio handle will be set to the negative error code.
 */
-aaudio_handle_t AAudioBinderClient::openStream(const AAudioStreamRequest &request,
-                                               AAudioStreamConfiguration &configuration) {
-    aaudio_handle_t stream;
+AAudioHandleInfo AAudioBinderClient::openStream(const AAudioStreamRequest &request,
+                                                AAudioStreamConfiguration &configuration) {
     for (int i = 0; i < 2; i++) {
         std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
-        if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+        if (service.get() == nullptr) {
+            return {};
+        }
 
-        stream = service->openStream(request, configuration);
+        AAudioHandleInfo handleInfo = service->openStream(request, configuration);
 
-        if (stream == AAUDIO_ERROR_NO_SERVICE) {
+        if (handleInfo.getHandle() == AAUDIO_ERROR_NO_SERVICE) {
             ALOGE("openStream lost connection to AAudioService.");
             dropAAudioService(); // force a reconnect
         } else {
-            break;
+            return handleInfo;
         }
     }
-    return stream;
+    return {};
 }
 
-aaudio_result_t AAudioBinderClient::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::closeStream(const AAudioHandleInfo& streamHandleInfo) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->closeStream(streamHandle);
+    return service->closeStream(streamHandleInfo);
 }
 
 /* Get an immutable description of the in-memory queues
 * used to communicate with the underlying HAL or Service.
 */
-aaudio_result_t AAudioBinderClient::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                                          AudioEndpointParcelable& endpointOut) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->getStreamDescription(streamHandle, endpointOut);
+    return service->getStreamDescription(streamHandleInfo, endpointOut);
 }
 
-aaudio_result_t AAudioBinderClient::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::startStream(const AAudioHandleInfo& streamHandleInfo) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->startStream(streamHandle);
+    return service->startStream(streamHandleInfo);
 }
 
-aaudio_result_t AAudioBinderClient::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->pauseStream(streamHandle);
+    return service->pauseStream(streamHandleInfo);
 }
 
-aaudio_result_t AAudioBinderClient::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::stopStream(const AAudioHandleInfo& streamHandleInfo) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->stopStream(streamHandle);
+    return service->stopStream(streamHandleInfo);
 }
 
-aaudio_result_t AAudioBinderClient::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::flushStream(const AAudioHandleInfo& streamHandleInfo) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->flushStream(streamHandle);
+    return service->flushStream(streamHandleInfo);
 }
 
 /**
 * Manage the specified thread as a low latency audio thread.
 */
-aaudio_result_t AAudioBinderClient::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                         pid_t clientThreadId,
                                                         int64_t periodNanoseconds) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+    return service->registerAudioThread(streamHandleInfo, clientThreadId, periodNanoseconds);
 }
 
-aaudio_result_t AAudioBinderClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                           pid_t clientThreadId) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->unregisterAudioThread(streamHandle, clientThreadId);
+    return service->unregisterAudioThread(streamHandleInfo, clientThreadId);
 }
 
-aaudio_result_t AAudioBinderClient::exitStandby(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::exitStandby(const AAudioHandleInfo& streamHandleInfo,
                                                 AudioEndpointParcelable &endpointOut) {
     std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
     if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
 
-    return service->exitStandby(streamHandle, endpointOut);
+    return service->exitStandby(streamHandleInfo, endpointOut);
 }
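
openStream() above attempts the call at most twice: when the first attempt reports AAUDIO_ERROR_NO_SERVICE it drops the cached service proxy to force a reconnect and retries once, and if both attempts fail it returns a default (invalid) AAudioHandleInfo. A simplified sketch of that retry shape, with hypothetical names and a placeholder error value:

// Hypothetical names; the error value is a placeholder for AAUDIO_ERROR_NO_SERVICE.
#include <cstdint>
#include <cstdio>
#include <functional>

constexpr int32_t NO_SERVICE = -20;  // placeholder, not the real constant

struct HandleInfoSketch {
    int32_t serviceLifetimeId;
    int32_t handle;
};

// openOnce simulates one call into the service; reconnect simulates dropAAudioService().
HandleInfoSketch openWithRetry(const std::function<HandleInfoSketch()>& openOnce,
                               const std::function<void()>& reconnect) {
    for (int attempt = 0; attempt < 2; ++attempt) {
        HandleInfoSketch info = openOnce();
        if (info.handle == NO_SERVICE) {
            reconnect();       // force a fresh connection before the second try
            continue;
        }
        return info;           // success, or an error other than NO_SERVICE
    }
    return {-1, -1};           // both attempts lost the service: invalid handle info
}

int main() {
    int calls = 0;
    HandleInfoSketch info = openWithRetry(
            [&calls]() {
                ++calls;
                return calls == 1 ? HandleInfoSketch{0, NO_SERVICE} : HandleInfoSketch{1, 7};
            },
            []() { printf("reconnecting...\n"); });
    printf("lifetime=%d handle=%d after %d call(s)\n",
           info.serviceLifetimeId, info.handle, calls);
    return 0;
}
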
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index 0968f4c..8faf6e8 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_AAUDIO_AAUDIO_BINDER_CLIENT_H
 #define ANDROID_AAUDIO_AAUDIO_BINDER_CLIENT_H
 
+#include <mutex>
+
 #include <utils/RefBase.h>
 #include <utils/Singleton.h>
 
@@ -52,63 +54,66 @@
     /**
      * @param request info needed to create the stream
      * @param configuration contains resulting information about the created stream
-     * @return handle to the stream or a negative error
+     * @return an object carrying aaudio handle information: the lifetime id of the
+     *         connected aaudio service, used to recognize which service instance issued
+     *         the stream, and the aaudio handle that identifies the stream. If an error
+     *         occurs, the aaudio handle will be set to the negative error code.
      */
-    aaudio_handle_t openStream(const AAudioStreamRequest &request,
-                               AAudioStreamConfiguration &configurationOutput) override;
+    AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+                                AAudioStreamConfiguration &configurationOutput) override;
 
-    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
 
     /* Get an immutable description of the in-memory queues
     * used to communicate with the underlying HAL or Service.
     */
-    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+    aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                          AudioEndpointParcelable &endpointOut) override;
 
     /**
      * Start the flow of data.
      * This is asynchronous. When complete, the service will send a STARTED event.
      */
-    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
 
     /**
      * Stop the flow of data such that start() can resume without loss of data.
      * This is asynchronous. When complete, the service will send a PAUSED event.
      */
-    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
 
     /**
      *  Discard any data held by the underlying HAL or Service.
      * This is asynchronous. When complete, the service will send a FLUSHED event.
      */
-    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
 
     /**
      * Manage the specified thread as a low latency audio thread.
      * TODO Consider passing this information as part of the startStream() call.
      */
-    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
-                                                pid_t clientThreadId,
-                                                int64_t periodNanoseconds) override;
+    aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
+                                        pid_t clientThreadId,
+                                        int64_t periodNanoseconds) override;
 
-    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
-                                                  pid_t clientThreadId) override;
+    aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
+                                          pid_t clientThreadId) override;
 
-    aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+    aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo __unused,
                                 const android::AudioClient& client __unused,
                                 const audio_attributes_t *attr __unused,
                                 audio_port_handle_t *clientHandle __unused) override {
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+    aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo __unused,
                                audio_port_handle_t clientHandle __unused)  override {
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+    aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
                                 AudioEndpointParcelable &endpointOut) override;
 
     void onStreamChange(aaudio_handle_t /*handle*/, int32_t /*opcode*/, int32_t /*value*/) {
@@ -117,6 +122,10 @@
         ALOGW("onStreamChange called!");
     }
 
+    int32_t getServiceLifetimeId() const {
+        return mAAudioClient->getServiceLifetimeId();
+    }
+
     class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
     public:
         explicit AAudioClient(const android::wp<AAudioBinderClient>& aaudioBinderClient)
@@ -125,6 +134,7 @@
 
         // implement DeathRecipient
         virtual void binderDied(const android::wp<android::IBinder>& who __unused) {
+            mServiceLifetimeId++;
             android::sp<AAudioBinderClient> client = mBinderClient.promote();
             if (client.get() != nullptr) {
                 client->dropAAudioService();
@@ -141,8 +151,13 @@
             }
             return android::binder::Status::ok();
         }
+
+        int32_t getServiceLifetimeId() const {
+            return mServiceLifetimeId.load();
+        }
     private:
         android::wp<AAudioBinderClient> mBinderClient;
+        std::atomic_int                 mServiceLifetimeId{0};
     };
 
     // This adapter is used to convert the binder interface (delegate) to the AudioServiceInterface
@@ -153,8 +168,9 @@
     class Adapter : public AAudioBinderAdapter {
     public:
         Adapter(const android::sp<IAAudioService>& delegate,
-                android::sp<AAudioClient> aaudioClient)
-                : AAudioBinderAdapter(delegate.get()),
+                android::sp<AAudioClient> aaudioClient,
+                int32_t serviceLifetimeId)
+                : AAudioBinderAdapter(delegate.get(), serviceLifetimeId),
                   mDelegate(delegate),
                   mAAudioClient(std::move(aaudioClient)) {}
 
@@ -165,7 +181,7 @@
         }
 
         // This should never be called (call is rejected at the AudioBinderClient level).
-        aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+        aaudio_result_t startClient(const AAudioHandleInfo& streamHandle __unused,
                                     const android::AudioClient& client __unused,
                                     const audio_attributes_t* attr __unused,
                                     audio_port_handle_t* clientHandle __unused) override {
@@ -174,7 +190,7 @@
         }
 
         // This should never be called (call is rejected at the AudioBinderClient level).
-        aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+        aaudio_result_t stopClient(const AAudioHandleInfo& streamHandle __unused,
                                    audio_port_handle_t clientHandle __unused) override {
             LOG_ALWAYS_FATAL("Shouldn't get here");
             return AAUDIO_ERROR_UNAVAILABLE;
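
In the header above, the AAudioClient death recipient increments an atomic mServiceLifetimeId each time the binder dies, so adapters created after a service restart carry a newer id than any handle issued by the previous service instance. A compact sketch of that counter, with the binder death notification reduced to a plain method call:

// Sketch only: in the real code the counter lives in AAudioBinderClient::AAudioClient and is
// incremented inside IBinder::DeathRecipient::binderDied().
#include <atomic>
#include <cstdint>
#include <cstdio>

class ServiceLifetimeTracker {
public:
    // Called from the death notification; each service restart yields a new id.
    void onServiceDied() { mServiceLifetimeId++; }

    int32_t getServiceLifetimeId() const { return mServiceLifetimeId.load(); }

private:
    std::atomic<int32_t> mServiceLifetimeId{0};
};

int main() {
    ServiceLifetimeTracker tracker;
    const int32_t idAtOpen = tracker.getServiceLifetimeId();  // stamped into the handle info
    tracker.onServiceDied();                                  // service restarted underneath us
    printf("handle is stale: %s\n",
           idAtOpen != tracker.getServiceLifetimeId() ? "yes" : "no");
    return 0;
}
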
diff --git a/media/libaaudio/src/binding/AAudioServiceDefinitions.h b/media/libaaudio/src/binding/AAudioServiceDefinitions.h
index 8a2303c..7b8978f 100644
--- a/media/libaaudio/src/binding/AAudioServiceDefinitions.h
+++ b/media/libaaudio/src/binding/AAudioServiceDefinitions.h
@@ -85,6 +85,23 @@
     RingBufferDescriptor dataQueueDescriptor;    // playback or capture
 } EndpointDescriptor;
 
+static constexpr int32_t AAUDIO_SERVICE_LIFETIME_ID_INVALID = -1;
+
+class AAudioHandleInfo {
+public:
+    AAudioHandleInfo()
+            : AAudioHandleInfo(AAUDIO_SERVICE_LIFETIME_ID_INVALID, AAUDIO_HANDLE_INVALID) {}
+    AAudioHandleInfo(int32_t serviceLifetimeId, aaudio_handle_t handle)
+            : mServiceLifetimeId(serviceLifetimeId), mHandle(handle) {}
+
+    int32_t getServiceLifetimeId() const { return mServiceLifetimeId; }
+    aaudio_handle_t getHandle() const { return mHandle; }
+
+private:
+    int32_t mServiceLifetimeId;
+    aaudio_handle_t mHandle;
+};
+
 } // namespace aaudio
 
 #endif //BINDING_AAUDIOSERVICEDEFINITIONS_H
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index e901767..79f498b 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -45,55 +45,58 @@
     /**
      * @param request info needed to create the stream
      * @param configuration contains information about the created stream
-     * @return handle to the stream or a negative error
+     * @return an object carrying aaudio handle information: the lifetime id of the
+     *         connected aaudio service, used to recognize which service instance issued
+     *         the stream, and the aaudio handle that identifies the stream. If an error
+     *         occurs, the aaudio handle will be set to the negative error code.
      */
-    virtual aaudio_handle_t openStream(const AAudioStreamRequest &request,
-                                       AAudioStreamConfiguration &configuration) = 0;
+    virtual AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+                                        AAudioStreamConfiguration &configuration) = 0;
 
-    virtual aaudio_result_t closeStream(aaudio_handle_t streamHandle) = 0;
+    virtual aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) = 0;
 
     /* Get an immutable description of the in-memory queues
     * used to communicate with the underlying HAL or Service.
     */
-    virtual aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                                  AudioEndpointParcelable &parcelable) = 0;
 
     /**
      * Start the flow of data.
      */
-    virtual aaudio_result_t startStream(aaudio_handle_t streamHandle) = 0;
+    virtual aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) = 0;
 
     /**
      * Stop the flow of data such that start() can resume without loss of data.
      */
-    virtual aaudio_result_t pauseStream(aaudio_handle_t streamHandle) = 0;
+    virtual aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) = 0;
 
     /**
-     * Stop the flow of data after data currently inthe buffer has played.
+     * Stop the flow of data after data currently in the buffer has played.
      */
-    virtual aaudio_result_t stopStream(aaudio_handle_t streamHandle) = 0;
+    virtual aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) = 0;
 
     /**
      *  Discard any data held by the underlying HAL or Service.
      */
-    virtual aaudio_result_t flushStream(aaudio_handle_t streamHandle) = 0;
+    virtual aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) = 0;
 
     /**
      * Manage the specified thread as a low latency audio thread.
      */
-    virtual aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                 pid_t clientThreadId,
                                                 int64_t periodNanoseconds) = 0;
 
-    virtual aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                   pid_t clientThreadId) = 0;
 
-    virtual aaudio_result_t startClient(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo,
                                         const android::AudioClient& client,
                                         const audio_attributes_t *attr,
                                         audio_port_handle_t *clientHandle) = 0;
 
-    virtual aaudio_result_t stopClient(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo,
                                        audio_port_handle_t clientHandle) = 0;
 
     /**
@@ -103,7 +106,7 @@
      * @param parcelable contains new data queue information
      * @return the result of the execution
      */
-    virtual aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+    virtual aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
                                         AudioEndpointParcelable &parcelable) = 0;
 };
 
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index b60bac2..c4692ce 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -40,6 +40,10 @@
     auto convFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
             parcelable.audioFormat);
     setFormat(convFormat.ok() ? convFormat.value() : AUDIO_FORMAT_INVALID);
+    if (!convFormat.ok()) {
+        ALOGE("audioFormat (%s) aidl2legacy conversion failed",
+              parcelable.audioFormat.toString().c_str());
+    }
     static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
     setDirection(parcelable.direction);
     static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
@@ -52,7 +56,6 @@
     setSpatializationBehavior(parcelable.spatializationBehavior);
     setIsContentSpatialized(parcelable.isContentSpatialized);
 
-
     static_assert(sizeof(aaudio_input_preset_t) == sizeof(parcelable.inputPreset));
     setInputPreset(parcelable.inputPreset);
     setBufferCapacity(parcelable.bufferCapacity);
@@ -62,6 +65,15 @@
     static_assert(sizeof(aaudio_session_id_t) == sizeof(parcelable.sessionId));
     setSessionId(parcelable.sessionId);
     setPrivacySensitive(parcelable.isPrivacySensitive);
+    setHardwareSamplesPerFrame(parcelable.hardwareSamplesPerFrame);
+    setHardwareSampleRate(parcelable.hardwareSampleRate);
+    auto convHardwareFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
+            parcelable.hardwareAudioFormat);
+    setHardwareFormat(convHardwareFormat.ok() ? convHardwareFormat.value() : AUDIO_FORMAT_INVALID);
+    if (!convHardwareFormat.ok()) {
+        ALOGE("hardwareAudioFormat (%s) aidl2legacy conversion failed",
+              parcelable.hardwareAudioFormat.toString().c_str());
+    }
 }
 
 AAudioStreamConfiguration&
@@ -82,6 +94,8 @@
     if (convAudioFormat.ok()) {
         result.audioFormat = convAudioFormat.value();
     } else {
+        ALOGE("audioFormat (%s) legacy2aidl conversion failed",
+              audio_format_to_string(getFormat()));
         result.audioFormat = AudioFormatDescription{};
         result.audioFormat.type =
                 android::media::audio::common::AudioFormatType::SYS_RESERVED_INVALID;
@@ -92,6 +106,10 @@
     result.usage = getUsage();
     static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
     result.contentType = getContentType();
+    static_assert(
+            sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
+    result.spatializationBehavior = getSpatializationBehavior();
+    result.isContentSpatialized = isContentSpatialized();
     static_assert(sizeof(aaudio_input_preset_t) == sizeof(result.inputPreset));
     result.inputPreset = getInputPreset();
     result.bufferCapacity = getBufferCapacity();
@@ -100,5 +118,18 @@
     static_assert(sizeof(aaudio_session_id_t) == sizeof(result.sessionId));
     result.sessionId = getSessionId();
     result.isPrivacySensitive = isPrivacySensitive();
+    result.hardwareSamplesPerFrame = getHardwareSamplesPerFrame();
+    result.hardwareSampleRate = getHardwareSampleRate();
+    auto convHardwareAudioFormat = android::legacy2aidl_audio_format_t_AudioFormatDescription(
+            getHardwareFormat());
+    if (convHardwareAudioFormat.ok()) {
+        result.hardwareAudioFormat = convHardwareAudioFormat.value();
+    } else {
+        ALOGE("hardwareAudioFormat (%s) legacy2aidl conversion failed",
+              audio_format_to_string(getHardwareFormat()));
+        result.hardwareAudioFormat = AudioFormatDescription{};
+        result.hardwareAudioFormat.type =
+                android::media::audio::common::AudioFormatType::SYS_RESERVED_INVALID;
+    }
     return result;
 }
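
The conversion sites above share one shape: run the aidl2legacy (or legacy2aidl) conversion, substitute an explicit invalid value when it fails, and log the offending format so the fallback is visible. A generic sketch of that pattern, using a hypothetical convertFormat() and a placeholder invalid value:

// Hypothetical convertFormat() and placeholder invalid value; the real code uses the
// aidl2legacy/legacy2aidl helpers, AUDIO_FORMAT_INVALID and ALOGE.
#include <cstdio>
#include <optional>
#include <string>

constexpr int FORMAT_INVALID = -1;  // placeholder for AUDIO_FORMAT_INVALID

std::optional<int> convertFormat(const std::string& desc) {
    if (desc == "pcm16") return 1;  // pretend only one description is convertible
    return std::nullopt;
}

int convertOrInvalid(const std::string& desc) {
    std::optional<int> conv = convertFormat(desc);
    if (!conv.has_value()) {
        // Mirrors the ALOGE calls above: keep going with an invalid format, but say why.
        fprintf(stderr, "audioFormat (%s) conversion failed\n", desc.c_str());
        return FORMAT_INVALID;
    }
    return conv.value();
}

int main() {
    printf("%d\n", convertOrInvalid("pcm16"));  // 1
    printf("%d\n", convertOrInvalid("weird"));  // -1, with an error log
    return 0;
}
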
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index b1262df..d15d2fa 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <map>
 #include <stdint.h>
 
 #include <binder/Parcel.h>
@@ -37,8 +38,7 @@
         : mUpMessageQueueParcelable(parcelable.upMessageQueueParcelable),
           mDownMessageQueueParcelable(parcelable.downMessageQueueParcelable),
           mUpDataQueueParcelable(parcelable.upDataQueueParcelable),
-          mDownDataQueueParcelable(parcelable.downDataQueueParcelable),
-          mNumSharedMemories(parcelable.sharedMemories.size()) {
+          mDownDataQueueParcelable(parcelable.downDataQueueParcelable) {
     for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
         // Re-construct.
         mSharedMemories[i].~SharedMemoryParcelable();
@@ -52,15 +52,48 @@
     return *this;
 }
 
+namespace {
+
+void updateSharedMemoryIndex(SharedRegion* sharedRegion, int oldIndex, int newIndex) {
+    if (sharedRegion->sharedMemoryIndex == oldIndex) {
+        sharedRegion->sharedMemoryIndex = newIndex;
+    }
+}
+
+void updateSharedMemoryIndex(RingBuffer* ringBuffer, int oldIndex, int newIndex) {
+    updateSharedMemoryIndex(&ringBuffer->readCounterParcelable, oldIndex, newIndex);
+    updateSharedMemoryIndex(&ringBuffer->writeCounterParcelable, oldIndex, newIndex);
+    updateSharedMemoryIndex(&ringBuffer->dataParcelable, oldIndex, newIndex);
+}
+
+void updateSharedMemoryIndex(Endpoint* endpoint, int oldIndex, int newIndex) {
+    updateSharedMemoryIndex(&endpoint->upMessageQueueParcelable, oldIndex, newIndex);
+    updateSharedMemoryIndex(&endpoint->downMessageQueueParcelable, oldIndex, newIndex);
+    updateSharedMemoryIndex(&endpoint->upDataQueueParcelable, oldIndex, newIndex);
+    updateSharedMemoryIndex(&endpoint->downDataQueueParcelable, oldIndex, newIndex);
+}
+
+} // namespace
+
 Endpoint AudioEndpointParcelable::parcelable()&& {
     Endpoint result;
     result.upMessageQueueParcelable = mUpMessageQueueParcelable.parcelable();
     result.downMessageQueueParcelable = mDownMessageQueueParcelable.parcelable();
     result.upDataQueueParcelable = mUpDataQueueParcelable.parcelable();
     result.downDataQueueParcelable = mDownDataQueueParcelable.parcelable();
-    result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
-    for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
-        result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+    // Only valid/in-use shared memory may be transferred over binder. By design, the shared
+    // memories that are currently in use may not be stored contiguously starting at position 0.
+    // However, when marshalling the shared memories into the Endpoint, they are re-indexed
+    // from 0. Every cached index that refers to a re-indexed shared memory must therefore be
+    // updated when the shared memory is placed.
+    for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+        if (mSharedMemories[i].isInUse()) {
+            const int index = result.sharedMemories.size();
+            result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+            // Update every SharedRegion that currently uses `i` as its shared memory index
+            // so that it refers to the block's new position, result.sharedMemories.size() - 1.
+            updateSharedMemoryIndex(&result, i, index);
+        }
     }
     return result;
 }
@@ -71,28 +104,50 @@
  */
 int32_t AudioEndpointParcelable::addFileDescriptor(const unique_fd& fd,
                                                    int32_t sizeInBytes) {
-    if (mNumSharedMemories >= MAX_SHARED_MEMORIES) {
+    const int32_t index = getNextAvailableSharedMemoryPosition();
+    if (index < 0) {
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
-    int32_t index = mNumSharedMemories++;
     mSharedMemories[index].setup(fd, sizeInBytes);
     return index;
 }
 
 void AudioEndpointParcelable::closeDataFileDescriptor() {
-    const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
-    mSharedMemories[curDataMemoryIndex].closeAndReleaseFd();
+    for (const int32_t memoryIndex : std::set{mDownDataQueueParcelable.getDataSharedMemoryIndex(),
+                                mDownDataQueueParcelable.getReadCounterSharedMemoryIndex(),
+                                mDownDataQueueParcelable.getWriteCounterSharedMemoryIndex()}) {
+        mSharedMemories[memoryIndex].closeAndReleaseFd();
+    }
 }
 
-void AudioEndpointParcelable::updateDataFileDescriptor(
+aaudio_result_t AudioEndpointParcelable::updateDataFileDescriptor(
         AudioEndpointParcelable* endpointParcelable) {
-    const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
-    const int32_t newDataMemoryIndex =
-            endpointParcelable->mDownDataQueueParcelable.getSharedMemoryIndex();
-    mSharedMemories[curDataMemoryIndex].close();
-    mSharedMemories[curDataMemoryIndex].setup(
-            endpointParcelable->mSharedMemories[newDataMemoryIndex]);
-    mDownDataQueueParcelable.updateMemory(endpointParcelable->mDownDataQueueParcelable);
+    // Before updating data file descriptor, close the old shared memories.
+    closeDataFileDescriptor();
+    // Because the given endpoint parcelable and this one are two different objects, the
+    // indexes into `mSharedMemories` used by `mDownDataQueueParcelable` can differ between
+    // them. An index map, from the indexes in the given endpoint parcelable to the indexes
+    // in this endpoint parcelable, is therefore required when updating the shared memory.
+    std::map<int32_t, int32_t> memoryIndexMap;
+    auto& downDataQueueParcelable = endpointParcelable->mDownDataQueueParcelable;
+    for (const int32_t memoryIndex : {downDataQueueParcelable.getDataSharedMemoryIndex(),
+                                      downDataQueueParcelable.getReadCounterSharedMemoryIndex(),
+                                      downDataQueueParcelable.getWriteCounterSharedMemoryIndex()}) {
+        if (memoryIndexMap.find(memoryIndex) != memoryIndexMap.end()) {
+            // This shared memory has been set up in this endpoint parcelable.
+            continue;
+        }
+        // Set up the memory in the next available shared memory position.
+        const int index = getNextAvailableSharedMemoryPosition();
+        if (index < 0) {
+            return AAUDIO_ERROR_OUT_OF_RANGE;
+        }
+        mSharedMemories[index].setup(endpointParcelable->mSharedMemories[memoryIndex]);
+        memoryIndexMap.emplace(memoryIndex, index);
+    }
+    mDownDataQueueParcelable.updateMemory(
+            endpointParcelable->mDownDataQueueParcelable, memoryIndexMap);
+    return AAUDIO_OK;
 }
 
 aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
@@ -114,26 +169,29 @@
 
 aaudio_result_t AudioEndpointParcelable::close() {
     int err = 0;
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        int lastErr = mSharedMemories[i].close();
+    for (auto& sharedMemory : mSharedMemories) {
+        const int lastErr = sharedMemory.close();
         if (lastErr < 0) err = lastErr;
     }
     return AAudioConvert_androidToAAudioResult(err);
 }
 
-aaudio_result_t AudioEndpointParcelable::validate() const {
-    if (mNumSharedMemories < 0 || mNumSharedMemories >= MAX_SHARED_MEMORIES) {
-        ALOGE("invalid mNumSharedMemories = %d", mNumSharedMemories);
-        return AAUDIO_ERROR_INTERNAL;
+int32_t AudioEndpointParcelable::getNextAvailableSharedMemoryPosition() const {
+    for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+        if (!mSharedMemories[i].isInUse()) {
+            return i;
+        }
     }
-    return AAUDIO_OK;
+    return -1;
 }
 
 void AudioEndpointParcelable::dump() {
     ALOGD("======================================= BEGIN");
-    ALOGD("mNumSharedMemories = %d", mNumSharedMemories);
-    for (int i = 0; i < mNumSharedMemories; i++) {
-        mSharedMemories[i].dump();
+    for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+        if (mSharedMemories[i].isInUse()) {
+            ALOGD("Shared memory index=%d", i);
+            mSharedMemories[i].dump();
+        }
     }
     ALOGD("mUpMessageQueueParcelable =========");
     mUpMessageQueueParcelable.dump();
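
The rewritten parcelable()&& above compacts the in-use shared memories to the front of the outgoing vector and rewrites every cached shared memory index so each SharedRegion still points at the block that holds its data after re-indexing. A self-contained sketch of that compaction, with the parcelables reduced to plain structs:

// Simplified stand-ins: regions refer to shared memory slots by index; only in-use slots
// are emitted, and each cached index is rewritten to the slot's new, compacted position.
#include <cstdio>
#include <vector>

constexpr int MAX_SLOTS = 8;  // plays the role of MAX_SHARED_MEMORIES

struct Slot { bool inUse; int fd; };
struct Region { int sharedMemoryIndex; };

struct Marshalled {
    std::vector<Slot> slots;      // compacted: only in-use slots, indexed from 0
    std::vector<Region> regions;  // indexes rewritten to match `slots`
};

Marshalled marshal(const Slot (&slots)[MAX_SLOTS], std::vector<Region> regions) {
    Marshalled out;
    out.regions = std::move(regions);
    for (int oldIndex = 0; oldIndex < MAX_SLOTS; ++oldIndex) {
        if (!slots[oldIndex].inUse) continue;
        const int newIndex = static_cast<int>(out.slots.size());
        out.slots.push_back(slots[oldIndex]);
        for (Region& region : out.regions) {
            if (region.sharedMemoryIndex == oldIndex) region.sharedMemoryIndex = newIndex;
        }
    }
    return out;
}

int main() {
    Slot slots[MAX_SLOTS] = {};
    slots[2] = {true, 10};
    slots[5] = {true, 11};
    Marshalled m = marshal(slots, {{2}, {5}, {2}});
    for (const Region& region : m.regions) printf("%d ", region.sharedMemoryIndex);  // 0 1 0
    printf("\n");
    return 0;
}
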
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index 5d2c38f..722dd14 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -61,8 +61,10 @@
      * Update current data file descriptor with given endpoint parcelable.
      * @param endpointParcelable an endpoint parcelable that contains new data file
      *                           descriptor information
+     * @return AAUDIO_OK if the data file descriptor is updated successfully.
+     *         AAUDIO_ERROR_OUT_OF_RANGE if there is not enough space for the shared memory.
      */
-    void updateDataFileDescriptor(AudioEndpointParcelable* endpointParcelable);
+    aaudio_result_t updateDataFileDescriptor(AudioEndpointParcelable* endpointParcelable);
 
     aaudio_result_t resolve(EndpointDescriptor *descriptor);
     aaudio_result_t resolveDataQueue(RingBufferDescriptor *descriptor);
@@ -84,9 +86,10 @@
     RingBufferParcelable    mDownDataQueueParcelable;    // eg. playback
 
 private:
-    aaudio_result_t         validate() const;
+    // Return the first available shared memory position. Return -1 if all shared memories are
+    // in use.
+    int32_t getNextAvailableSharedMemoryPosition() const;
 
-    int32_t                 mNumSharedMemories = 0;
     SharedMemoryParcelable  mSharedMemories[MAX_SHARED_MEMORIES];
 };
 
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 3bc51d0..f8d748e 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -33,7 +33,6 @@
         : mReadCounterParcelable(parcelable.readCounterParcelable),
           mWriteCounterParcelable(parcelable.writeCounterParcelable),
           mDataParcelable(parcelable.dataParcelable),
-          mSharedMemoryIndex(parcelable.sharedMemoryIndex),
           mBytesPerFrame(parcelable.bytesPerFrame),
           mFramesPerBurst(parcelable.framesPerBurst),
           mCapacityInFrames(parcelable.capacityInFrames),
@@ -46,7 +45,6 @@
     result.readCounterParcelable = mReadCounterParcelable.parcelable();
     result.writeCounterParcelable = mWriteCounterParcelable.parcelable();
     result.dataParcelable = mDataParcelable.parcelable();
-    result.sharedMemoryIndex = mSharedMemoryIndex;
     result.bytesPerFrame = mBytesPerFrame;
     result.framesPerBurst = mFramesPerBurst;
     result.capacityInFrames = mCapacityInFrames;
@@ -62,19 +60,26 @@
                  int32_t readCounterOffset,
                  int32_t writeCounterOffset,
                  int32_t counterSizeBytes) {
-    mSharedMemoryIndex = sharedMemoryIndex;
-    mReadCounterParcelable.setup(sharedMemoryIndex, readCounterOffset, counterSizeBytes);
-    mWriteCounterParcelable.setup(sharedMemoryIndex, writeCounterOffset, counterSizeBytes);
-    mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+    mReadCounterParcelable.setup({sharedMemoryIndex, readCounterOffset, counterSizeBytes});
+    mWriteCounterParcelable.setup({sharedMemoryIndex, writeCounterOffset, counterSizeBytes});
+    mDataParcelable.setup({sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes});
 }
 
 void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
                  int32_t dataMemoryOffset,
                  int32_t dataSizeInBytes) {
-    mSharedMemoryIndex = sharedMemoryIndex;
-    mReadCounterParcelable.setup(sharedMemoryIndex, 0, 0);
-    mWriteCounterParcelable.setup(sharedMemoryIndex, 0, 0);
-    mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+    mReadCounterParcelable.setup({sharedMemoryIndex, 0, 0});
+    mWriteCounterParcelable.setup({sharedMemoryIndex, 0, 0});
+    mDataParcelable.setup({sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes});
+}
+
+void RingBufferParcelable::setupMemory(
+        const SharedRegionParcelable::MemoryInfoTuple& dataMemoryInfo,
+        const SharedRegionParcelable::MemoryInfoTuple& readCounterInfo,
+        const SharedRegionParcelable::MemoryInfoTuple& writeCounterInfo) {
+    mReadCounterParcelable.setup(readCounterInfo);
+    mWriteCounterParcelable.setup(writeCounterInfo);
+    mDataParcelable.setup(dataMemoryInfo);
 }
 
 int32_t RingBufferParcelable::getBytesPerFrame() const {
@@ -128,9 +133,11 @@
     return AAUDIO_OK;
 }
 
-void RingBufferParcelable::updateMemory(const RingBufferParcelable& parcelable) {
-    setupMemory(mSharedMemoryIndex, 0,
-                parcelable.getCapacityInFrames() * parcelable.getBytesPerFrame());
+void RingBufferParcelable::updateMemory(const RingBufferParcelable& parcelable,
+                                        const std::map<int32_t, int32_t>& memoryIndexMap) {
+    setupMemory(parcelable.mDataParcelable.getMemoryInfo(&memoryIndexMap),
+                parcelable.mReadCounterParcelable.getMemoryInfo(&memoryIndexMap),
+                parcelable.mWriteCounterParcelable.getMemoryInfo(&memoryIndexMap));
     setBytesPerFrame(parcelable.getBytesPerFrame());
     setFramesPerBurst(parcelable.getFramesPerBurst());
     setCapacityInFrames(parcelable.getCapacityInFrames());
@@ -152,7 +159,6 @@
     return AAUDIO_OK;
 }
 
-
 void RingBufferParcelable::dump() {
     ALOGD("mCapacityInFrames = %d ---------", mCapacityInFrames);
     if (mCapacityInFrames > 0) {
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 29d0d86..4363191 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AAUDIO_RINGBUFFER_PARCELABLE_H
 #define ANDROID_AAUDIO_RINGBUFFER_PARCELABLE_H
 
+#include <map>
 #include <stdint.h>
 
 #include <aaudio/RingBuffer.h>
@@ -46,6 +47,22 @@
                      int32_t dataMemoryOffset,
                      int32_t dataSizeInBytes);
 
+    /**
+     * Set up memory for the RingBufferParcelable.
+     *
+     * This function takes three MemoryInfoTuple parameters, each containing a shared memory
+     * index, an offset into that shared memory and the size of the object. This allows the
+     * read counter, write counter and data memory to be set up even when they are located
+     * in different shared memory blocks.
+     *
+     * @param dataMemoryInfo memory information for the data queue
+     * @param readCounterInfo memory information for the read counter
+     * @param writeCounterInfo memory information for the write counter
+     */
+    void setupMemory(const SharedRegionParcelable::MemoryInfoTuple& dataMemoryInfo,
+                     const SharedRegionParcelable::MemoryInfoTuple& readCounterInfo,
+                     const SharedRegionParcelable::MemoryInfoTuple& writeCounterInfo);
+
     int32_t getBytesPerFrame() const;
 
     void setBytesPerFrame(int32_t bytesPerFrame);
@@ -62,10 +79,24 @@
 
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
 
-    void updateMemory(const RingBufferParcelable& parcelable);
+    /**
+     * Update this ring buffer with the given ring buffer.
+     *
+     * @param parcelable the ring buffer to be used to update this ring buffer.
+     * @param memoryIndexMap a map from the shared memory indexes used by the given ring buffer
+     *                       to the shared memory indexes used by this ring buffer.
+     */
+    void updateMemory(const RingBufferParcelable& parcelable,
+                      const std::map<int32_t, int32_t>& memoryIndexMap);
 
-    int32_t getSharedMemoryIndex() const {
-        return mSharedMemoryIndex;
+    int32_t getReadCounterSharedMemoryIndex() const {
+        return mReadCounterParcelable.getSharedMemoryIndex();
+    }
+    int32_t getWriteCounterSharedMemoryIndex() const {
+        return mWriteCounterParcelable.getSharedMemoryIndex();
+    }
+    int32_t getDataSharedMemoryIndex() const {
+        return mDataParcelable.getSharedMemoryIndex();
     }
 
     void dump();
@@ -77,7 +108,6 @@
     SharedRegionParcelable  mReadCounterParcelable;
     SharedRegionParcelable  mWriteCounterParcelable;
     SharedRegionParcelable  mDataParcelable;
-    int32_t                 mSharedMemoryIndex = -1;
     int32_t                 mBytesPerFrame = 0;     // index is in frames
     int32_t                 mFramesPerBurst = 0;    // for ISOCHRONOUS queues
     int32_t                 mCapacityInFrames = 0;  // zero if unused
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 741aefc..c360a1f 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -146,7 +146,7 @@
     return AAUDIO_OK;
 }
 
-void SharedMemoryParcelable::dump() {
+void SharedMemoryParcelable::dump() const {
     ALOGD("mFd = %d", mFd.get());
     ALOGD("mSizeInBytes = %" PRId64, mSizeInBytes);
 }
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 7762fef..909f3a6 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -64,7 +64,9 @@
 
     int32_t getSizeInBytes();
 
-    void dump();
+    bool isInUse() const { return mFd.get() != -1; }
+
+    void dump() const;
 
     // Extract a parcelable representation of this object.
     // Since we own a unique FD, move semantics are provided to avoid the need to dupe.
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index 6fa109b..fd69ef1 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -46,12 +46,17 @@
     return result;
 }
 
-void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
-                                   int32_t offsetInBytes,
-                                   int32_t sizeInBytes) {
-    mSharedMemoryIndex = sharedMemoryIndex;
-    mOffsetInBytes = offsetInBytes;
-    mSizeInBytes = sizeInBytes;
+void SharedRegionParcelable::setup(MemoryInfoTuple memoryInfoTuple) {
+    mSharedMemoryIndex = std::get<MEMORY_INDEX>(memoryInfoTuple);
+    mOffsetInBytes = std::get<OFFSET>(memoryInfoTuple);
+    mSizeInBytes = std::get<SIZE>(memoryInfoTuple);
+}
+
+SharedRegionParcelable::MemoryInfoTuple SharedRegionParcelable::getMemoryInfo(
+        const std::map<int32_t, int32_t>* memoryIndexMap) const {
+    return {memoryIndexMap == nullptr ? mSharedMemoryIndex : memoryIndexMap->at(mSharedMemoryIndex),
+            mOffsetInBytes,
+            mSizeInBytes};
 }
 
 aaudio_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.h b/media/libaaudio/src/binding/SharedRegionParcelable.h
index c15fc30..74ea75d 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.h
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.h
@@ -37,12 +37,36 @@
     // Construct based on a parcelable representation.
     explicit SharedRegionParcelable(const SharedRegion& parcelable);
 
-    void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
+    // A tuple that contains the information needed to set up shared memory.
+    // The tuple holds <shared memory index, offset, size in bytes>.
+    using MemoryInfoTuple = std::tuple<int, int, int>;
+    // Enum values used as indexes when querying a MemoryInfoTuple.
+    enum {
+        MEMORY_INDEX = 0,
+        OFFSET = 1,
+        SIZE = 2,
+    };
+    void setup(MemoryInfoTuple memoryInfoTuple);
 
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
 
     bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
 
+    int32_t getSharedMemoryIndex() const { return mSharedMemoryIndex; }
+
+    /**
+     * Get the memory information of this SharedRegionParcelable.
+     *
+     * If `memoryIndexMap` is not null, it indicates that the caller uses a different indexing
+     * for the shared memory. In that case, `memoryIndexMap` must map the shared memory indexes
+     * used by this object to the caller's shared memory indexes.
+     *
+     * @param memoryIndexMap a pointer to a map from this object's shared memory indexes to the
+     *                       caller's shared memory indexes, or nullptr to keep the current index.
+     * @return a MemoryInfoTuple describing this shared region.
+     */
+    MemoryInfoTuple getMemoryInfo(const std::map<int32_t, int32_t>* memoryIndexMap) const;
+
     void dump();
 
     // Extract a parcelable representation of this object.
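
The setup()/getMemoryInfo() pair above passes shared memory coordinates around as a MemoryInfoTuple and optionally remaps the memory index for callers that number their shared memory blocks differently. A minimal standalone sketch of that flow; the remap() helper, the map contents, and main() are hypothetical stand-ins, not AAudio code:

    // Sketch only: mirrors SharedRegionParcelable's MemoryInfoTuple handling.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <tuple>

    using MemoryInfoTuple = std::tuple<int, int, int>;  // <memory index, offset, size in bytes>
    enum { MEMORY_INDEX = 0, OFFSET = 1, SIZE = 2 };

    // Remap the shared memory index the way getMemoryInfo() does when the caller
    // uses its own indexing for the shared memory blocks.
    MemoryInfoTuple remap(const MemoryInfoTuple& info,
                          const std::map<int32_t, int32_t>* memoryIndexMap) {
        const int index = std::get<MEMORY_INDEX>(info);
        return {memoryIndexMap == nullptr ? index : memoryIndexMap->at(index),
                std::get<OFFSET>(info), std::get<SIZE>(info)};
    }

    int main() {
        const MemoryInfoTuple info{1, 256, 1024};           // hypothetical region
        const std::map<int32_t, int32_t> indexMap{{1, 4}};  // caller stores this block at index 4
        const MemoryInfoTuple remapped = remap(info, &indexMap);
        std::cout << std::get<MEMORY_INDEX>(remapped) << " "  // prints 4
                  << std::get<OFFSET>(remapped) << " "        // prints 256
                  << std::get<SIZE>(remapped) << std::endl;   // prints 1024
        return 0;
    }
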
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
index dd64493..998fc66 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -26,5 +26,4 @@
     int                 framesPerBurst;    // for ISOCHRONOUS queues
     int                 capacityInFrames;  // zero if unused
     int /* RingbufferFlags */ flags;  // = RingbufferFlags::NONE;
-    int                 sharedMemoryIndex;
-}
\ No newline at end of file
+}
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index 983e193..fa46e0d 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -34,4 +34,7 @@
     int /* aaudio_allowed_capture_policy_t */ allowedCapturePolicy;  // = AAUDIO_UNSPECIFIED;
     int /* aaudio_session_id_t */             sessionId;  //            = AAUDIO_SESSION_ID_NONE;
     boolean                                   isPrivacySensitive;  //   = false;
+    int                                       hardwareSamplesPerFrame;//= AAUDIO_UNSPECIFIED;
+    int                                       hardwareSampleRate;  //   = AAUDIO_UNSPECIFIED;
+    AudioFormatDescription                    hardwareAudioFormat;  //  = AUDIO_FORMAT_DEFAULT;
 }
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 2ed3e3c..5444565 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -20,7 +20,7 @@
 
 #include "AAudioFlowGraph.h"
 
-#include <flowgraph/ClipToRange.h>
+#include <flowgraph/Limiter.h>
 #include <flowgraph/ManyToMultiConverter.h>
 #include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
@@ -78,11 +78,11 @@
     }
 
     // For a pure float graph, there is chance that the data range may be very large.
-    // So we should clip to a reasonable value that allows a little headroom.
+    // So we should limit to a reasonable value that allows a little headroom.
     if (sourceFormat == AUDIO_FORMAT_PCM_FLOAT && sinkFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        mClipper = std::make_unique<ClipToRange>(sourceChannelCount);
-        lastOutput->connect(&mClipper->input);
-        lastOutput = &mClipper->output;
+        mLimiter = std::make_unique<Limiter>(sourceChannelCount);
+        lastOutput->connect(&mLimiter->input);
+        lastOutput = &mLimiter->output;
     }
 
     // Expand the number of channels if required.
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 602c17f..35fef37 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -24,7 +24,7 @@
 
 #include <aaudio/AAudio.h>
 #include <audio_utils/Balance.h>
-#include <flowgraph/ClipToRange.h>
+#include <flowgraph/Limiter.h>
 #include <flowgraph/ManyToMultiConverter.h>
 #include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
@@ -74,7 +74,7 @@
 private:
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
-    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ClipToRange> mClipper;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::Limiter> mLimiter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ManyToMultiConverter>
             mManyToMultiConverter;
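
The switch from ClipToRange to Limiter trades hard clipping for limiting with a little headroom on pure-float graphs. A toy comparison of the two behaviors; softLimit() is an illustrative stand-in, not the flowgraph Limiter implementation:

    // Sketch only: contrasts a hard clip with a simple soft limiter.
    #include <algorithm>
    #include <cmath>
    #include <cstdio>

    float hardClip(float x, float limit = 1.0f) {
        return std::clamp(x, -limit, limit);        // clip-to-range style behavior
    }

    float softLimit(float x, float threshold = 0.9f) {
        // Pass the signal through below the threshold, then compress smoothly above it
        // so large float samples approach full scale instead of folding flat.
        if (std::fabs(x) <= threshold) return x;
        const float sign = (x < 0.0f) ? -1.0f : 1.0f;
        return sign * (threshold + std::tanh(std::fabs(x) - threshold) * (1.0f - threshold));
    }

    int main() {
        for (float x : {0.5f, 1.0f, 2.0f, 8.0f}) {
            std::printf("x=%5.2f  clip=%5.2f  limit=%5.2f\n", x, hardClip(x), softLimit(x));
        }
        return 0;
    }
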
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 9f0564f..84c715f 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -33,6 +33,7 @@
 #include <utils/Trace.h>
 
 #include "AudioEndpointParcelable.h"
+#include "binding/AAudioBinderClient.h"
 #include "binding/AAudioStreamRequest.h"
 #include "binding/AAudioStreamConfiguration.h"
 #include "binding/AAudioServiceMessage.h"
@@ -65,13 +66,13 @@
 AudioStreamInternal::AudioStreamInternal(AAudioServiceInterface  &serviceInterface, bool inService)
         : AudioStream()
         , mClockModel()
-        , mServiceStreamHandle(AAUDIO_HANDLE_INVALID)
         , mInService(inService)
         , mServiceInterface(serviceInterface)
         , mAtomicInternalTimestamp()
         , mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
         , mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
         {
+
 }
 
 AudioStreamInternal::~AudioStreamInternal() {
@@ -131,10 +132,14 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
+    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
+    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
+    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
+
     mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
 
-    mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
-    if (mServiceStreamHandle < 0
+    mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
+    if (getServiceHandle() < 0
             && (request.getConfiguration().getSamplesPerFrame() == 1
                     || request.getConfiguration().getChannelMask() == AAUDIO_CHANNEL_MONO)
             && getDirection() == AAUDIO_DIRECTION_OUTPUT
@@ -143,12 +148,12 @@
         // Only do this in the client. Otherwise we end up with a mono mixer in the service
         // that writes to a stereo MMAP stream.
         ALOGD("%s() - openStream() returned %d, try switching from MONO to STEREO",
-              __func__, mServiceStreamHandle);
+              __func__, getServiceHandle());
         request.getConfiguration().setChannelMask(AAUDIO_CHANNEL_STEREO);
-        mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
+        mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
     }
-    if (mServiceStreamHandle < 0) {
-        return mServiceStreamHandle;
+    if (getServiceHandle() < 0) {
+        return getServiceHandle();
     }
 
     // This must match the key generated in oboeservice/AAudioServiceStreamBase.cpp
@@ -156,7 +161,7 @@
     if (!mInService) {
         // No need to log if it is from service side.
         mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM)
-                     + std::to_string(mServiceStreamHandle);
+                     + std::to_string(getServiceHandle());
     }
 
     android::mediametrics::LogItem(mMetricsId)
@@ -192,7 +197,11 @@
     // Save device format so we can do format conversion and volume scaling together.
     setDeviceFormat(configurationOutput.getFormat());
 
-    result = mServiceInterface.getStreamDescription(mServiceStreamHandle, mEndPointParcelable);
+    setHardwareSamplesPerFrame(configurationOutput.getHardwareSamplesPerFrame());
+    setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
+    setHardwareFormat(configurationOutput.getHardwareFormat());
+
+    result = mServiceInterface.getStreamDescription(mServiceStreamHandleInfo, mEndPointParcelable);
     if (result != AAUDIO_OK) {
         goto error;
     }
@@ -313,23 +322,22 @@
 // This must be called under mStreamLock.
 aaudio_result_t AudioStreamInternal::release_l() {
     aaudio_result_t result = AAUDIO_OK;
-    ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
-    if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
-        aaudio_stream_state_t currentState = getState();
+    ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, getServiceHandle());
+    if (getServiceHandle() != AAUDIO_HANDLE_INVALID) {
         // Don't release a stream while it is running. Stop it first.
         // If DISCONNECTED then we should still try to stop in case the
         // error callback is still running.
-        if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        if (isActive() || isDisconnected()) {
             requestStop_l();
         }
 
         logReleaseBufferState();
 
         setState(AAUDIO_STREAM_STATE_CLOSING);
-        aaudio_handle_t serviceStreamHandle = mServiceStreamHandle;
-        mServiceStreamHandle = AAUDIO_HANDLE_INVALID;
+        auto serviceStreamHandleInfo = mServiceStreamHandleInfo;
+        mServiceStreamHandleInfo = AAudioHandleInfo();
 
-        mServiceInterface.closeStream(serviceStreamHandle);
+        mServiceInterface.closeStream(serviceStreamHandleInfo);
         mCallbackBuffer.reset();
 
         // Update local frame counters so we can query them after releasing the endpoint.
@@ -371,13 +379,17 @@
             mAudioEndpoint->read(buffer, getBufferCapacity());
     mEndPointParcelable.closeDataFileDescriptor();
     aaudio_result_t result = mServiceInterface.exitStandby(
-            mServiceStreamHandle, endpointParcelable);
+            mServiceStreamHandleInfo, endpointParcelable);
     if (result != AAUDIO_OK) {
         ALOGE("Failed to exit standby, error=%d", result);
         goto exit;
     }
     // Reconstruct data queue descriptor using new shared file descriptor.
-    mEndPointParcelable.updateDataFileDescriptor(&endpointParcelable);
+    result = mEndPointParcelable.updateDataFileDescriptor(&endpointParcelable);
+    if (result != AAUDIO_OK) {
+        ALOGE("%s failed to update data file descriptor, error=%d", __func__, result);
+        goto exit;
+    }
     result = mEndPointParcelable.resolveDataQueue(&mEndpointDescriptor.dataQueueDescriptor);
     if (result != AAUDIO_OK) {
         ALOGE("Failed to resolve data queue after exiting standby, error=%d", result);
@@ -423,7 +435,7 @@
 aaudio_result_t AudioStreamInternal::requestStart_l()
 {
     int64_t startTime;
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGD("requestStart() mServiceStreamHandle invalid");
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -432,11 +444,11 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
-    aaudio_stream_state_t originalState = getState();
-    if (originalState == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGD("requestStart() but DISCONNECTED");
         return AAUDIO_ERROR_DISCONNECTED;
     }
+    aaudio_stream_state_t originalState = getState();
     setState(AAUDIO_STREAM_STATE_STARTING);
 
     // Clear any stale timestamps from the previous run.
@@ -444,19 +456,19 @@
 
     prepareBuffersForStart(); // tell subclasses to get ready
 
-    aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
+    aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandleInfo);
     if (result == AAUDIO_ERROR_STANDBY) {
         // The stream is at standby mode. Need to exit standby before starting the stream.
         result = exitStandby_l();
         if (result == AAUDIO_OK) {
-            result = mServiceInterface.startStream(mServiceStreamHandle);
+            result = mServiceInterface.startStream(mServiceStreamHandleInfo);
         }
     }
     if (result != AAUDIO_OK) {
         ALOGD("%s() error = %d, stream was probably stolen", __func__, result);
         // Stealing was added in R. Coerce result to improve backward compatibility.
         result = AAUDIO_ERROR_DISCONNECTED;
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected();
     }
 
     startTime = AudioClock::getNanoseconds();
@@ -473,7 +485,6 @@
         result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
     }
     if (result != AAUDIO_OK) {
-        // TODO(b/214607638): Do we want to roll back to original state or keep as disconnected?
         setState(originalState);
     }
     return result;
@@ -499,8 +510,7 @@
 // This must be called under mStreamLock.
 aaudio_result_t AudioStreamInternal::stopCallback_l()
 {
-    if (isDataCallbackSet()
-            && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+    if (isDataCallbackSet() && (isActive() || isDisconnected())) {
         mCallbackEnabled.store(false);
         aaudio_result_t result = joinThread_l(nullptr); // may temporarily unlock mStreamLock
         if (result == AAUDIO_ERROR_INVALID_HANDLE) {
@@ -525,14 +535,14 @@
     // and the callback may have stopped the stream.
     // Check to make sure the stream still needs to be stopped.
     // See also AudioStream::safeStop_l().
-    if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+    if (!(isActive() || isDisconnected())) {
         ALOGD("%s() returning early, not active or disconnected", __func__);
         return AAUDIO_OK;
     }
 
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid = 0x%08X",
-              __func__, mServiceStreamHandle);
+              __func__, getServiceHandle());
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
@@ -540,7 +550,7 @@
     setState(AAUDIO_STREAM_STATE_STOPPING);
     mAtomicInternalTimestamp.clear();
 
-    result = mServiceInterface.stopStream(mServiceStreamHandle);
+    result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
         result = AAUDIO_OK;
@@ -549,31 +559,31 @@
 }
 
 aaudio_result_t AudioStreamInternal::registerThread() {
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    return mServiceInterface.registerAudioThread(mServiceStreamHandle,
-                                              gettid(),
-                                              getPeriodNanoseconds());
+    return mServiceInterface.registerAudioThread(mServiceStreamHandleInfo,
+                                                 gettid(),
+                                                 getPeriodNanoseconds());
 }
 
 aaudio_result_t AudioStreamInternal::unregisterThread() {
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    return mServiceInterface.unregisterAudioThread(mServiceStreamHandle, gettid());
+    return mServiceInterface.unregisterAudioThread(mServiceStreamHandleInfo, gettid());
 }
 
 aaudio_result_t AudioStreamInternal::startClient(const android::AudioClient& client,
                                                  const audio_attributes_t *attr,
                                                  audio_port_handle_t *portHandle) {
     ALOGV("%s() called", __func__);
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    aaudio_result_t result =  mServiceInterface.startClient(mServiceStreamHandle,
+    aaudio_result_t result =  mServiceInterface.startClient(mServiceStreamHandleInfo,
                                                             client, attr, portHandle);
     ALOGV("%s(%d) returning %d", __func__, *portHandle, result);
     return result;
@@ -581,10 +591,10 @@
 
 aaudio_result_t AudioStreamInternal::stopClient(audio_port_handle_t portHandle) {
     ALOGV("%s(%d) called", __func__, portHandle);
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandle, portHandle);
+    aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandleInfo, portHandle);
     ALOGV("%s(%d) returning %d", __func__, portHandle, result);
     return result;
 }
@@ -605,13 +615,6 @@
     return AAUDIO_ERROR_INVALID_STATE;
 }
 
-aaudio_result_t AudioStreamInternal::updateStateMachine() {
-    if (isDataCallbackActive()) {
-        return AAUDIO_OK; // state is getting updated by the callback thread read/write call
-    }
-    return processCommands();
-}
-
 void AudioStreamInternal::logTimestamp(AAudioServiceMessage &command) {
     static int64_t oldPosition = 0;
     static int64_t oldTime = 0;
@@ -654,6 +657,8 @@
             if (getState() == AAUDIO_STREAM_STATE_STARTING) {
                 setState(AAUDIO_STREAM_STATE_STARTED);
             }
+            mPlayerBase->triggerPortIdUpdate(static_cast<audio_port_handle_t>(
+                                                 message->event.dataLong));
             break;
         case AAUDIO_SERVICE_EVENT_PAUSED:
             ALOGD("%s - got AAUDIO_SERVICE_EVENT_PAUSED", __func__);
@@ -680,7 +685,7 @@
                 mAudioEndpoint->eraseDataMemory();
             }
             result = AAUDIO_ERROR_DISCONNECTED;
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             ALOGW("%s - AAUDIO_SERVICE_EVENT_DISCONNECTED - FIFO cleared", __func__);
             break;
         case AAUDIO_SERVICE_EVENT_VOLUME:
@@ -766,6 +771,22 @@
 aaudio_result_t AudioStreamInternal::processData(void *buffer, int32_t numFrames,
                                                  int64_t timeoutNanoseconds)
 {
+    if (isDisconnected()) {
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
+    if (!mInService &&
+        AAudioBinderClient::getInstance().getServiceLifetimeId() != getServiceLifetimeId()) {
+        // The service lifetime id changes whenever the binder dies. In that case, if the
+        // service lifetime id from AAudioBinderClient differs from the cached one, return
+        // AAUDIO_ERROR_DISCONNECTED.
+        // Note that the service lifetime id is only compared when not running in the service,
+        // as the streams in the service will all be gone when the aaudio service dies.
+        mClockModel.stop(AudioClock::getNanoseconds());
+        // Mark the stream as disconnected, since the service lifetime id only changes when
+        // the binder dies.
+        setDisconnected();
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     const char * traceName = "aaProc";
     const char * fifoName = "aaRdy";
     ATRACE_BEGIN(traceName);
@@ -814,7 +835,6 @@
 
             if (wakeTimeNanos > deadlineNanos) {
                 // If we time out, just return the framesWritten so far.
-                // TODO remove after we fix the deadline bug
                 ALOGW("processData(): entered at %lld nanos, currently %lld",
                       (long long) entryTimeNanos, (long long) currentTimeNanos);
                 ALOGW("processData(): TIMEOUT after %lld nanos",
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 2367572..9c06121 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -48,7 +48,7 @@
                                        int64_t *framePosition,
                                        int64_t *timeNanoseconds) override;
 
-    virtual aaudio_result_t updateStateMachine() override;
+    virtual aaudio_result_t processCommands() override;
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
@@ -83,7 +83,11 @@
     aaudio_result_t stopClient(audio_port_handle_t clientHandle);
 
     aaudio_handle_t getServiceHandle() const {
-        return mServiceStreamHandle;
+        return mServiceStreamHandleInfo.getHandle();
+    }
+
+    int32_t getServiceLifetimeId() const {
+        return mServiceStreamHandleInfo.getServiceLifetimeId();
     }
 
 protected:
@@ -110,8 +114,6 @@
 
     aaudio_result_t drainTimestampsFromService();
 
-    aaudio_result_t processCommands();
-
     aaudio_result_t stopCallback_l();
 
     virtual void prepareBuffersForStart() {}
@@ -150,7 +152,8 @@
 
     std::unique_ptr<AudioEndpoint> mAudioEndpoint;   // source for reads or sink for writes
 
-    aaudio_handle_t          mServiceStreamHandle; // opaque handle returned from service
+    // opaque handle returned from service
+    AAudioHandleInfo         mServiceStreamHandleInfo;
 
     int32_t                  mXRunCount = 0;      // how many underrun events?
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 1efccb1..f5cc2be 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -25,6 +25,7 @@
 #include "client/AudioStreamInternalCapture.h"
 #include "utility/AudioClock.h"
 
+#undef ATRACE_TAG
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 #include <utils/Trace.h>
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 7c7a969..89dd8ff 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -74,7 +74,7 @@
     if (result != AAUDIO_OK) {
         return result;
     }
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
@@ -82,17 +82,17 @@
     mClockModel.stop(AudioClock::getNanoseconds());
     setState(AAUDIO_STREAM_STATE_PAUSING);
     mAtomicInternalTimestamp.clear();
-    return mServiceInterface.pauseStream(mServiceStreamHandle);
+    return mServiceInterface.pauseStream(mServiceStreamHandleInfo);
 }
 
 aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
-    if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+    if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
         ALOGW("%s() mServiceStreamHandle invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
     setState(AAUDIO_STREAM_STATE_FLUSHING);
-    return mServiceInterface.flushStream(mServiceStreamHandle);
+    return mServiceInterface.flushStream(mServiceStreamHandleInfo);
 }
 
 void AudioStreamInternalPlay::prepareBuffersForStart() {
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index 6921271..a39e90e 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -43,12 +43,12 @@
 // and dumped to the log when the stream is stopped.
 
 IsochronousClockModel::IsochronousClockModel()
-        : mLatenessForDriftNanos(kInitialLatenessForDriftNanos)
 {
     if ((AAudioProperty_getLogMask() & AAUDIO_LOG_CLOCK_MODEL_HISTOGRAM) != 0) {
         mHistogramMicros = std::make_unique<Histogram>(kHistogramBinCount,
                 kHistogramBinWidthMicros);
     }
+    update();
 }
 
 void IsochronousClockModel::setPositionAndTime(int64_t framePosition, int64_t nanoTime) {
@@ -61,15 +61,19 @@
     ALOGV("start(nanos = %lld)\n", (long long) nanoTime);
     mMarkerNanoTime = nanoTime;
     mState = STATE_STARTING;
+    mConsecutiveVeryLateCount = 0;
+    mDspStallCount = 0;
     if (mHistogramMicros) {
         mHistogramMicros->clear();
     }
 }
 
 void IsochronousClockModel::stop(int64_t nanoTime) {
-    ALOGD("stop(nanos = %lld) max lateness = %d micros\n",
-        (long long) nanoTime,
-        (int) (mMaxMeasuredLatenessNanos / 1000));
+    ALOGD("stop(nanos = %lld) max lateness = %d micros, DSP stalled %d times",
+          (long long) nanoTime,
+          (int) (mMaxMeasuredLatenessNanos / 1000),
+          mDspStallCount
+    );
     setPositionAndTime(convertTimeToPosition(nanoTime), nanoTime);
     // TODO should we set position?
     mState = STATE_STOPPED;
@@ -108,7 +112,9 @@
 
 //    ALOGD("processTimestamp() - mSampleRate = %d", mSampleRate);
 //    ALOGD("processTimestamp() - mState = %d", mState);
+    // Lateness relative to the start of the window.
     int64_t latenessNanos = nanosDelta - expectedNanosDelta;
+    int32_t nextConsecutiveVeryLateCount = 0;
     switch (mState) {
     case STATE_STOPPED:
         break;
@@ -137,58 +143,94 @@
             // Or we may be drifting due to a fast HW clock.
             setPositionAndTime(framePosition, nanoTime);
 #if ICM_LOG_DRIFT
-            int earlyDeltaMicros = (int) ((expectedNanosDelta - nanosDelta)/ 1000);
-            ALOGD("%s() - STATE_RUNNING - #%d, %4d micros EARLY",
+            int earlyDeltaMicros = (int) ((expectedNanosDelta - nanosDelta)
+                    / AAUDIO_NANOS_PER_MICROSECOND);
+            ALOGD("%s() - STATE_RUNNING - #%d, %5d micros EARLY",
                 __func__, mTimestampCount, earlyDeltaMicros);
 #endif
-        } else if (latenessNanos > mLatenessForDriftNanos) {
-            // When we are on the late side, it may be because of preemption in the kernel,
-            // or timing jitter caused by resampling in the DSP,
-            // or we may be drifting due to a slow HW clock.
-            // We add slight drift value just in case there is actual long term drift
-            // forward caused by a slower clock.
-            // If the clock is faster than the model will get pushed earlier
-            // by the code in the earlier branch.
-            // The two opposing forces should allow the model to track the real clock
-            // over a long time.
-            int64_t driftingTime = mMarkerNanoTime + expectedNanosDelta + kDriftNanos;
-            setPositionAndTime(framePosition,  driftingTime);
-#if ICM_LOG_DRIFT
-            ALOGD("%s() - STATE_RUNNING - #%d, DRIFT, lateness = %d micros",
+        } else if (latenessNanos > mLatenessForJumpNanos) {
+            ALOGD("%s() - STATE_RUNNING - #%d, %5d micros VERY LATE, %d times",
                   __func__,
                   mTimestampCount,
-                  (int) (latenessNanos / 1000));
-#endif
+                  (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+                  mConsecutiveVeryLateCount
+            );
+            // A lateness this large is probably due to a stall in the DSP.
+            // A pause causes a persistent lateness so we can detect it by counting
+            // consecutive late timestamps.
+            if (mConsecutiveVeryLateCount >= kVeryLateCountsNeededToTriggerJump) {
+                // Assume the timestamp is valid and let subsequent EARLY timestamps
+                // move the window quickly to the correct place.
+                setPositionAndTime(framePosition, nanoTime); // JUMP!
+                mDspStallCount++;
+                // Throttle the warnings but do not silence them.
+                // They indicate a bug that needs to be fixed!
+                if ((nanoTime - mLastJumpWarningTimeNanos) > AAUDIO_NANOS_PER_SECOND) {
+                    ALOGW("%s() - STATE_RUNNING - #%d, %5d micros VERY LATE! Force window jump"
+                          ", mDspStallCount = %d",
+                          __func__,
+                          mTimestampCount,
+                          (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+                          mDspStallCount
+                    );
+                    mLastJumpWarningTimeNanos = nanoTime;
+                }
+            } else {
+                nextConsecutiveVeryLateCount = mConsecutiveVeryLateCount + 1;
+                driftForward(latenessNanos, expectedNanosDelta, framePosition);
+            }
+        } else if (latenessNanos > mLatenessForDriftNanos) {
+            driftForward(latenessNanos, expectedNanosDelta, framePosition);
         }
+        mConsecutiveVeryLateCount = nextConsecutiveVeryLateCount;
 
         // Modify mMaxMeasuredLatenessNanos.
         // This affects the "late" side of the window, which controls input glitches.
         if (latenessNanos > mMaxMeasuredLatenessNanos) { // increase
 #if ICM_LOG_DRIFT
-            ALOGD("%s() - STATE_RUNNING - #%d, newmax %d - oldmax %d = %4d micros LATE",
+            ALOGD("%s() - STATE_RUNNING - #%d, newmax %d, oldmax %d micros LATE",
                     __func__,
                     mTimestampCount,
-                    (int) (latenessNanos / 1000),
-                    mMaxMeasuredLatenessNanos / 1000,
-                    (int) ((latenessNanos - mMaxMeasuredLatenessNanos) / 1000)
+                    (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+                    (int) (mMaxMeasuredLatenessNanos / AAUDIO_NANOS_PER_MICROSECOND)
                     );
 #endif
             mMaxMeasuredLatenessNanos = (int32_t) latenessNanos;
-            // Calculate upper region that will trigger a drift forwards.
-            mLatenessForDriftNanos = mMaxMeasuredLatenessNanos - (mMaxMeasuredLatenessNanos >> 4);
-        } else { // decrease
-            // If this is an outlier in lateness then mMaxMeasuredLatenessNanos can go high
-            // and stay there. So we slowly reduce mMaxMeasuredLatenessNanos for better
-            // long term stability. The two opposing forces will keep mMaxMeasuredLatenessNanos
-            // within a reasonable range.
-            mMaxMeasuredLatenessNanos -= kDriftNanos;
         }
+
         break;
     default:
         break;
     }
 }
 
+// When we are on the late side, it may be because of preemption in the kernel,
+// or timing jitter caused by resampling in the DSP,
+// or we may be drifting due to a slow HW clock.
+// We add a slight drift value just in case there is actual long-term drift
+// forward caused by a slower clock.
+// If the clock is faster, then the model will get pushed earlier
+// by the code in the earlier branch.
+// The two opposing forces should allow the model to track the real clock
+// over a long time.
+void IsochronousClockModel::driftForward(int64_t latenessNanos,
+                                         int64_t expectedNanosDelta,
+                                         int64_t framePosition) {
+    const int64_t driftNanos = (latenessNanos - mLatenessForDriftNanos) >> kShifterForDrift;
+    const int64_t minDriftNanos = std::min(driftNanos, kMaxDriftNanos);
+    const int64_t expectedMarkerNanoTime = mMarkerNanoTime + expectedNanosDelta;
+    const int64_t driftedTime = expectedMarkerNanoTime + minDriftNanos;
+    setPositionAndTime(framePosition, driftedTime);
+#if ICM_LOG_DRIFT
+    ALOGD("%s() - STATE_RUNNING - #%d, %5d micros LATE, nudge window forward by %d micros",
+          __func__,
+          mTimestampCount,
+          (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+          (int) (minDriftNanos / AAUDIO_NANOS_PER_MICROSECOND)
+    );
+#endif
+}
+
 void IsochronousClockModel::setSampleRate(int32_t sampleRate) {
     mSampleRate = sampleRate;
     update();
@@ -197,11 +239,18 @@
 void IsochronousClockModel::setFramesPerBurst(int32_t framesPerBurst) {
     mFramesPerBurst = framesPerBurst;
     update();
+    ALOGD("%s() - mFramesPerBurst = %d - mBurstPeriodNanos = %" PRId64,
+          __func__,
+          mFramesPerBurst,
+          mBurstPeriodNanos
+          );
 }
 
 // Update expected lateness based on sampleRate and framesPerBurst
 void IsochronousClockModel::update() {
-    mBurstPeriodNanos = convertDeltaPositionToTime(mFramesPerBurst); // uses mSampleRate
+    mBurstPeriodNanos = convertDeltaPositionToTime(mFramesPerBurst);
+    mLatenessForDriftNanos = mBurstPeriodNanos + kLatenessMarginForSchedulingJitter;
+    mLatenessForJumpNanos = mLatenessForDriftNanos * kScalerForJumpLateness;
 }
 
 int64_t IsochronousClockModel::convertDeltaPositionToTime(int64_t framesDelta) const {
@@ -257,11 +306,11 @@
 }
 
 void IsochronousClockModel::dump() const {
-    ALOGD("mMarkerFramePosition = %" PRIu64, mMarkerFramePosition);
-    ALOGD("mMarkerNanoTime      = %" PRIu64, mMarkerNanoTime);
+    ALOGD("mMarkerFramePosition = %" PRId64, mMarkerFramePosition);
+    ALOGD("mMarkerNanoTime      = %" PRId64, mMarkerNanoTime);
     ALOGD("mSampleRate          = %6d", mSampleRate);
     ALOGD("mFramesPerBurst      = %6d", mFramesPerBurst);
-    ALOGD("mMaxMeasuredLatenessNanos = %6d", mMaxMeasuredLatenessNanos);
+    ALOGD("mMaxMeasuredLatenessNanos = %6" PRId64, mMaxMeasuredLatenessNanos);
     ALOGD("mState               = %6d", mState);
 }
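
The nudge applied by driftForward() is the lateness beyond the drift threshold divided by 2^kShifterForDrift and capped at kMaxDriftNanos. Plugging in a few numbers makes the magnitudes concrete; the 2 ms threshold below is an example value consistent with the defaults in IsochronousClockModel.h:

    // Worked numbers for driftForward(): nudge = min((lateness - threshold) >> 6, 10 us).
    // The constants mirror kShifterForDrift and kMaxDriftNanos; the threshold is an example.
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
        constexpr int64_t kNanosPerMicro = 1000;
        constexpr int64_t kMaxDriftNanos = 10 * kNanosPerMicro;           // 10 microseconds
        constexpr int32_t kShifterForDrift = 6;                           // divide by 64
        constexpr int64_t latenessForDriftNanos = 2000 * kNanosPerMicro;  // e.g. a 2 ms threshold

        for (int64_t latenessMicros : {2100, 2500, 4000, 20000}) {
            const int64_t latenessNanos = latenessMicros * kNanosPerMicro;
            const int64_t driftNanos =
                    (latenessNanos - latenessForDriftNanos) >> kShifterForDrift;
            const int64_t appliedNanos = std::min(driftNanos, kMaxDriftNanos);
            std::printf("lateness %6lld us -> nudge %5lld ns\n",
                        (long long) latenessMicros, (long long) appliedNanos);
        }
        return 0;
    }
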
 
diff --git a/media/libaaudio/src/client/IsochronousClockModel.h b/media/libaaudio/src/client/IsochronousClockModel.h
index 3007237..5be745e 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.h
+++ b/media/libaaudio/src/client/IsochronousClockModel.h
@@ -129,6 +129,9 @@
 
 private:
 
+    void driftForward(int64_t latenessNanos,
+                      int64_t expectedNanosDelta,
+                      int64_t framePosition);
     int32_t getLateTimeOffsetNanos() const;
     void update();
 
@@ -139,28 +142,44 @@
         STATE_RUNNING
     };
 
-    // Amount of time to drift forward when we get a late timestamp.
-    static constexpr int32_t   kDriftNanos         =   1 * 1000;
+    // Maximum amount of time to drift forward when we get a late timestamp.
+    static constexpr int64_t   kMaxDriftNanos      = 10 * AAUDIO_NANOS_PER_MICROSECOND;
     // Safety margin to add to the late edge of the timestamp window.
-    static constexpr int32_t   kExtraLatenessNanos = 100 * 1000;
-    // Initial small threshold for causing a drift later in time.
-    static constexpr int32_t   kInitialLatenessForDriftNanos = 10 * 1000;
+    static constexpr int32_t   kExtraLatenessNanos = 100 * AAUDIO_NANOS_PER_MICROSECOND;
+    // Predicted lateness due to scheduling jitter in the HAL timestamp collection.
+    static constexpr int32_t   kLatenessMarginForSchedulingJitter
+            = 1000 * AAUDIO_NANOS_PER_MICROSECOND;
+    // Factor by which we multiply mLatenessForDriftNanos to get mLatenessForJumpNanos.
+    // This determines when we go from thinking the clock is drifting to
+    // when it has actually paused briefly.
+    static constexpr int32_t   kScalerForJumpLateness = 5;
+    // Shift used to divide the lateness beyond the expected burst window to generate
+    // the drift value for the window. This is meant to be a very slight nudge forward.
+    static constexpr int32_t   kShifterForDrift = 6; // divide by 2^N
+    static constexpr int32_t   kVeryLateCountsNeededToTriggerJump = 2;
 
     static constexpr int32_t   kHistogramBinWidthMicros = 50;
-    static constexpr int32_t   kHistogramBinCount = 128;
+    static constexpr int32_t   kHistogramBinCount       = 128;
 
     int64_t             mMarkerFramePosition{0}; // Estimated HW position.
     int64_t             mMarkerNanoTime{0};      // Estimated HW time.
+    int64_t             mBurstPeriodNanos{0};    // Time between HW bursts.
+    // Includes mBurstPeriodNanos because we sample randomly over time.
+    int64_t             mMaxMeasuredLatenessNanos{0};
+    // Threshold for lateness that triggers a drift later in time.
+    int64_t             mLatenessForDriftNanos{0}; // Set in update()
+    // Based on the observed lateness when the DSP is paused for playing a touch sound.
+    int64_t             mLatenessForJumpNanos{0}; // Set in update()
+    int64_t             mLastJumpWarningTimeNanos{0}; // For throttling warnings.
+
     int32_t             mSampleRate{48000};
     int32_t             mFramesPerBurst{48};     // number of frames transferred at one time.
-    int32_t             mBurstPeriodNanos{0};    // Time between HW bursts.
-    // Includes mBurstPeriodNanos because we sample randomly over time.
-    int32_t             mMaxMeasuredLatenessNanos{0};
-    // Threshold for lateness that triggers a drift later in time.
-    int32_t             mLatenessForDriftNanos;
+    int32_t             mConsecutiveVeryLateCount{0};  // To detect persistent DSP lateness.
+
     clock_model_state_t mState{STATE_STOPPED};   // State machine handles startup sequence.
 
     int32_t             mTimestampCount = 0;  // For logging.
+    int32_t             mDspStallCount = 0;  // For logging.
 
     // distribution of timestamps relative to earliest
     std::unique_ptr<android::audio_utils::Histogram>   mHistogramMicros;
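
With the defaults declared here (48000 Hz, 48 frames per burst), and assuming convertDeltaPositionToTime() is essentially framesDelta * 1e9 / sampleRate, update() produces a 1 ms burst period, a 2 ms drift threshold, and a 10 ms jump threshold. The arithmetic, spelled out:

    // Threshold arithmetic following IsochronousClockModel::update(), using the default
    // mSampleRate = 48000 and mFramesPerBurst = 48 plus the constants declared above.
    #include <cstdint>
    #include <cstdio>

    int main() {
        constexpr int64_t kNanosPerSecond = 1'000'000'000;
        constexpr int64_t kNanosPerMicro  = 1'000;
        constexpr int32_t sampleRate      = 48000;
        constexpr int32_t framesPerBurst  = 48;
        constexpr int64_t kLatenessMarginForSchedulingJitter = 1000 * kNanosPerMicro; // 1 ms
        constexpr int32_t kScalerForJumpLateness = 5;

        const int64_t burstPeriodNanos = framesPerBurst * kNanosPerSecond / sampleRate; // 1 ms
        const int64_t latenessForDriftNanos = burstPeriodNanos + kLatenessMarginForSchedulingJitter;
        const int64_t latenessForJumpNanos  = latenessForDriftNanos * kScalerForJumpLateness;

        std::printf("burst period: %lld us, drift threshold: %lld us, jump threshold: %lld us\n",
                    (long long)(burstPeriodNanos / kNanosPerMicro),
                    (long long)(latenessForDriftNanos / kNanosPerMicro),
                    (long long)(latenessForJumpNanos / kNanosPerMicro));
        return 0;
    }
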
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 90ff4a5..8a13a6f 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -418,12 +418,24 @@
     return audioStream->getSampleRate();
 }
 
+AAUDIO_API int32_t AAudioStream_getHardwareSampleRate(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getHardwareSampleRate();
+}
+
 AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
     return audioStream->getSamplesPerFrame();
 }
 
+AAUDIO_API int32_t AAudioStream_getHardwareChannelCount(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    return audioStream->getHardwareSamplesPerFrame();
+}
+
 AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream)
 {
     return AAudioStream_getChannelCount(stream);
@@ -432,7 +444,7 @@
 AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getState();
+    return audioStream->getStateExternal();
 }
 
 AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream)
@@ -443,6 +455,14 @@
     return AAudioConvert_androidToAAudioDataFormat(internalFormat);
 }
 
+AAUDIO_API aaudio_format_t AAudioStream_getHardwareFormat(AAudioStream* stream)
+{
+    AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+    // Use audio_format_t internally.
+    audio_format_t internalFormat = audioStream->getHardwareFormat();
+    return AAudioConvert_androidToNearestAAudioDataFormat(internalFormat);
+}
+
 AAUDIO_API aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* stream,
                                                 int32_t requestedFrames)
 {
@@ -566,9 +586,7 @@
                                       int64_t *timeNanoseconds)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    if (framePosition == nullptr) {
-        return AAUDIO_ERROR_NULL;
-    } else if (timeNanoseconds == nullptr) {
+    if (framePosition == nullptr || timeNanoseconds == nullptr) {
         return AAUDIO_ERROR_NULL;
     } else if (clockid != CLOCK_MONOTONIC && clockid != CLOCK_BOOTTIME) {
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
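
The three hardware getters added above report the device-side configuration once a stream is open, which can differ from the stream's own values when the framework converts formats or resamples in between. A usage sketch, assuming an NDK build that ships these symbols and omitting error handling:

    // Usage sketch for the hardware-configuration getters added above.
    #include <aaudio/AAudio.h>
    #include <cstdio>

    int main() {
        AAudioStreamBuilder* builder = nullptr;
        AAudioStream* stream = nullptr;
        if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return 1;
        if (AAudioStreamBuilder_openStream(builder, &stream) != AAUDIO_OK) return 1;

        // Hardware values may differ from the requested/returned stream values,
        // e.g. when the framework resamples or remixes between the app and the device.
        std::printf("hw sample rate   = %d\n", AAudioStream_getHardwareSampleRate(stream));
        std::printf("hw channel count = %d\n", AAudioStream_getHardwareChannelCount(stream));
        std::printf("hw format        = %d\n", (int) AAudioStream_getHardwareFormat(stream));

        AAudioStream_close(stream);
        AAudioStreamBuilder_delete(builder);
        return 0;
    }
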
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 8b7b75e..f305e46 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -49,6 +49,9 @@
     mOpPackageName        = other.mOpPackageName;
     mAttributionTag       = other.mAttributionTag;
     mChannelMask          = other.mChannelMask;
+    mHardwareSamplesPerFrame = other.mHardwareSamplesPerFrame;
+    mHardwareSampleRate   = other.mHardwareSampleRate;
+    mHardwareAudioFormat  = other.mHardwareAudioFormat;
 }
 
 static aaudio_result_t isFormatValid(audio_format_t format) {
@@ -58,6 +61,7 @@
         case AUDIO_FORMAT_PCM_32_BIT:
         case AUDIO_FORMAT_PCM_FLOAT:
         case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        case AUDIO_FORMAT_IEC61937:
             break; // valid
         default:
             ALOGD("audioFormat not valid, audio_format_t = 0x%08x", format);
@@ -83,7 +87,6 @@
     switch (mSessionId) {
         case AAUDIO_SESSION_ID_NONE:
         case AAUDIO_SESSION_ID_ALLOCATE:
-            break;
         default:
             break;
     }
@@ -203,7 +206,7 @@
     return validateChannelMask();
 }
 
-bool AAudioStreamParameters::validateChannelMask() const {
+aaudio_result_t AAudioStreamParameters::validateChannelMask() const {
     if (mChannelMask == AAUDIO_UNSPECIFIED) {
         return AAUDIO_OK;
     }
@@ -311,4 +314,7 @@
         "(null)" : mOpPackageName.value().c_str());
     ALOGD("mAttributionTag       = %s", !mAttributionTag.has_value() ?
         "(null)" : mAttributionTag.value().c_str());
-}
+    ALOGD("mHardwareSamplesPerFrame = %6d", mHardwareSamplesPerFrame);
+    ALOGD("mHardwareSampleRate   = %6d", mHardwareSampleRate);
+    ALOGD("mHardwareAudioFormat  = %6d", (int)mHardwareAudioFormat);
+}
\ No newline at end of file
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index cb998bf..7c78f03 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -171,6 +171,30 @@
         mSamplesPerFrame = AAudioConvert_channelMaskToCount(channelMask);
     }
 
+    int32_t getHardwareSamplesPerFrame() const {
+        return mHardwareSamplesPerFrame;
+    }
+
+    void setHardwareSamplesPerFrame(int32_t hardwareSamplesPerFrame) {
+        mHardwareSamplesPerFrame = hardwareSamplesPerFrame;
+    }
+
+    int32_t getHardwareSampleRate() const {
+        return mHardwareSampleRate;
+    }
+
+    void setHardwareSampleRate(int32_t hardwareSampleRate) {
+        mHardwareSampleRate = hardwareSampleRate;
+    }
+
+    audio_format_t getHardwareFormat() const {
+        return mHardwareAudioFormat;
+    }
+
+    void setHardwareFormat(audio_format_t hardwareAudioFormat) {
+        mHardwareAudioFormat = hardwareAudioFormat;
+    }
+
     /**
      * @return bytes per frame of getFormat()
      */
@@ -189,7 +213,7 @@
     void dump() const;
 
 private:
-    bool validateChannelMask() const;
+    aaudio_result_t validateChannelMask() const;
 
     int32_t                         mSamplesPerFrame      = AAUDIO_UNSPECIFIED;
     int32_t                         mSampleRate           = AAUDIO_UNSPECIFIED;
@@ -210,6 +234,10 @@
     std::optional<std::string>      mOpPackageName        = {};
     std::optional<std::string>      mAttributionTag       = {};
     aaudio_channel_mask_t           mChannelMask          = AAUDIO_UNSPECIFIED;
+    int                             mHardwareSamplesPerFrame
+                                                          = AAUDIO_UNSPECIFIED;
+    int                             mHardwareSampleRate   = AAUDIO_UNSPECIFIED;
+    audio_format_t                  mHardwareAudioFormat  = AUDIO_FORMAT_DEFAULT;
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 0e5b8be..30f9677 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -82,6 +82,7 @@
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_FLOAT);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I24_PACKED);
         AAUDIO_CASE_ENUM(AAUDIO_FORMAT_PCM_I32);
+        AAUDIO_CASE_ENUM(AAUDIO_FORMAT_IEC61937);
     }
     return "Unrecognized";
 }
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 06f05b0..56ef1e6 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -21,7 +21,9 @@
 #include <atomic>
 #include <stdint.h>
 
+#include <linux/futex.h>
 #include <media/MediaMetricsItem.h>
+#include <sys/syscall.h>
 
 #include <aaudio/AAudio.h>
 
@@ -59,10 +61,9 @@
     // If the stream is deleted when OPEN or in use then audio resources will leak.
     // This would indicate an internal error. So we want to find this ASAP.
     LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
-                          || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED
-                          || getState() == AAUDIO_STREAM_STATE_DISCONNECTED),
-                        "~AudioStream() - still in use, state = %s",
-                        AudioGlobal_convertStreamStateToText(getState()));
+                          || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
+                        "~AudioStream() - still in use, state = %s disconnected = %d",
+                        AudioGlobal_convertStreamStateToText(getState()), isDisconnected());
 }
 
 aaudio_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -123,13 +124,17 @@
         android::mediametrics::LogItem item(mMetricsId);
         item.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_OPEN)
             .set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL,
-                AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+                    AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
             .set(AMEDIAMETRICS_PROP_SHARINGMODEACTUAL,
-                AudioGlobal_convertSharingModeToText(getSharingMode()))
+                    AudioGlobal_convertSharingModeToText(getSharingMode()))
             .set(AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, getBufferCapacity())
             .set(AMEDIAMETRICS_PROP_BURSTFRAMES, getFramesPerBurst())
             .set(AMEDIAMETRICS_PROP_DIRECTION,
-                AudioGlobal_convertDirectionToText(getDirection()));
+                    AudioGlobal_convertDirectionToText(getDirection()))
+            .set(AMEDIAMETRICS_PROP_ENCODINGHARDWARE,
+                    android::toString(getHardwareFormat()).c_str())
+            .set(AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, (int32_t)getHardwareSamplesPerFrame())
+            .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate());
 
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
@@ -156,6 +161,11 @@
 
     std::lock_guard<std::mutex> lock(mStreamLock);
 
+    if (isDisconnected()) {
+        ALOGW("%s() stream is disconnected", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+
     switch (getState()) {
         // Is this a good time to start?
         case AAUDIO_STREAM_STATE_OPEN:
@@ -174,8 +184,13 @@
                   AudioGlobal_convertStreamStateToText(getState()));
             return AAUDIO_ERROR_INVALID_STATE;
 
-        // Don't start when the stream is dead!
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen now that AAUDIO_STREAM_STATE_DISCONNECTED is deprecated;
+            // trying to start will eventually return ERROR_DISCONNECTED.
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
+            return AAUDIO_ERROR_INTERNAL;
+
+        // Don't start when the stream is dead!
         case AAUDIO_STREAM_STATE_CLOSING:
         case AAUDIO_STREAM_STATE_CLOSED:
         default:
@@ -208,7 +223,11 @@
         // Proceed with pausing.
         case AAUDIO_STREAM_STATE_STARTING:
         case AAUDIO_STREAM_STATE_STARTED:
+            break;
+
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen now that AAUDIO_STREAM_STATE_DISCONNECTED is deprecated.
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
             break;
 
             // Transition from one inactive state to another.
@@ -287,7 +306,10 @@
         // Proceed with stopping.
         case AAUDIO_STREAM_STATE_STARTING:
         case AAUDIO_STREAM_STATE_STARTED:
+            break;
         case AAUDIO_STREAM_STATE_DISCONNECTED:
+            // This must not happen now that AAUDIO_STREAM_STATE_DISCONNECTED is deprecated.
+            ALOGE("%s, unexpected state = AAUDIO_STREAM_STATE_DISCONNECTED", __func__);
             break;
 
         // Transition from one inactive state to another.
@@ -362,37 +384,46 @@
 }
 
 void AudioStream::setState(aaudio_stream_state_t state) {
-    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), mState, state);
-    if (state == mState) {
+    aaudio_stream_state_t oldState = mState.load();
+    ALOGD("%s(s#%d) from %d to %d", __func__, getId(), oldState, state);
+    if (state == oldState) {
         return; // no change
     }
-    // Track transition to DISCONNECTED state.
-    if (state == AAUDIO_STREAM_STATE_DISCONNECTED) {
-        android::mediametrics::LogItem(mMetricsId)
-                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
-                .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
-                .record();
-    }
+    LOG_ALWAYS_FATAL_IF(state == AAUDIO_STREAM_STATE_DISCONNECTED,
+                        "Disconnected state must be separated from mState");
     // CLOSED is a final state
-    if (mState == AAUDIO_STREAM_STATE_CLOSED) {
+    if (oldState == AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSED", __func__, getId(), state);
 
     // Once CLOSING, we can only move to CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_CLOSING
+    } else if (oldState == AAUDIO_STREAM_STATE_CLOSING
                && state != AAUDIO_STREAM_STATE_CLOSED) {
         ALOGW("%s(%d) tried to set to %d but already CLOSING", __func__, getId(), state);
 
-    // Once DISCONNECTED, we can only move to CLOSING or CLOSED state.
-    } else if (mState == AAUDIO_STREAM_STATE_DISCONNECTED
-               && !(state == AAUDIO_STREAM_STATE_CLOSING
-                   || state == AAUDIO_STREAM_STATE_CLOSED)) {
-        ALOGW("%s(%d) tried to set to %d but already DISCONNECTED", __func__, getId(), state);
-
     } else {
-        mState = state;
+        mState.store(state);
+        // Wake up a wakeForStateChange thread if it exists.
+        syscall(SYS_futex, &mState, FUTEX_WAKE_PRIVATE, INT_MAX, NULL, NULL, 0);
     }
 }
 
+void AudioStream::setDisconnected() {
+    const bool old = isDisconnected();
+    ALOGD("%s setting disconnected, current disconnected: %d, current state: %d",
+          __func__, old, getState());
+    if (old) {
+        return; // no change, the stream is already disconnected
+    }
+    mDisconnected.store(true);
+    // Wake up a wakeForStateChange thread if it exists.
+    syscall(SYS_futex, &mState, FUTEX_WAKE_PRIVATE, INT_MAX, NULL, NULL, 0);
+    // Track transition to DISCONNECTED state.
+    android::mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT)
+            .set(AMEDIAMETRICS_PROP_STATE, AudioGlobal_convertStreamStateToText(getState()))
+            .record();
+}
+
 aaudio_result_t AudioStream::waitForStateChange(aaudio_stream_state_t currentState,
                                                 aaudio_stream_state_t *nextState,
                                                 int64_t timeoutNanoseconds)
@@ -403,20 +434,26 @@
     }
 
     int64_t durationNanos = 20 * AAUDIO_NANOS_PER_MILLISECOND; // arbitrary
-    aaudio_stream_state_t state = getState();
+    aaudio_stream_state_t state = getStateExternal();
     while (state == currentState && timeoutNanoseconds > 0) {
         if (durationNanos > timeoutNanoseconds) {
             durationNanos = timeoutNanoseconds;
         }
-        AudioClock::sleepForNanos(durationNanos);
-        timeoutNanoseconds -= durationNanos;
+        struct timespec time;
+        time.tv_sec = durationNanos / AAUDIO_NANOS_PER_SECOND;
+        // Add the fractional nanoseconds.
+        time.tv_nsec = durationNanos - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
 
+        // Sleep for durationNanos. If mState changes from the callback
+        // thread, this thread will wake up earlier.
+        syscall(SYS_futex, &mState, FUTEX_WAIT_PRIVATE, currentState, &time, NULL, 0);
+        timeoutNanoseconds -= durationNanos;
         aaudio_result_t result = updateStateMachine();
         if (result != AAUDIO_OK) {
             return result;
         }
 
-        state = getState();
+        state = getStateExternal();
     }
     if (nextState != nullptr) {
         *nextState = state;
@@ -607,6 +644,13 @@
     doSetVolume(); // apply this change
 }
 
+aaudio_stream_state_t AudioStream::getStateExternal() const {
+    if (isDisconnected()) {
+        return AAUDIO_STREAM_STATE_DISCONNECTED;
+    }
+    return getState();
+}
+
 void AudioStream::MyPlayerBase::registerWithAudioManager(const android::sp<AudioStream>& parent) {
     std::lock_guard<std::mutex> lock(mParentLock);
     mParent = parent;
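
waitForStateChange() now parks on a futex keyed to mState, so setState() or setDisconnected() on another thread can wake it before the poll interval expires. The raw-syscall pattern, reduced to a standalone sketch:

    // Minimal futex wait/wake sketch matching the pattern used in AudioStream above.
    // One thread waits on an atomic int with a timeout; another changes it and wakes the waiter.
    #include <atomic>
    #include <chrono>
    #include <climits>
    #include <cstdint>
    #include <cstdio>
    #include <ctime>
    #include <linux/futex.h>
    #include <sys/syscall.h>
    #include <thread>
    #include <unistd.h>

    static std::atomic<int32_t> gState{0};

    static void futexWait(int32_t expected, long timeoutMs) {
        struct timespec time = { timeoutMs / 1000, (timeoutMs % 1000) * 1000000L };
        // Returns immediately if gState no longer holds `expected`, otherwise sleeps
        // until woken or until the timeout expires.
        syscall(SYS_futex, &gState, FUTEX_WAIT_PRIVATE, expected, &time, nullptr, 0);
    }

    static void futexWakeAll() {
        syscall(SYS_futex, &gState, FUTEX_WAKE_PRIVATE, INT_MAX, nullptr, nullptr, 0);
    }

    int main() {
        std::thread setter([] {
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
            gState.store(1);   // analogous to setState()/setDisconnected()
            futexWakeAll();    // wake any waitForStateChange()-style waiter early
        });
        while (gState.load() == 0) {
            futexWait(/*expected=*/0, /*timeoutMs=*/20);  // wakes early on a state change
        }
        std::printf("observed state = %d\n", gState.load());
        setter.join();
        return 0;
    }
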
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 5fb4528..9b4b734 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -100,10 +100,17 @@
                                        int64_t *timeNanoseconds) = 0;
 
     /**
-     * Update state machine.()
-     * @return
+     * Update state machine.
+     * @return result of the operation.
      */
-    virtual aaudio_result_t updateStateMachine() = 0;
+    aaudio_result_t updateStateMachine() {
+        if (isDataCallbackActive()) {
+            return AAUDIO_OK; // state is getting updated by the callback thread read/write call
+        }
+        return processCommands();
+    };
+
+    virtual aaudio_result_t processCommands() = 0;
 
     // =========== End ABSTRACT methods ===========================
 
@@ -184,9 +191,11 @@
     // ============== Queries ===========================
 
     aaudio_stream_state_t getState() const {
-        return mState;
+        return mState.load();
     }
 
+    aaudio_stream_state_t getStateExternal() const;
+
     virtual int32_t getBufferSize() const {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
@@ -215,14 +224,26 @@
         return mSampleRate;
     }
 
+    aaudio_result_t getHardwareSampleRate() const {
+        return mHardwareSampleRate;
+    }
+
     audio_format_t getFormat()  const {
         return mFormat;
     }
 
+    audio_format_t getHardwareFormat()  const {
+        return mHardwareFormat;
+    }
+
     aaudio_result_t getSamplesPerFrame() const {
         return mSamplesPerFrame;
     }
 
+    aaudio_result_t getHardwareSamplesPerFrame() const {
+        return mHardwareSamplesPerFrame;
+    }
+
     virtual int32_t getPerformanceMode() const {
         return mPerformanceMode;
     }
@@ -403,7 +424,7 @@
      * This should only be called for client streams and not for streams
      * that run in the service.
      */
-    void registerPlayerBase() {
+    virtual void registerPlayerBase() {
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             mPlayerBase->registerWithAudioManager(this);
         }
@@ -521,6 +542,11 @@
     }
 
     // This should not be called after the open() call.
+    void setHardwareSampleRate(int32_t hardwareSampleRate) {
+        mHardwareSampleRate = hardwareSampleRate;
+    }
+
+    // This should not be called after the open() call.
     void setFramesPerBurst(int32_t framesPerBurst) {
         mFramesPerBurst = framesPerBurst;
     }
@@ -541,6 +567,16 @@
     }
 
     // This should not be called after the open() call.
+    void setHardwareFormat(audio_format_t format) {
+        mHardwareFormat = format;
+    }
+
+    // This should not be called after the open() call.
+    void setHardwareSamplesPerFrame(int32_t hardwareSamplesPerFrame) {
+        mHardwareSamplesPerFrame = hardwareSamplesPerFrame;
+    }
+
+    // This should not be called after the open() call.
     void setDeviceFormat(audio_format_t format) {
         mDeviceFormat = format;
     }
@@ -551,6 +587,11 @@
 
     void setState(aaudio_stream_state_t state);
 
+    bool isDisconnected() const {
+        return mDisconnected.load();
+    }
+    void setDisconnected();
+
     void setDeviceId(int32_t deviceId) {
         mDeviceId = deviceId;
     }
@@ -657,6 +698,8 @@
 
     std::mutex                 mStreamLock;
 
+    const android::sp<MyPlayerBase>   mPlayerBase;
+
 private:
 
     aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
@@ -672,17 +715,21 @@
         close_l();
     }
 
-    const android::sp<MyPlayerBase>   mPlayerBase;
+    std::atomic<aaudio_stream_state_t>          mState{AAUDIO_STREAM_STATE_UNINITIALIZED};
+
+    std::atomic_bool            mDisconnected{false};
 
     // These do not change after open().
     int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+    int32_t                     mHardwareSamplesPerFrame = AAUDIO_UNSPECIFIED;
     aaudio_channel_mask_t       mChannelMask = AAUDIO_UNSPECIFIED;
     int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
+    int32_t                     mHardwareSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
     bool                        mSharingModeMatchRequired = false; // must match sharing mode requested
     audio_format_t              mFormat = AUDIO_FORMAT_DEFAULT;
-    aaudio_stream_state_t       mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
+    audio_format_t              mHardwareFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
     int32_t                     mFramesPerBurst = 0;
     int32_t                     mBufferCapacity = 0;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index a100aa9..ac4e2b3 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,7 +24,6 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
-#include <android/media/audio/common/AudioMMapPolicy.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/AudioSystem.h>
@@ -37,10 +36,10 @@
 #include "core/AudioStreamBuilder.h"
 #include "legacy/AudioStreamRecord.h"
 #include "legacy/AudioStreamTrack.h"
+#include "utility/AAudioUtilities.h"
 
 using namespace aaudio;
 
-using android::media::audio::common::AudioMMapPolicy;
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
@@ -95,37 +94,6 @@
     return result;
 }
 
-namespace {
-
-aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
-    switch (aidl) {
-        case AudioMMapPolicy::NEVER:
-            return AAUDIO_POLICY_NEVER;
-        case AudioMMapPolicy::AUTO:
-            return AAUDIO_POLICY_AUTO;
-        case AudioMMapPolicy::ALWAYS:
-            return AAUDIO_POLICY_ALWAYS;
-        case AudioMMapPolicy::UNSPECIFIED:
-        default:
-            return AAUDIO_UNSPECIFIED;
-    }
-}
-
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
-aaudio_policy_t getAAudioPolicy(
-        const std::vector<AudioMMapPolicyInfo>& policyInfos) {
-    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
-    for (size_t i = 1; i < policyInfos.size(); ++i) {
-        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
-            return AAUDIO_POLICY_AUTO;
-        }
-    }
-    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
-}
-
-} // namespace
-
 // Try to open using MMAP path if that is allowed.
 // Fall back to Legacy path if MMAP not available.
 // Exact behavior is controlled by MMapPolicy.
@@ -145,12 +113,28 @@
     }
 
     std::vector<AudioMMapPolicyInfo> policyInfos;
-    // The API setting is the highest priority.
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
-    // If not specified then get from a system property.
-    if (mmapPolicy == AAUDIO_UNSPECIFIED && android::AudioSystem::getMmapPolicyInfo(
-                AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
-        mmapPolicy = getAAudioPolicy(policyInfos);
+    if (android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
+        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(policyInfos);
+        if (mmapPolicy == AAUDIO_POLICY_ALWAYS && systemMmapPolicy == AAUDIO_POLICY_NEVER) {
+            // No need to try as AAudioService is not created and the client only wants MMAP path.
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+        // Use the system property for the mmap policy if
+        //    1. the API setting does not specify an mmap policy, or
+        //    2. the system property specifies the MMAP policy as NEVER. In this case,
+        //       AAudioService will not be started, so there is no need to try the mmap path.
+        if (mmapPolicy == AAUDIO_UNSPECIFIED || systemMmapPolicy == AAUDIO_POLICY_NEVER) {
+            mmapPolicy = systemMmapPolicy;
+        }
+    } else {
+        // If querying the mmap policy info fails, the AAudioService was most likely not created.
+        // In this case, do not try the mmap path.
+        if (mmapPolicy == AAUDIO_POLICY_ALWAYS) {
+            return AAUDIO_ERROR_NO_SERVICE;
+        }
+        mmapPolicy = AAUDIO_POLICY_NEVER;
     }
     // If still not specified then use the default.
     if (mmapPolicy == AAUDIO_UNSPECIFIED) {
@@ -161,7 +145,7 @@
     aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
     if (android::AudioSystem::getMmapPolicyInfo(
             AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
-        mmapExclusivePolicy = getAAudioPolicy(policyInfos);
+        mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
     }
     if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
@@ -192,6 +176,11 @@
         allowMMap = false;
     }
 
+    if (getFormat() == AUDIO_FORMAT_IEC61937) {
+        ALOGD("%s IEC61937 format is selected, do not allow MMAP in this case.", __func__);
+        allowMMap = false;
+    }
+
     if (!allowMMap && !allowLegacy) {
         ALOGE("%s() no backend available: neither MMAP nor legacy path are allowed", __func__);
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
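
A minimal sketch of the policy resolution introduced above, assuming a hypothetical helper name resolveMmapPolicy (the real logic stays inline in AudioStreamBuilder::build(), and the compiled-in default applied at the very end is not shown in the hunk):

    #include <aaudio/AAudio.h>
    #include <aaudio/AAudioTesting.h>  // aaudio_policy_t and the AAUDIO_POLICY_* constants

    // apiPolicy:      value from AudioGlobal_getMMapPolicy(), may be AAUDIO_UNSPECIFIED
    // systemPolicy:   AAudio_getAAudioPolicy() over the DEFAULT policy infos
    // querySucceeded: whether AudioSystem::getMmapPolicyInfo() returned NO_ERROR
    aaudio_policy_t resolveMmapPolicy(aaudio_policy_t apiPolicy,
                                      aaudio_policy_t systemPolicy,
                                      bool querySucceeded,
                                      aaudio_result_t *outResult) {
        *outResult = AAUDIO_OK;
        if (!querySucceeded) {
            // A failed query usually means AAudioService is not running.
            systemPolicy = AAUDIO_POLICY_NEVER;
        }
        if (apiPolicy == AAUDIO_POLICY_ALWAYS && systemPolicy == AAUDIO_POLICY_NEVER) {
            *outResult = AAUDIO_ERROR_NO_SERVICE;  // client insists on MMAP but it is unavailable
            return AAUDIO_POLICY_NEVER;
        }
        if (apiPolicy == AAUDIO_UNSPECIFIED || systemPolicy == AAUDIO_POLICY_NEVER) {
            apiPolicy = systemPolicy;  // defer to the system-wide policy
        }
        // The builder then substitutes its compiled-in default if the result is still unspecified.
        return apiPolicy;
    }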
diff --git a/media/libaaudio/src/flowgraph/Limiter.cpp b/media/libaaudio/src/flowgraph/Limiter.cpp
new file mode 100644
index 0000000..def905a
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/Limiter.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <math.h>
+#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "Limiter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+Limiter::Limiter(int32_t channelCount)
+        : FlowGraphFilter(channelCount) {
+}
+
+int32_t Limiter::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
+
+    int32_t numSamples = numFrames * output.getSamplesPerFrame();
+
+    // Cache the last valid output to reduce memory read/write
+    float lastValidOutput = mLastValidOutput;
+
+    for (int32_t i = 0; i < numSamples; i++) {
+        // Use the previous output if the input is NaN
+        if (!isnan(*inputBuffer)) {
+            lastValidOutput = processFloat(*inputBuffer);
+        }
+        inputBuffer++;
+        *outputBuffer++ = lastValidOutput;
+    }
+    mLastValidOutput = lastValidOutput;
+
+    return numFrames;
+}
+
+float Limiter::processFloat(float in)
+{
+    float in_abs = fabsf(in);
+    if (in_abs <= 1) {
+        return in;
+    }
+    float out;
+    if (in_abs < kXWhenYis3Decibels) {
+        out = (kPolynomialSplineA * in_abs + kPolynomialSplineB) * in_abs + kPolynomialSplineC;
+    } else {
+        out = M_SQRT2;
+    }
+    if (in < 0) {
+        out = -out;
+    }
+    return out;
+}
diff --git a/media/libaaudio/src/flowgraph/Limiter.h b/media/libaaudio/src/flowgraph/Limiter.h
new file mode 100644
index 0000000..393a7bf
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/Limiter.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_LIMITER_H
+#define FLOWGRAPH_LIMITER_H
+
+#include <atomic>
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+class Limiter : public FlowGraphFilter {
+public:
+    explicit Limiter(int32_t channelCount);
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "Limiter";
+    }
+
+private:
+    // These numbers define a quadratic polynomial spline, Ax^2 + Bx + C.
+    // The output range extends up to 3 dB (10^(3/20) ~= sqrt(2)) to match AudioTrack for float data.
+    static constexpr float kPolynomialSplineA = -0.6035533905; // -(1+sqrt(2))/4
+    static constexpr float kPolynomialSplineB = 2.2071067811; // (3+sqrt(2))/2
+    static constexpr float kPolynomialSplineC = -0.6035533905; // -(1+sqrt(2))/4
+    static constexpr float kXWhenYis3Decibels = 1.8284271247; // -1+2sqrt(2)
+
+    /**
+     * Process an input based on the following:
+     * If between -1 and 1, return the input value.
+     * If above kXWhenYis3Decibels, return sqrt(2).
+     * If below -kXWhenYis3Decibels, return -sqrt(2).
+     * If between 1 and kXWhenYis3Decibels, use a quadratic spline (Ax^2 + Bx + C).
+     * If between -kXWhenYis3Decibels and -1, use the absolute value for the spline and flip it.
+     * The derivative of the spline is 1 at 1 and 0 at kXWhenYis3Decibels.
+     * This way, the graph is both continuous and differentiable.
+     */
+    float processFloat(float in);
+
+    // Use the previous valid output for NaN inputs
+    float mLastValidOutput = 0.0f;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_LIMITER_H
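
As a quick numeric check on the constants above, this standalone sketch (not part of the patch) evaluates the spline and its derivative at the two knots; it should print f(1)=1, f'(1)=1, f(kX)=sqrt(2), f'(kX)=0, which is what makes the limiter transfer curve continuous and differentiable.

    #include <math.h>
    #include <stdio.h>

    int main() {
        const double A  = -0.6035533905;   // -(1+sqrt(2))/4
        const double B  =  2.2071067811;   // (3+sqrt(2))/2
        const double C  = -0.6035533905;   // -(1+sqrt(2))/4
        const double kX =  1.8284271247;   // -1+2*sqrt(2)
        auto f  = [&](double x) { return (A * x + B) * x + C; };  // spline value
        auto df = [&](double x) { return 2.0 * A * x + B; };      // spline slope
        printf("f(1)=%f  f'(1)=%f\n", f(1.0), df(1.0));                        // 1.0 and 1.0
        printf("f(kX)=%f (sqrt(2)=%f)  f'(kX)=%f\n", f(kX), M_SQRT2, df(kX));  // sqrt(2) and 0.0
        return 0;
    }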
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index 7193ff3..a3ce58c 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -40,7 +40,7 @@
     ratio.reduce();
     mNumerator = ratio.getNumerator();
     mDenominator = ratio.getDenominator();
-    mIntegerPhase = mDenominator;
+    mIntegerPhase = mDenominator; // so we start with a write needed
 }
 
 // static factory method
diff --git a/media/libaaudio/src/flowgraph/resampler/README.md b/media/libaaudio/src/flowgraph/resampler/README.md
index ea319c7..356f06c 100644
--- a/media/libaaudio/src/flowgraph/resampler/README.md
+++ b/media/libaaudio/src/flowgraph/resampler/README.md
@@ -40,7 +40,7 @@
 
 For example, suppose you are converting from 44100 Hz to 48000 Hz and using an input buffer with 960 frames. If you calculate the number of output frames you get:
 
-    960 * 48000 * 44100 = 1044.897959...
+    960.0 * 48000 / 44100 = 1044.897959...
 
 You cannot generate a fractional number of frames. So the resampler will sometimes generate 1044 frames and sometimes 1045 frames. On average it will generate 1044.897959 frames. The resampler stores the fraction internally and keeps track of when to consume or generate a frame.
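
A minimal sketch of that bookkeeping (illustrative only, not the resampler's actual code): accumulate the leftover fraction in integer units of 1/44100 and emit an output frame whenever a whole frame has built up.

    #include <stdint.h>
    #include <stdio.h>

    int main() {
        const int64_t inputRate = 44100, outputRate = 48000;
        int64_t phase = 0;          // leftover fraction, in units of 1/inputRate of an output frame
        int64_t outputFrames = 0;
        for (int64_t frame = 0; frame < 960; frame++) {
            phase += outputRate;            // each input frame is worth outputRate/inputRate outputs
            while (phase >= inputRate) {    // emit a frame once a whole one has accumulated
                phase -= inputRate;
                outputFrames++;
            }
        }
        // Prints: 960 input frames -> 1044 output frames, remainder 39600/44100
        printf("960 input frames -> %lld output frames, remainder %lld/%lld\n",
               (long long) outputFrames, (long long) phase, (long long) inputRate);
        return 0;
    }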
 
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
index 42d0ca2..a14ee47 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
@@ -24,9 +24,10 @@
         : MultiChannelResampler(builder)
         , mSingleFrame2(builder.getChannelCount()) {
     assert((getNumTaps() % 4) == 0); // Required for loop unrolling.
-    mNumRows = kMaxCoefficients / getNumTaps(); // no guard row needed
-    mPhaseScaler = (double) mNumRows / mDenominator;
-    double phaseIncrement = 1.0 / mNumRows;
+    mNumRows = kMaxCoefficients / getNumTaps(); // includes guard row
+    const int32_t numRowsNoGuard = mNumRows - 1;
+    mPhaseScaler = (double) numRowsNoGuard / mDenominator;
+    const double phaseIncrement = 1.0 / numRowsNoGuard;
     generateCoefficients(builder.getInputRate(),
                          builder.getOutputRate(),
                          mNumRows,
@@ -40,39 +41,31 @@
     std::fill(mSingleFrame2.begin(), mSingleFrame2.end(), 0.0);
 
     // Determine indices into coefficients table.
-    double tablePhase = getIntegerPhase() * mPhaseScaler;
-    int index1 = static_cast<int>(floor(tablePhase));
-    if (index1 >= mNumRows) { // no guard row needed because we wrap the indices
-        tablePhase -= mNumRows;
-        index1 -= mNumRows;
-    }
-
-    int index2 = index1 + 1;
-    if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
-        index2 -= mNumRows;
-    }
-
-    float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
-            * static_cast<size_t>(getNumTaps())];
-    float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
-            * static_cast<size_t>(getNumTaps())];
+    const double tablePhase = getIntegerPhase() * mPhaseScaler;
+    const int indexLow = static_cast<int>(floor(tablePhase));
+    const int indexHigh = indexLow + 1; // OK because using a guard row.
+    assert (indexHigh < mNumRows);
+    float *coefficientsLow = &mCoefficients[static_cast<size_t>(indexLow)
+                                            * static_cast<size_t>(getNumTaps())];
+    float *coefficientsHigh = &mCoefficients[static_cast<size_t>(indexHigh)
+                                             * static_cast<size_t>(getNumTaps())];
 
     float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
-    for (int i = 0; i < mNumTaps; i++) {
-        float coefficient1 = *coefficients1++;
-        float coefficient2 = *coefficients2++;
+    for (int tap = 0; tap < mNumTaps; tap++) {
+        const float coefficientLow = *coefficientsLow++;
+        const float coefficientHigh = *coefficientsHigh++;
         for (int channel = 0; channel < getChannelCount(); channel++) {
-            float sample = *xFrame++;
-            mSingleFrame[channel] +=  sample * coefficient1;
-            mSingleFrame2[channel] += sample * coefficient2;
+            const float sample = *xFrame++;
+            mSingleFrame[channel] += sample * coefficientLow;
+            mSingleFrame2[channel] += sample * coefficientHigh;
         }
     }
 
     // Interpolate and copy to output.
-    float fraction = tablePhase - index1;
+    const float fraction = tablePhase - indexLow;
     for (int channel = 0; channel < getChannelCount(); channel++) {
-        float low = mSingleFrame[channel];
-        float high = mSingleFrame2[channel];
+        const float low = mSingleFrame[channel];
+        const float high = mSingleFrame2[channel];
         frame[channel] = low + (fraction * (high - low));
     }
 }
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
index 432137e..d459abf 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
@@ -57,9 +57,6 @@
     float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
             * static_cast<size_t>(getNumTaps())];
     int index2 = (index1 + 1);
-    if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
-        index2 = 0;
-    }
     float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
             * static_cast<size_t>(getNumTaps())];
     float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index dd11169..8595308 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -85,7 +85,7 @@
     // AudioRecord::Buffer
     // TODO define our own AudioBuffer and pass it from the subclasses.
     size_t written = buffer.size();
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGW("%s() data, stream disconnected", __func__);
         // This will kill the stream and prevent it from being restarted.
         // That is OK because the stream is disconnected.
@@ -127,7 +127,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
@@ -150,7 +150,7 @@
     // AudioRecord::Buffer
     // TODO define our own AudioBuffer and pass it from the subclasses.
     size_t written = buffer.size();
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         ALOGW("%s() data, stream disconnected", __func__);
         // This will kill the stream and prevent it from being restarted.
         // That is OK because the stream is disconnected.
@@ -192,7 +192,7 @@
             mCallbackEnabled.store(false);
         }
 
-        if (updateStateMachine() != AAUDIO_OK) {
+        if (processCommands() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
         }
@@ -214,11 +214,11 @@
 
 void AudioStreamLegacy::forceDisconnect(bool errorCallbackEnabled) {
     // There is no need to disconnect if already in these states.
-    if (getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+    if (!isDisconnected()
             && getState() != AAUDIO_STREAM_STATE_CLOSING
             && getState() != AAUDIO_STREAM_STATE_CLOSED
             ) {
-        setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+        setDisconnected();
         if (errorCallbackEnabled) {
             maybeCallErrorCallback(AAUDIO_ERROR_DISCONNECTED);
         }
@@ -268,7 +268,7 @@
     ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
     if (getDeviceId() != AAUDIO_UNSPECIFIED
             && getDeviceId() != deviceId
-            && getState() != AAUDIO_STREAM_STATE_DISCONNECTED
+            && !isDisconnected()
             ) {
         // Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
         // If we have a data callback and the stream is active, then ask the data callback
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index 1e39e0f..e760dab 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -208,6 +208,10 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    setHardwareSamplesPerFrame(mAudioRecord->getHalChannelCount());
+    setHardwareSampleRate(mAudioRecord->getHalSampleRate());
+    setHardwareFormat(mAudioRecord->getHalFormat());
+
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
         // The block adapter runs before the format conversion.
@@ -364,8 +368,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamRecord::updateStateMachine()
-{
+aaudio_result_t AudioStreamRecord::processCommands() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_wrapping_frames_t position;
     status_t err;
@@ -404,7 +407,7 @@
         return result;
     }
 
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         return AAUDIO_ERROR_DISCONNECTED;
     }
 
@@ -447,7 +450,7 @@
         // In this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
         // AudioRecord invalidation.
         if (bytesActuallyRead == DEAD_OBJECT) {
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             return AAUDIO_ERROR_DISCONNECTED;
         }
         return AAudioConvert_androidToAAudioResult(bytesActuallyRead);
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 5ce73f9..252ff3c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -58,7 +58,7 @@
 
     int64_t getFramesWritten() override;
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     aaudio_direction_t getDirection() const override {
         return AAUDIO_DIRECTION_INPUT;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 6f1dc92..67ee42e 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -132,10 +132,11 @@
             AAudioConvert_contentTypeToInternal(builder.getContentType());
     const audio_usage_t usage =
             AAudioConvert_usageToInternal(builder.getUsage());
-    const audio_flags_mask_t attributesFlags =
-        AAudioConvert_allowCapturePolicyToAudioFlagsMask(builder.getAllowedCapturePolicy(),
-                                                         builder.getSpatializationBehavior(),
-                                                         builder.isContentSpatialized());
+    const audio_flags_mask_t attributesFlags = AAudio_computeAudioFlagsMask(
+                                                            builder.getAllowedCapturePolicy(),
+                                                            builder.getSpatializationBehavior(),
+                                                            builder.isContentSpatialized(),
+                                                            flags);
 
     const audio_attributes_t attributes = {
             .content_type = contentType,
@@ -202,6 +203,10 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    setHardwareSamplesPerFrame(mAudioTrack->getHalChannelCount());
+    setHardwareSampleRate(mAudioTrack->getHalSampleRate());
+    setHardwareFormat(mAudioTrack->getHalFormat());
+
     // We may need to pass the data through a block size adapter to guarantee constant size.
     if (mCallbackBufferSize != AAUDIO_UNSPECIFIED) {
         // This may need to change if we add format conversion before
@@ -248,7 +253,7 @@
 
     if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
         ALOGE("%s - Open canceled since state = %d", __func__, getState());
-        if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED)
+        if (isDisconnected())
         {
             ALOGE("%s - Opening while state is disconnected", __func__);
             safeReleaseClose();
@@ -378,8 +383,7 @@
     return checkForDisconnectRequest(false);
 }
 
-aaudio_result_t AudioStreamTrack::updateStateMachine()
-{
+aaudio_result_t AudioStreamTrack::processCommands() {
     status_t err;
     aaudio_wrapping_frames_t position;
     switch (getState()) {
@@ -407,7 +411,6 @@
             if (err != OK) {
                 return AAudioConvert_androidToAAudioResult(err);
             } else if (position == 0) {
-                // TODO Advance frames read to match written.
                 setState(AAUDIO_STREAM_STATE_FLUSHED);
             }
         }
@@ -434,7 +437,7 @@
         return result;
     }
 
-    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+    if (isDisconnected()) {
         return AAUDIO_ERROR_DISCONNECTED;
     }
 
@@ -448,7 +451,7 @@
         // in this context, a DEAD_OBJECT is more likely to be a disconnect notification due to
         // AudioTrack invalidation
         if (bytesWritten == DEAD_OBJECT) {
-            setState(AAUDIO_STREAM_STATE_DISCONNECTED);
+            setDisconnected();
             return AAUDIO_ERROR_DISCONNECTED;
         }
         return AAudioConvert_androidToAAudioResult(bytesWritten);
@@ -553,6 +556,16 @@
     return status;
 }
 
+void AudioStreamTrack::registerPlayerBase() {
+    AudioStream::registerPlayerBase();
+
+    if (mAudioTrack == nullptr) {
+        ALOGW("%s: cannot set piid, AudioTrack is null", __func__);
+        return;
+    }
+    mAudioTrack->setPlayerIId(mPlayerBase->getPlayerIId());
+}
+
 #if AAUDIO_USE_VOLUME_SHAPER
 
 using namespace android::media::VolumeShaper;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 0f4d72b..05609c4 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -79,7 +79,7 @@
         return AAUDIO_DIRECTION_OUTPUT;
     }
 
-    aaudio_result_t updateStateMachine() override;
+    aaudio_result_t processCommands() override;
 
     int64_t incrementClientFrameCounter(int32_t frames) override {
         return incrementFramesWritten(frames);
@@ -87,6 +87,8 @@
 
     android::status_t doSetVolume() override;
 
+    void registerPlayerBase() override;
+
 #if AAUDIO_USE_VOLUME_SHAPER
     virtual android::binder::Status applyVolumeShaper(
             const android::media::VolumeShaper::Configuration& configuration,
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index f45b816..e28dcb4 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -67,6 +67,9 @@
     AAudioStream_getChannelMask;  # introduced=32
     AAudioStream_getSpatializationBehavior;  # introduced=32
     AAudioStream_isContentSpatialized;       # introduced=32
+    AAudioStream_getHardwareChannelCount; # introduced=UpsideDownCake
+    AAudioStream_getHardwareFormat;       # introduced=UpsideDownCake
+    AAudioStream_getHardwareSampleRate;   # introduced=UpsideDownCake
   local:
     *;
 };
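
The three new symbols exported above correspond to the hardware-property getters added elsewhere in this patch. A hedged usage sketch, assuming an already-opened stream on a device running UpsideDownCake or later:

    #include <aaudio/AAudio.h>
    #include <stdio.h>

    void logHardwareConfig(AAudioStream *stream) {
        // These report the device/HAL values, which may differ from the sample rate,
        // channel count, and format that the stream itself was configured with.
        printf("hardware: %d Hz, %d channels, format %d\n",
               AAudioStream_getHardwareSampleRate(stream),
               AAudioStream_getHardwareChannelCount(stream),
               (int) AAudioStream_getHardwareFormat(stream));
    }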
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 872faca..e8324a8 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -16,24 +16,28 @@
 
 #define LOG_TAG "AAudio"
 //#define LOG_NDEBUG 0
-#include <utils/Log.h>
 
-#include <cutils/properties.h>
+#include <assert.h>
+#include <math.h>
 #include <stdint.h>
+
+#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <cutils/properties.h>
 #include <sys/types.h>
+#include <system/audio.h>
 #include <utils/Errors.h>
+#include <utils/Log.h>
 
 #include "aaudio/AAudio.h"
 #include "core/AudioGlobal.h"
-#include <aaudio/AAudioTesting.h>
-#include <math.h>
-#include <system/audio.h>
-#include <assert.h>
-
 #include "utility/AAudioUtilities.h"
 
 using namespace android;
 
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+
 status_t AAudioConvert_aaudioToAndroidStatus(aaudio_result_t result) {
     // This covers the case for AAUDIO_OK and for positive results.
     if (result >= 0) {
@@ -140,6 +144,9 @@
     case AAUDIO_FORMAT_PCM_I32:
         androidFormat = AUDIO_FORMAT_PCM_32_BIT;
         break;
+    case AAUDIO_FORMAT_IEC61937:
+        androidFormat = AUDIO_FORMAT_IEC61937;
+        break;
     default:
         androidFormat = AUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, aaudioFormat);
@@ -166,6 +173,9 @@
     case AUDIO_FORMAT_PCM_32_BIT:
         aaudioFormat = AAUDIO_FORMAT_PCM_I32;
         break;
+    case AUDIO_FORMAT_IEC61937:
+        aaudioFormat = AAUDIO_FORMAT_IEC61937;
+        break;
     default:
         aaudioFormat = AAUDIO_FORMAT_INVALID;
         ALOGE("%s() 0x%08X unrecognized", __func__, androidFormat);
@@ -174,6 +184,17 @@
     return aaudioFormat;
 }
 
+aaudio_format_t AAudioConvert_androidToNearestAAudioDataFormat(audio_format_t androidFormat) {
+    // Special-case AUDIO_FORMAT_PCM_8_24_BIT because this function should report the resolution
+    // of the data format. AUDIO_FORMAT_PCM_8_24_BIT cannot be requested directly through AAudio,
+    // but hardware may use it under the hood.
+    if (androidFormat == AUDIO_FORMAT_PCM_8_24_BIT) {
+        ALOGD("%s() converting 8.24 to 24 bit packed", __func__);
+        return AAUDIO_FORMAT_PCM_I24_PACKED;
+    }
+    return AAudioConvert_androidToAAudioDataFormat(androidFormat);
+}
+
 // Make a message string from the condition.
 #define STATIC_ASSERT(condition) static_assert(condition, #condition)
 
@@ -234,10 +255,11 @@
     return (audio_source_t) preset; // same value
 }
 
-audio_flags_mask_t AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+audio_flags_mask_t AAudio_computeAudioFlagsMask(
         aaudio_allowed_capture_policy_t policy,
         aaudio_spatialization_behavior_t spatializationBehavior,
-        bool isContentSpatialized) {
+        bool isContentSpatialized,
+        audio_output_flags_t outputFlags) {
     audio_flags_mask_t flagsMask = AUDIO_FLAG_NONE;
     switch (policy) {
         case AAUDIO_UNSPECIFIED:
@@ -274,6 +296,15 @@
         flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_CONTENT_SPATIALIZED);
     }
 
+    if ((outputFlags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_HW_AV_SYNC);
+    }
+    if ((outputFlags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_LOW_LATENCY);
+    } else if ((outputFlags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
+        flagsMask = static_cast<audio_flags_mask_t>(flagsMask | AUDIO_FLAG_DEEP_BUFFER);
+    }
+
     return flagsMask;
 }
 
@@ -632,3 +663,31 @@
     }
     return result;
 }
+
+namespace {
+
+aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
+    switch (aidl) {
+        case AudioMMapPolicy::NEVER:
+            return AAUDIO_POLICY_NEVER;
+        case AudioMMapPolicy::AUTO:
+            return AAUDIO_POLICY_AUTO;
+        case AudioMMapPolicy::ALWAYS:
+            return AAUDIO_POLICY_ALWAYS;
+        case AudioMMapPolicy::UNSPECIFIED:
+        default:
+            return AAUDIO_UNSPECIFIED;
+    }
+}
+
+} // namespace
+
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
+    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
+    for (size_t i = 1; i < policyInfos.size(); ++i) {
+        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+            return AAUDIO_POLICY_AUTO;
+        }
+    }
+    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+}
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index b59ce1c..d44bbab 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -19,14 +19,17 @@
 
 #include <algorithm>
 #include <functional>
+#include <vector>
 #include <stdint.h>
 #include <sys/types.h>
 #include <unistd.h>
 
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <utils/Errors.h>
 #include <system/audio.h>
 
 #include "aaudio/AAudio.h"
+#include "aaudio/AAudioTesting.h"
 
 /**
  * Convert an AAudio result into the closest matching Android status.
@@ -62,6 +65,7 @@
 
 aaudio_format_t AAudioConvert_androidToAAudioDataFormat(audio_format_t format);
 
+aaudio_format_t AAudioConvert_androidToNearestAAudioDataFormat(audio_format_t format);
 
 /**
  * Note that this function does not validate the passed in value.
@@ -90,10 +94,11 @@
  * That is done somewhere else.
  * @return internal audio flags mask
  */
-audio_flags_mask_t AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+audio_flags_mask_t AAudio_computeAudioFlagsMask(
         aaudio_allowed_capture_policy_t policy,
         aaudio_spatialization_behavior_t spatializationBehavior,
-        bool isContentSpatialized);
+        bool isContentSpatialized,
+        audio_output_flags_t outputFlags);
 
 audio_flags_mask_t AAudioConvert_privacySensitiveToAudioFlagsMask(
         bool privacySensitive);
@@ -343,4 +348,9 @@
     AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
 };
 
+// The aaudio policy will be ALWAYS, NEVER, or UNSPECIFIED only when all policy infos are
+// ALWAYS, NEVER, or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
+aaudio_policy_t AAudio_getAAudioPolicy(
+        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+
 #endif //UTILITY_AAUDIO_UTILITIES_H
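
A small sketch illustrating the aggregation rule documented above (it assumes the AIDL AudioMMapPolicyInfo type with its mmapPolicy field, as used earlier in this patch):

    #include <assert.h>
    #include <vector>

    #include <android/media/audio/common/AudioMMapPolicy.h>
    #include <android/media/audio/common/AudioMMapPolicyInfo.h>

    #include "utility/AAudioUtilities.h"

    using android::media::audio::common::AudioMMapPolicy;
    using android::media::audio::common::AudioMMapPolicyInfo;

    void demoPolicyAggregation() {
        std::vector<AudioMMapPolicyInfo> infos(2);
        infos[0].mmapPolicy = AudioMMapPolicy::NEVER;
        infos[1].mmapPolicy = AudioMMapPolicy::ALWAYS;
        assert(AAudio_getAAudioPolicy(infos) == AAUDIO_POLICY_AUTO);   // mixed values -> AUTO
        infos[1].mmapPolicy = AudioMMapPolicy::NEVER;
        assert(AAudio_getAAudioPolicy(infos) == AAUDIO_POLICY_NEVER);  // uniform values -> that value
        assert(AAudio_getAAudioPolicy({}) == AAUDIO_POLICY_AUTO);      // empty list -> AUTO
    }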
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 4b45909..24041bc 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -214,3 +214,26 @@
     srcs: ["test_disconnect_race.cpp"],
     shared_libs: ["libaaudio"],
 }
+
+cc_test {
+    name: "aaudio_test_mmap_path",
+    defaults: [
+        "libaaudio_tests_defaults",
+    ],
+    srcs: ["test_mmap_path.cpp"],
+    shared_libs: [
+        "libaaudio",
+        "libaaudio_internal",
+        "libaudioclient",
+        "liblog",
+    ],
+}
+
+cc_test {
+    name: "test_resampler",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_resampler.cpp"],
+    shared_libs: [
+        "libaaudio_internal",
+    ],
+}
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index b88d562..e5676a7 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -20,6 +20,7 @@
 // "test_aaudio_attributes.cpp". That other file is more current.
 // So these tests could be deleted.
 
+#include <memory>
 #include <stdio.h>
 #include <unistd.h>
 
@@ -40,7 +41,7 @@
                             int privacyMode = DONT_SET,
                             aaudio_direction_t direction = AAUDIO_DIRECTION_OUTPUT) {
 
-    float *buffer = new float[kNumFrames * kChannelCount];
+    std::unique_ptr<float[]> buffer(new float[kNumFrames * kChannelCount]);
 
     AAudioStreamBuilder *aaudioBuilder = nullptr;
     AAudioStream *aaudioStream = nullptr;
@@ -109,16 +110,15 @@
 
     if (direction == AAUDIO_DIRECTION_INPUT) {
         EXPECT_EQ(kNumFrames,
-                  AAudioStream_read(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+                  AAudioStream_read(aaudioStream, buffer.get(), kNumFrames, kNanosPerSecond));
     } else {
         EXPECT_EQ(kNumFrames,
-                  AAudioStream_write(aaudioStream, buffer, kNumFrames, kNanosPerSecond));
+                  AAudioStream_write(aaudioStream, buffer.get(), kNumFrames, kNanosPerSecond));
     }
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream));
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
-    delete[] buffer;
 }
 
 static const aaudio_usage_t sUsages[] = {
diff --git a/media/libaaudio/tests/test_clock_model.cpp b/media/libaaudio/tests/test_clock_model.cpp
index 7f7abbd..e455768 100644
--- a/media/libaaudio/tests/test_clock_model.cpp
+++ b/media/libaaudio/tests/test_clock_model.cpp
@@ -30,7 +30,8 @@
 // We can use arbitrary values here because we are not opening a real audio stream.
 #define SAMPLE_RATE             48000
 #define HW_FRAMES_PER_BURST     48
-#define NANOS_PER_BURST         (NANOS_PER_SECOND * HW_FRAMES_PER_BURST / SAMPLE_RATE)
+// Sometimes we need a (double) value to avoid misguided build warnings.
+#define NANOS_PER_BURST         ((double) NANOS_PER_SECOND * HW_FRAMES_PER_BURST / SAMPLE_RATE)
 
 class ClockModelTestFixture: public ::testing::Test {
 public:
@@ -49,10 +50,20 @@
         // cleanup any pending stuff, but no exceptions allowed
     }
 
-    // Test processing of timestamps when the hardware may be slightly off from
-    // the expected sample rate.
-    void checkDriftingClock(double hardwareFramesPerSecond, int numLoops) {
+    /** Test processing of timestamps when the hardware may be slightly off from
+     * the expected sample rate.
+     * @param hardwareFramesPerSecond  sample rate that may be slightly off
+     * @param numLoops number of iterations
+     * @param hardwarePauseTime  number of seconds to jump forward at halfway point
+     */
+    void checkDriftingClock(double hardwareFramesPerSecond,
+                            int numLoops,
+                            double hardwarePauseTime = 0.0) {
+        int checksToSkip = 0;
         const int64_t startTimeNanos = 500000000; // arbitrary
+        int64_t jumpOffsetNanos = 0;
+
+        srand48(123456); // arbitrary seed for repeatable test results
         model.start(startTimeNanos);
 
         const int64_t startPositionFrames = HW_FRAMES_PER_BURST; // hardware
@@ -64,7 +75,7 @@
         model.processTimestamp(startPositionFrames, markerTime);
         ASSERT_EQ(startPositionFrames, model.convertTimeToPosition(markerTime));
 
-        double elapsedTimeSeconds = startTimeNanos / (double) NANOS_PER_SECOND;
+        double elapsedTimeSeconds = 0.0;
         for (int i = 0; i < numLoops; i++) {
             // Calculate random delay over several bursts.
             const double timeDelaySeconds = 10.0 * drand48() * NANOS_PER_BURST / NANOS_PER_SECOND;
@@ -75,12 +86,37 @@
             const int64_t currentTimeFrames = startPositionFrames +
                                         (int64_t)(hardwareFramesPerSecond * elapsedTimeSeconds);
             const int64_t numBursts = currentTimeFrames / HW_FRAMES_PER_BURST;
-            const int64_t alignedPosition = startPositionFrames + (numBursts * HW_FRAMES_PER_BURST);
+            const int64_t hardwarePosition = startPositionFrames
+                    + (numBursts * HW_FRAMES_PER_BURST);
 
-            // Apply drifting timestamp.
-            model.processTimestamp(alignedPosition, currentTimeNanos);
+            // Simulate a pause in the DSP where the position freezes for a length of time.
+            if (i == numLoops / 2) {
+                jumpOffsetNanos = (int64_t)(hardwarePauseTime * NANOS_PER_SECOND);
+                checksToSkip = 5; // Give the model some time to catch up.
+            }
 
-            ASSERT_EQ(alignedPosition, model.convertTimeToPosition(currentTimeNanos));
+            // Apply drifting timestamp. Add a random time to simulate the
+            // random sampling of the clock that occurs when polling the DSP clock.
+            int64_t sampledTimeNanos = (int64_t) (currentTimeNanos
+                    + jumpOffsetNanos
+                    + (drand48() * NANOS_PER_BURST));
+            model.processTimestamp(hardwarePosition, sampledTimeNanos);
+
+            if (checksToSkip > 0) {
+                checksToSkip--;
+            } else {
+                // When the model is drifting it may be pushed forward or backward.
+                const int64_t modelPosition = model.convertTimeToPosition(sampledTimeNanos);
+                if (hardwareFramesPerSecond >= SAMPLE_RATE) { // fast hardware
+                    ASSERT_LE(hardwarePosition - HW_FRAMES_PER_BURST, modelPosition);
+                    ASSERT_GE(hardwarePosition + HW_FRAMES_PER_BURST, modelPosition);
+                } else {
+                    // Slow hardware. If this fails then the model may be drifting
+                    // forward in time too slowly. Increase kDriftNanos.
+                    ASSERT_LE(hardwarePosition, modelPosition);
+                    ASSERT_GE(hardwarePosition + (2 * HW_FRAMES_PER_BURST), modelPosition);
+                }
+            }
         }
     }
 
@@ -144,23 +180,31 @@
     EXPECT_EQ(position, model.convertTimeToPosition(markerTime + (73 * NANOS_PER_MICROSECOND)));
 
     // convertPositionToTime rounds up
-    EXPECT_EQ(markerTime + NANOS_PER_BURST, model.convertPositionToTime(position + 17));
+    EXPECT_EQ(markerTime + (int64_t)NANOS_PER_BURST, model.convertPositionToTime(position + 17));
 }
 
-#define NUM_LOOPS_DRIFT   10000
+#define NUM_LOOPS_DRIFT   200000
 
-// test nudging the window by using a drifting HW clock
 TEST_F(ClockModelTestFixture, clock_no_drift) {
     checkDriftingClock(SAMPLE_RATE, NUM_LOOPS_DRIFT);
 }
 
-// These slow drift rates caused errors when I disabled the code that handles
-// drifting in the clock model. So I think the test is valid.
+// Test drifting hardware clocks.
 // It is unlikely that real hardware would be off by more than this amount.
+
+// Test a slow clock. This will cause the times to be later than expected.
+// This will push the clock model window forward and cause it to drift.
 TEST_F(ClockModelTestFixture, clock_slow_drift) {
-    checkDriftingClock(0.998 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
+    checkDriftingClock(0.99998 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
 }
 
+// Test a fast hardware clock. This will cause the times to be earlier
+// than expected. This will cause the clock model to jump backwards quickly.
 TEST_F(ClockModelTestFixture, clock_fast_drift) {
-    checkDriftingClock(1.002 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
-}
\ No newline at end of file
+    checkDriftingClock(1.00002 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
+}
+
+// Simulate a pause in the DSP, which can occur if the DSP reroutes the audio.
+TEST_F(ClockModelTestFixture, clock_jump_forward_500) {
+    checkDriftingClock(SAMPLE_RATE, NUM_LOOPS_DRIFT, 0.500);
+}
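
For scale (rough, back-of-the-envelope figures): with SAMPLE_RATE = 48000 and HW_FRAMES_PER_BURST = 48, NANOS_PER_BURST is 1 ms, so each loop advances time by 10 * drand48() * 1 ms, about 5 ms on average. Over NUM_LOOPS_DRIFT = 200000 iterations that is roughly 1000 s of simulated time, and a 0.99998 (or 1.00002) rate ratio corresponds to about 0.96 frames per second of drift, i.e. on the order of a thousand frames (about 20 bursts) of cumulative offset that the clock model has to keep tracking.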
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 66b77eb..6f75f5a 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -26,6 +26,7 @@
 #include <gtest/gtest.h>
 
 #include "flowgraph/ClipToRange.h"
+#include "flowgraph/Limiter.h"
 #include "flowgraph/MonoBlend.h"
 #include "flowgraph/MonoToMultiConverter.h"
 #include "flowgraph/SourceFloat.h"
@@ -319,3 +320,77 @@
     }
 }
 
+TEST(test_flowgraph, module_limiter) {
+    constexpr int kNumSamples = 101;
+    constexpr float kLastSample = 3.0f;
+    constexpr float kFirstSample = -kLastSample;
+    constexpr float kDeltaBetweenSamples = (kLastSample - kFirstSample) / (kNumSamples - 1);
+    constexpr float kTolerance = 0.00001f;
+
+    float input[kNumSamples];
+    float output[kNumSamples];
+    SourceFloat sourceFloat{1};
+    Limiter limiter{1};
+    SinkFloat sinkFloat{1};
+
+    for (int i = 0; i < kNumSamples; i++) {
+        input[i] = kFirstSample + i * kDeltaBetweenSamples;
+    }
+
+    const int numInputFrames = std::size(input);
+    sourceFloat.setData(input, numInputFrames);
+
+    sourceFloat.output.connect(&limiter.input);
+    limiter.output.connect(&sinkFloat.input);
+
+    const int numOutputFrames = std::size(output);
+    int32_t numRead = sinkFloat.read(output, numOutputFrames);
+    ASSERT_EQ(numInputFrames, numRead);
+
+    for (int i = 0; i < numRead; i++) {
+        // limiter must be symmetric wrt 0.
+        EXPECT_NEAR(output[i], -output[kNumSamples - i - 1], kTolerance);
+        if (i > 0) {
+            EXPECT_GE(output[i], output[i - 1]); // limiter must be monotonic
+        }
+        if (input[i] == 0.f) {
+            EXPECT_EQ(0.f, output[i]);
+        } else if (input[i] > 0.0f) {
+            EXPECT_GE(output[i], 0.0f);
+            EXPECT_LE(output[i], M_SQRT2); // limiter actually limits
+            EXPECT_LE(output[i], input[i]); // a limiter, gain <= 1
+        } else {
+            EXPECT_LE(output[i], 0.0f);
+            EXPECT_GE(output[i], -M_SQRT2); // limiter actually limits
+            EXPECT_GE(output[i], input[i]); // a limiter, gain <= 1
+        }
+        if (-1.f <= input[i] && input[i] <= 1.f) {
+            EXPECT_EQ(input[i], output[i]);
+        }
+    }
+}
+
+TEST(test_flowgraph, module_limiter_nan) {
+    constexpr int kArbitraryOutputSize = 100;
+    static const float input[] = {NAN, 0.5f, NAN, NAN, -10.0f, NAN};
+    static const float expected[] = {0.0f, 0.5f, 0.5f, 0.5f, -M_SQRT2, -M_SQRT2};
+    constexpr float tolerance = 0.00001f;
+    float output[kArbitraryOutputSize];
+    SourceFloat sourceFloat{1};
+    Limiter limiter{1};
+    SinkFloat sinkFloat{1};
+
+    const int numInputFrames = std::size(input);
+    sourceFloat.setData(input, numInputFrames);
+
+    sourceFloat.output.connect(&limiter.input);
+    limiter.output.connect(&sinkFloat.input);
+
+    const int numOutputFrames = std::size(output);
+    int32_t numRead = sinkFloat.read(output, numOutputFrames);
+    ASSERT_EQ(numInputFrames, numRead);
+
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_NEAR(expected[i], output[i], tolerance);
+    }
+}
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index 49213dc..dfb1620 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -109,7 +109,7 @@
     sharedMemories[0].setup(fd, memSizeBytes);
     int32_t regionOffset1 = 32;
     int32_t regionSize1 = 16;
-    sharedRegionA.setup(0, regionOffset1, regionSize1);
+    sharedRegionA.setup({0, regionOffset1, regionSize1});
 
     void *region1;
     EXPECT_EQ(AAUDIO_OK, sharedRegionA.resolve(sharedMemories, &region1));
diff --git a/media/libaaudio/tests/test_mmap_path.cpp b/media/libaaudio/tests/test_mmap_path.cpp
new file mode 100644
index 0000000..c8376f6
--- /dev/null
+++ b/media/libaaudio/tests/test_mmap_path.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_mmap_path"
+
+#include <vector>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+#include <android/log.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AudioSystem.h>
+
+#include <gtest/gtest.h>
+
+#include "utility/AAudioUtilities.h"
+
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
+/**
+ * Open a stream via the AAudio API and set the performance mode to LOW_LATENCY. When MMAP is
+ * supported, the stream is expected to use the MMAP path instead of the legacy path. This is
+ * guaranteed on Pixel devices, but may not be guaranteed on other vendors' devices.
+ * @param direction the direction for the stream
+ */
+static void openStreamAndVerify(aaudio_direction_t direction) {
+    std::vector<AudioMMapPolicyInfo> policyInfos;
+    ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::DEFAULT, &policyInfos));
+    if (AAudio_getAAudioPolicy(policyInfos) == AAUDIO_POLICY_NEVER) {
+        // Query the system MMAP policy; if it is NEVER, MMAP is not supported and there is no
+        // need to run the test. The query is here to avoid accidentally running the test on a
+        // device that doesn't support MMAP, such as cuttlefish.
+        ALOGD("Skip test as mmap is not supported");
+        return;
+    }
+
+    AAudioStreamBuilder *aaudioBuilder = nullptr;
+    AAudioStream *aaudioStream = nullptr;
+
+    ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+
+    AAudioStreamBuilder_setDirection(aaudioBuilder, direction);
+    AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+
+    EXPECT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+    EXPECT_EQ(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAudioStream_getPerformanceMode(aaudioStream));
+    EXPECT_TRUE(AAudioStream_isMMapUsed(aaudioStream));
+
+    AAudioStream_close(aaudioStream);
+    AAudioStreamBuilder_delete(aaudioBuilder);
+}
+
+TEST(test_mmap_path, input) {
+    openStreamAndVerify(AAUDIO_DIRECTION_INPUT);
+}
+
+TEST(test_mmap_path, output) {
+    openStreamAndVerify(AAUDIO_DIRECTION_OUTPUT);
+}
diff --git a/media/libaaudio/tests/test_recovery.cpp b/media/libaaudio/tests/test_recovery.cpp
index 6e89f83..11331af 100644
--- a/media/libaaudio/tests/test_recovery.cpp
+++ b/media/libaaudio/tests/test_recovery.cpp
@@ -16,6 +16,7 @@
 
 // Play silence and recover from dead servers or disconnected devices.
 
+#include <memory>
 #include <stdio.h>
 
 #include <aaudio/AAudio.h>
@@ -32,7 +33,6 @@
     int32_t triesLeft = 3;
     int32_t bufferCapacity;
     int32_t framesPerBurst = 0;
-    float *buffer = nullptr;
 
     int32_t actualChannelCount = 0;
     int32_t actualSampleRate = 0;
@@ -83,7 +83,7 @@
         bufferCapacity, framesPerBurst);
 
         int samplesPerBurst = framesPerBurst * actualChannelCount;
-        buffer = new float[samplesPerBurst];
+        std::unique_ptr<float[]> buffer(new float[samplesPerBurst]);
 
         result = AAudioStream_requestStart(aaudioStream);
         if (result != AAUDIO_OK) {
@@ -98,7 +98,7 @@
         int64_t printAt = actualSampleRate;
         while (result == AAUDIO_OK && framesTotal < framesMax) {
             int32_t framesWritten = AAudioStream_write(aaudioStream,
-                                                       buffer, framesPerBurst,
+                                                       buffer.get(), framesPerBurst,
                                                        DEFAULT_TIMEOUT_NANOS);
             if (framesWritten < 0) {
                 result = framesWritten;
@@ -134,6 +134,5 @@
         AAudioStream_close(aaudioStream);
     }
     AAudioStreamBuilder_delete(aaudioBuilder);
-    delete[] buffer;
     printf("          result = %d = %s\n", result, AAudio_convertResultToText(result));
 }
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
new file mode 100644
index 0000000..1e4f59c
--- /dev/null
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Test the flowgraph resampler (MultiChannelResampler).
+ *
+ * This file also exercises a few different conversion techniques because
+ * they have sometimes triggered compiler bugs.
+ */
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "flowgraph/resampler/MultiChannelResampler.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+// Measure zero crossings.
+static int32_t countZeroCrossingsWithHysteresis(float *input, int32_t numSamples) {
+    const float kHysteresisLevel = 0.25f;
+    int zeroCrossingCount = 0;
+    int state = 0; // can be -1, 0, +1
+    for (int i = 0; i < numSamples; i++) {
+        if (input[i] >= kHysteresisLevel) {
+            if (state < 0) {
+                zeroCrossingCount++;
+            }
+            state = 1;
+        } else if (input[i] <= -kHysteresisLevel) {
+            if (state > 0) {
+                zeroCrossingCount++;
+            }
+            state = -1;
+        }
+    }
+    return zeroCrossingCount;
+}
+
+static constexpr int kChannelCount = 1;
+
+/**
+ * Convert a sine wave and then look for glitches.
+ * Glitches have a high value in the second derivative.
+ */
+static void checkResampler(int32_t sourceRate, int32_t sinkRate,
+        MultiChannelResampler::Quality quality) {
+    const int kNumOutputSamples = 10000;
+    const double framesPerCycle = 81.379; // target output period
+
+    int numInputSamples = kNumOutputSamples * sourceRate / sinkRate;
+
+    std::unique_ptr<float[]>  inputBuffer = std::make_unique<float[]>(numInputSamples);
+    std::unique_ptr<float[]>  outputBuffer = std::make_unique<float[]>(kNumOutputSamples);
+
+    // Generate a sine wave for input.
+    const double kPhaseIncrement = 2.0 * sinkRate / (framesPerCycle * sourceRate);
+    double phase = 0.0;
+    for (int i = 0; i < numInputSamples; i++) {
+        inputBuffer[i] = sin(phase * M_PI);
+        phase += kPhaseIncrement;
+        while (phase > 1.0) {
+            phase -= 2.0;
+        }
+    }
+    int sourceZeroCrossingCount = countZeroCrossingsWithHysteresis(
+            inputBuffer.get(), numInputSamples);
+
+    // Use a MultiChannelResampler to convert from the sourceRate to the sinkRate.
+    std::unique_ptr<MultiChannelResampler>  mcResampler;
+    mcResampler.reset(MultiChannelResampler::make(kChannelCount,
+                                                 sourceRate,
+                                                 sinkRate,
+                                                 quality));
+    int inputFramesLeft = numInputSamples;
+    int numRead = 0;
+    float *input = inputBuffer.get(); // for iteration
+    float *output = outputBuffer.get();
+    while (inputFramesLeft > 0) {
+        if (mcResampler->isWriteNeeded()) {
+            mcResampler->writeNextFrame(input);
+            input++;
+            inputFramesLeft--;
+        } else {
+            mcResampler->readNextFrame(output);
+            output++;
+            numRead++;
+        }
+    }
+
+    ASSERT_LE(numRead, kNumOutputSamples);
+    // Some frames are lost priming the FIR filter.
+    const int kMaxAlgorithmicFrameLoss = 16;
+    EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
+
+    int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
+    // Some cycles may get chopped off at the end.
+    const int kMaxZeroCrossingDelta = 3;
+    EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
+
+    // Detect glitches by looking for spikes in the second derivative.
+    output = outputBuffer.get();
+    float previousValue = output[0];
+    float previousSlope = output[1] - output[0];
+    for (int i = 0; i < numRead; i++) {
+        float slope = output[i] - previousValue;
+        float slopeDelta = fabs(slope - previousSlope);
+        // Skip a few samples because there are often some steep slope changes at the beginning.
+        if (i > 10) {
+            EXPECT_LT(slopeDelta, 0.1);
+        }
+        previousValue = output[i];
+        previousSlope = slope;
+    }
+
+#if 0
+    // Save to disk for inspection.
+    FILE *fp = fopen( "/sdcard/Download/src_float_out.raw" , "wb" );
+    fwrite(outputBuffer.get(), sizeof(float), numRead, fp );
+    fclose(fp);
+#endif
+}
+
+
+TEST(test_resampler, resampler_scan_all) {
+    // TODO Add 64000, 88200, 96000 when they work. Failing now.
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+    const MultiChannelResampler::Quality qualities[] =
+    {
+        MultiChannelResampler::Quality::Fastest,
+        MultiChannelResampler::Quality::Low,
+        MultiChannelResampler::Quality::Medium,
+        MultiChannelResampler::Quality::High,
+        MultiChannelResampler::Quality::Best
+    };
+    for (int srcRate : rates) {
+        for (int destRate : rates) {
+            for (auto quality : qualities) {
+                if (srcRate != destRate) {
+                    checkResampler(srcRate, destRate, quality);
+                }
+            }
+        }
+    }
+}
+
+TEST(test_resampler, resampler_8000_11025_best) {
+    checkResampler(8000, 11025, MultiChannelResampler::Quality::Best);
+}
+TEST(test_resampler, resampler_8000_48000_best) {
+    checkResampler(8000, 48000, MultiChannelResampler::Quality::Best);
+}
+
+TEST(test_resampler, resampler_8000_44100_best) {
+    checkResampler(8000, 44100, MultiChannelResampler::Quality::Best);
+}
+
+TEST(test_resampler, resampler_11025_24000_best) {
+    checkResampler(11025, 24000, MultiChannelResampler::Quality::Best);
+}
+
+TEST(test_resampler, resampler_11025_48000_fastest) {
+    checkResampler(11025, 48000, MultiChannelResampler::Quality::Fastest);
+}
+TEST(test_resampler, resampler_11025_48000_low) {
+    checkResampler(11025, 48000, MultiChannelResampler::Quality::Low);
+}
+TEST(test_resampler, resampler_11025_48000_medium) {
+    checkResampler(11025, 48000, MultiChannelResampler::Quality::Medium);
+}
+TEST(test_resampler, resampler_11025_48000_high) {
+    checkResampler(11025, 48000, MultiChannelResampler::Quality::High);
+}
+
+TEST(test_resampler, resampler_11025_48000_best) {
+    checkResampler(11025, 48000, MultiChannelResampler::Quality::Best);
+}
+
+TEST(test_resampler, resampler_11025_44100_best) {
+    checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
+}
+
+// TODO This fails because the output is very low.
+//TEST(test_resampler, resampler_11025_88200_best) {
+//    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+//}
+
+TEST(test_resampler, resampler_16000_48000_best) {
+    checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
+}
+
+TEST(test_resampler, resampler_44100_48000_low) {
+    checkResampler(44100, 48000, MultiChannelResampler::Quality::Low);
+}
+TEST(test_resampler, resampler_44100_48000_best) {
+    checkResampler(44100, 48000, MultiChannelResampler::Quality::Best);
+}
+
+// Look for glitches when downsampling.
+TEST(test_resampler, resampler_48000_11025_best) {
+    checkResampler(48000, 11025, MultiChannelResampler::Quality::Best);
+}
+TEST(test_resampler, resampler_48000_44100_best) {
+    checkResampler(48000, 44100, MultiChannelResampler::Quality::Best);
+}
+TEST(test_resampler, resampler_44100_11025_best) {
+    checkResampler(44100, 11025, MultiChannelResampler::Quality::Best);
+}
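
For readers skimming the test above, the resampler is driven with a pull model: write one input frame whenever isWriteNeeded() returns true, otherwise read one output frame. The standalone sketch below condenses that loop for a mono stream. It assumes only the calls the test already uses (MultiChannelResampler::make(), isWriteNeeded(), writeNextFrame(), readNextFrame()) together with the same resampler header and namespace the test file includes (elided here); resampleMono() itself is a hypothetical helper, not part of this patch.

    #include <memory>

    // Hypothetical helper illustrating the pull-model loop from checkResampler().
    // Returns the number of output frames produced.
    static int resampleMono(float *input, int numInputFrames,
                            float *output, int maxOutputFrames,
                            int sourceRate, int sinkRate) {
        std::unique_ptr<MultiChannelResampler> resampler(MultiChannelResampler::make(
                1 /* channelCount */, sourceRate, sinkRate,
                MultiChannelResampler::Quality::Medium));
        int numWritten = 0;
        int numRead = 0;
        while (numWritten < numInputFrames && numRead < maxOutputFrames) {
            if (resampler->isWriteNeeded()) {
                resampler->writeNextFrame(&input[numWritten++]);  // push one input frame
            } else {
                resampler->readNextFrame(&output[numRead++]);     // pull one output frame
            }
        }
        return numRead;
    }
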
diff --git a/media/libaaudio/tests/test_session_id.cpp b/media/libaaudio/tests/test_session_id.cpp
index 3f7d4fc..5968b5d 100644
--- a/media/libaaudio/tests/test_session_id.cpp
+++ b/media/libaaudio/tests/test_session_id.cpp
@@ -16,6 +16,7 @@
 
 // Test AAudio SessionId, which is used to associate Effects with a stream
 
+#include <memory>
 #include <stdio.h>
 #include <unistd.h>
 
@@ -29,7 +30,7 @@
 // Test AAUDIO_SESSION_ID_NONE default
 static void checkSessionIdNone(aaudio_performance_mode_t perfMode) {
 
-    float *buffer = new float[kNumFrames * kChannelCount];
+    std::unique_ptr<float[]> buffer(new float[kNumFrames * kChannelCount]);
 
     AAudioStreamBuilder *aaudioBuilder = nullptr;
 
@@ -51,12 +52,12 @@
 
     ASSERT_EQ(AAUDIO_OK, AAudioStream_requestStart(aaudioStream1));
 
-    ASSERT_EQ(kNumFrames, AAudioStream_write(aaudioStream1, buffer, kNumFrames, kNanosPerSecond));
+    ASSERT_EQ(kNumFrames,
+              AAudioStream_write(aaudioStream1, buffer.get(), kNumFrames, kNanosPerSecond));
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream1));
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream1));
-    delete[] buffer;
     AAudioStreamBuilder_delete(aaudioBuilder);
 }
 
@@ -72,7 +73,7 @@
 static void checkSessionIdAllocate(aaudio_performance_mode_t perfMode,
                                    aaudio_direction_t direction) {
 
-    float *buffer = new float[kNumFrames * kChannelCount];
+    std::unique_ptr<float[]> buffer(new float[kNumFrames * kChannelCount]);
 
     AAudioStreamBuilder *aaudioBuilder = nullptr;
 
@@ -106,10 +107,10 @@
 
     if (direction == AAUDIO_DIRECTION_INPUT) {
         ASSERT_EQ(kNumFrames, AAudioStream_read(aaudioStream1,
-                                                buffer, kNumFrames, kNanosPerSecond));
+                                                buffer.get(), kNumFrames, kNanosPerSecond));
     } else {
         ASSERT_EQ(kNumFrames, AAudioStream_write(aaudioStream1,
-                                         buffer, kNumFrames, kNanosPerSecond));
+                                         buffer.get(), kNumFrames, kNanosPerSecond));
     }
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream1));
@@ -135,10 +136,10 @@
 
     if (otherDirection == AAUDIO_DIRECTION_INPUT) {
         ASSERT_EQ(kNumFrames, AAudioStream_read(aaudioStream2,
-                                                 buffer, kNumFrames, kNanosPerSecond));
+                                                 buffer.get(), kNumFrames, kNanosPerSecond));
     } else {
         ASSERT_EQ(kNumFrames, AAudioStream_write(aaudioStream2,
-                                                 buffer, kNumFrames, kNanosPerSecond));
+                                                 buffer.get(), kNumFrames, kNanosPerSecond));
     }
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_requestStop(aaudioStream2));
@@ -147,7 +148,6 @@
 
 
     EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream1));
-    delete[] buffer;
     AAudioStreamBuilder_delete(aaudioBuilder);
 }
 
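The test_session_id.cpp hunks above apply one mechanical change: the raw float array managed with new[]/delete[] becomes a std::unique_ptr<float[]>, so the buffer is freed on every path out of the test, including the early returns hidden inside failing ASSERT_* macros. A minimal sketch of the same pattern follows; the helper name and the caller-supplied timeout are illustrative only, not taken from the patch.

    #include <memory>
    #include <aaudio/AAudio.h>

    // Hypothetical helper: writes one zeroed buffer to an open, started stream.
    static aaudio_result_t writeSilence(AAudioStream *stream,
                                        int32_t numFrames, int32_t channelCount,
                                        int64_t timeoutNanos) {
        // The unique_ptr owns the array for the whole scope; no delete[] is
        // needed on any return path.
        std::unique_ptr<float[]> buffer(new float[numFrames * channelCount]());
        return AAudioStream_write(stream, buffer.get(), numFrames, timeoutNanos);
    }
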
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 11724e0..b32667e 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -42,9 +42,6 @@
 using media::audio::common::AudioDeviceAddress;
 using media::audio::common::AudioDeviceDescription;
 using media::audio::common::AudioDeviceType;
-using media::audio::common::AudioEncapsulationMetadataType;
-using media::audio::common::AudioEncapsulationMode;
-using media::audio::common::AudioEncapsulationType;
 using media::audio::common::AudioFormatDescription;
 using media::audio::common::AudioFormatType;
 using media::audio::common::AudioGain;
@@ -69,246 +66,6 @@
 using media::audio::common::Int;
 using media::audio::common::PcmType;
 
-namespace {
-
-enum class Direction {
-    INPUT, OUTPUT
-};
-
-ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
-    switch (type) {
-        case media::AudioPortType::NONE:
-        case media::AudioPortType::SESSION:
-            break;  // must be listed  -Werror,-Wswitch
-        case media::AudioPortType::DEVICE:
-            switch (role) {
-                case media::AudioPortRole::NONE:
-                     break;  // must be listed  -Werror,-Wswitch
-                case media::AudioPortRole::SOURCE:
-                    return Direction::INPUT;
-                case media::AudioPortRole::SINK:
-                    return Direction::OUTPUT;
-            }
-            break;
-        case media::AudioPortType::MIX:
-            switch (role) {
-                case media::AudioPortRole::NONE:
-                     break;  // must be listed  -Werror,-Wswitch
-                case media::AudioPortRole::SOURCE:
-                    return Direction::OUTPUT;
-                case media::AudioPortRole::SINK:
-                    return Direction::INPUT;
-            }
-            break;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
-    switch (type) {
-        case AUDIO_PORT_TYPE_NONE:
-        case AUDIO_PORT_TYPE_SESSION:
-            break;  // must be listed  -Werror,-Wswitch
-        case AUDIO_PORT_TYPE_DEVICE:
-            switch (role) {
-                case AUDIO_PORT_ROLE_NONE:
-                     break;  // must be listed  -Werror,-Wswitch
-                case AUDIO_PORT_ROLE_SOURCE:
-                    return Direction::INPUT;
-                case AUDIO_PORT_ROLE_SINK:
-                    return Direction::OUTPUT;
-            }
-            break;
-        case AUDIO_PORT_TYPE_MIX:
-            switch (role) {
-                case AUDIO_PORT_ROLE_NONE:
-                     break;  // must be listed  -Werror,-Wswitch
-                case AUDIO_PORT_ROLE_SOURCE:
-                    return Direction::OUTPUT;
-                case AUDIO_PORT_ROLE_SINK:
-                    return Direction::INPUT;
-            }
-            break;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-}  // namespace
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Converters
-
-status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize) {
-    if (aidl.size() > maxSize - 1) {
-        return BAD_VALUE;
-    }
-    aidl.copy(dest, aidl.size());
-    dest[aidl.size()] = '\0';
-    return OK;
-}
-
-ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize) {
-    if (legacy == nullptr) {
-        return unexpected(BAD_VALUE);
-    }
-    if (strnlen(legacy, maxSize) == maxSize) {
-        // No null-terminator.
-        return unexpected(BAD_VALUE);
-    }
-    return std::string(legacy);
-}
-
-ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl) {
-    return convertReinterpret<audio_module_handle_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl) {
-    return convertReinterpret<audio_io_handle_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl) {
-    return convertReinterpret<audio_port_handle_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl) {
-    return convertReinterpret<audio_patch_handle_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl) {
-    return convertReinterpret<audio_unique_id_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl) {
-    return convertReinterpret<audio_hw_sync_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl) {
-    return convertReinterpret<pid_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl) {
-    return convertReinterpret<uid_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl) {
-    return String16(aidl.data(), aidl.size());
-}
-
-ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy) {
-    return std::string(String8(legacy).c_str());
-}
-
-// TODO b/182392769: create an optional -> optional util
-ConversionResult<std::optional<String16>>
-aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl) {
-    if (!aidl.has_value()) {
-        return std::nullopt;
-    }
-    ConversionResult<String16> conversion =
-        VALUE_OR_RETURN(aidl2legacy_string_view_String16(aidl.value()));
-    return conversion.value();
-}
-
-ConversionResult<std::optional<std::string_view>>
-legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy) {
-  if (!legacy.has_value()) {
-    return std::nullopt;
-  }
-  ConversionResult<std::string> conversion =
-      VALUE_OR_RETURN(legacy2aidl_String16_string(legacy.value()));
-  return conversion.value();
-}
-
-ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl) {
-    return String8(aidl.data(), aidl.size());
-}
-
-ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy) {
-    return std::string(legacy.c_str());
-}
-
-ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
-        media::AudioIoConfigEvent aidl) {
-    switch (aidl) {
-        case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
-            return AUDIO_OUTPUT_REGISTERED;
-        case media::AudioIoConfigEvent::OUTPUT_OPENED:
-            return AUDIO_OUTPUT_OPENED;
-        case media::AudioIoConfigEvent::OUTPUT_CLOSED:
-            return AUDIO_OUTPUT_CLOSED;
-        case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
-            return AUDIO_OUTPUT_CONFIG_CHANGED;
-        case media::AudioIoConfigEvent::INPUT_REGISTERED:
-            return AUDIO_INPUT_REGISTERED;
-        case media::AudioIoConfigEvent::INPUT_OPENED:
-            return AUDIO_INPUT_OPENED;
-        case media::AudioIoConfigEvent::INPUT_CLOSED:
-            return AUDIO_INPUT_CLOSED;
-        case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
-            return AUDIO_INPUT_CONFIG_CHANGED;
-        case media::AudioIoConfigEvent::CLIENT_STARTED:
-            return AUDIO_CLIENT_STARTED;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
-        audio_io_config_event_t legacy) {
-    switch (legacy) {
-        case AUDIO_OUTPUT_REGISTERED:
-            return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
-        case AUDIO_OUTPUT_OPENED:
-            return media::AudioIoConfigEvent::OUTPUT_OPENED;
-        case AUDIO_OUTPUT_CLOSED:
-            return media::AudioIoConfigEvent::OUTPUT_CLOSED;
-        case AUDIO_OUTPUT_CONFIG_CHANGED:
-            return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
-        case AUDIO_INPUT_REGISTERED:
-            return media::AudioIoConfigEvent::INPUT_REGISTERED;
-        case AUDIO_INPUT_OPENED:
-            return media::AudioIoConfigEvent::INPUT_OPENED;
-        case AUDIO_INPUT_CLOSED:
-            return media::AudioIoConfigEvent::INPUT_CLOSED;
-        case AUDIO_INPUT_CONFIG_CHANGED:
-            return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
-        case AUDIO_CLIENT_STARTED:
-            return media::AudioIoConfigEvent::CLIENT_STARTED;
-    }
-    return unexpected(BAD_VALUE);
-}
-
 ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
         media::AudioPortRole aidl) {
     switch (aidl) {
@@ -365,1507 +122,97 @@
     return unexpected(BAD_VALUE);
 }
 
-namespace {
-
-namespace detail {
-using AudioChannelBitPair = std::pair<audio_channel_mask_t, int>;
-using AudioChannelBitPairs = std::vector<AudioChannelBitPair>;
-using AudioChannelPair = std::pair<audio_channel_mask_t, AudioChannelLayout>;
-using AudioChannelPairs = std::vector<AudioChannelPair>;
-using AudioDevicePair = std::pair<audio_devices_t, AudioDeviceDescription>;
-using AudioDevicePairs = std::vector<AudioDevicePair>;
-using AudioFormatPair = std::pair<audio_format_t, AudioFormatDescription>;
-using AudioFormatPairs = std::vector<AudioFormatPair>;
+ConversionResult<AudioPortDirection> portDirection(
+        media::AudioPortRole role, media::AudioPortType type) {
+    audio_port_role_t legacyRole = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortRole_audio_port_role_t(role));
+    audio_port_type_t legacyType = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortType_audio_port_type_t(type));
+    return portDirection(legacyRole, legacyType);
 }
 
-const detail::AudioChannelBitPairs& getInAudioChannelBits() {
-    static const detail::AudioChannelBitPairs pairs = {
-        { AUDIO_CHANNEL_IN_LEFT, AudioChannelLayout::CHANNEL_FRONT_LEFT },
-        { AUDIO_CHANNEL_IN_RIGHT, AudioChannelLayout::CHANNEL_FRONT_RIGHT },
-        // AUDIO_CHANNEL_IN_FRONT is at the end
-        { AUDIO_CHANNEL_IN_BACK, AudioChannelLayout::CHANNEL_BACK_CENTER },
-        // AUDIO_CHANNEL_IN_*_PROCESSED not supported
-        // AUDIO_CHANNEL_IN_PRESSURE not supported
-        // AUDIO_CHANNEL_IN_*_AXIS not supported
-        // AUDIO_CHANNEL_IN_VOICE_* not supported
-        { AUDIO_CHANNEL_IN_BACK_LEFT, AudioChannelLayout::CHANNEL_BACK_LEFT },
-        { AUDIO_CHANNEL_IN_BACK_RIGHT, AudioChannelLayout::CHANNEL_BACK_RIGHT },
-        { AUDIO_CHANNEL_IN_CENTER, AudioChannelLayout::CHANNEL_FRONT_CENTER },
-        { AUDIO_CHANNEL_IN_LOW_FREQUENCY, AudioChannelLayout::CHANNEL_LOW_FREQUENCY },
-        { AUDIO_CHANNEL_IN_TOP_LEFT, AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT },
-        { AUDIO_CHANNEL_IN_TOP_RIGHT, AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT },
-        // When going from aidl to legacy, IN_CENTER is used
-        { AUDIO_CHANNEL_IN_FRONT, AudioChannelLayout::CHANNEL_FRONT_CENTER }
-    };
-    return pairs;
-}
-
-const detail::AudioChannelPairs& getInAudioChannelPairs() {
-    static const detail::AudioChannelPairs pairs = {
-#define DEFINE_INPUT_LAYOUT(n)                                                 \
-            {                                                                  \
-                AUDIO_CHANNEL_IN_##n,                                          \
-                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
-                        AudioChannelLayout::LAYOUT_##n)                        \
-            }
-
-        DEFINE_INPUT_LAYOUT(MONO),
-        DEFINE_INPUT_LAYOUT(STEREO),
-        DEFINE_INPUT_LAYOUT(FRONT_BACK),
-        // AUDIO_CHANNEL_IN_6 not supported
-        DEFINE_INPUT_LAYOUT(2POINT0POINT2),
-        DEFINE_INPUT_LAYOUT(2POINT1POINT2),
-        DEFINE_INPUT_LAYOUT(3POINT0POINT2),
-        DEFINE_INPUT_LAYOUT(3POINT1POINT2),
-        DEFINE_INPUT_LAYOUT(5POINT1)
-#undef DEFINE_INPUT_LAYOUT
-    };
-    return pairs;
-}
-
-const detail::AudioChannelBitPairs& getOutAudioChannelBits() {
-    static const detail::AudioChannelBitPairs pairs = {
-#define DEFINE_OUTPUT_BITS(n)                                                  \
-            { AUDIO_CHANNEL_OUT_##n, AudioChannelLayout::CHANNEL_##n }
-
-        DEFINE_OUTPUT_BITS(FRONT_LEFT),
-        DEFINE_OUTPUT_BITS(FRONT_RIGHT),
-        DEFINE_OUTPUT_BITS(FRONT_CENTER),
-        DEFINE_OUTPUT_BITS(LOW_FREQUENCY),
-        DEFINE_OUTPUT_BITS(BACK_LEFT),
-        DEFINE_OUTPUT_BITS(BACK_RIGHT),
-        DEFINE_OUTPUT_BITS(FRONT_LEFT_OF_CENTER),
-        DEFINE_OUTPUT_BITS(FRONT_RIGHT_OF_CENTER),
-        DEFINE_OUTPUT_BITS(BACK_CENTER),
-        DEFINE_OUTPUT_BITS(SIDE_LEFT),
-        DEFINE_OUTPUT_BITS(SIDE_RIGHT),
-        DEFINE_OUTPUT_BITS(TOP_CENTER),
-        DEFINE_OUTPUT_BITS(TOP_FRONT_LEFT),
-        DEFINE_OUTPUT_BITS(TOP_FRONT_CENTER),
-        DEFINE_OUTPUT_BITS(TOP_FRONT_RIGHT),
-        DEFINE_OUTPUT_BITS(TOP_BACK_LEFT),
-        DEFINE_OUTPUT_BITS(TOP_BACK_CENTER),
-        DEFINE_OUTPUT_BITS(TOP_BACK_RIGHT),
-        DEFINE_OUTPUT_BITS(TOP_SIDE_LEFT),
-        DEFINE_OUTPUT_BITS(TOP_SIDE_RIGHT),
-        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_LEFT),
-        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_CENTER),
-        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_RIGHT),
-        DEFINE_OUTPUT_BITS(LOW_FREQUENCY_2),
-        DEFINE_OUTPUT_BITS(FRONT_WIDE_LEFT),
-        DEFINE_OUTPUT_BITS(FRONT_WIDE_RIGHT),
-#undef DEFINE_OUTPUT_BITS
-        { AUDIO_CHANNEL_OUT_HAPTIC_A, AudioChannelLayout::CHANNEL_HAPTIC_A },
-        { AUDIO_CHANNEL_OUT_HAPTIC_B, AudioChannelLayout::CHANNEL_HAPTIC_B }
-    };
-    return pairs;
-}
-
-const detail::AudioChannelPairs& getOutAudioChannelPairs() {
-    static const detail::AudioChannelPairs pairs = {
-#define DEFINE_OUTPUT_LAYOUT(n)                                                \
-            {                                                                  \
-                AUDIO_CHANNEL_OUT_##n,                                         \
-                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
-                        AudioChannelLayout::LAYOUT_##n)                        \
-            }
-
-        DEFINE_OUTPUT_LAYOUT(MONO),
-        DEFINE_OUTPUT_LAYOUT(STEREO),
-        DEFINE_OUTPUT_LAYOUT(2POINT1),
-        DEFINE_OUTPUT_LAYOUT(TRI),
-        DEFINE_OUTPUT_LAYOUT(TRI_BACK),
-        DEFINE_OUTPUT_LAYOUT(3POINT1),
-        DEFINE_OUTPUT_LAYOUT(2POINT0POINT2),
-        DEFINE_OUTPUT_LAYOUT(2POINT1POINT2),
-        DEFINE_OUTPUT_LAYOUT(3POINT0POINT2),
-        DEFINE_OUTPUT_LAYOUT(3POINT1POINT2),
-        DEFINE_OUTPUT_LAYOUT(QUAD),
-        DEFINE_OUTPUT_LAYOUT(QUAD_SIDE),
-        DEFINE_OUTPUT_LAYOUT(SURROUND),
-        DEFINE_OUTPUT_LAYOUT(PENTA),
-        DEFINE_OUTPUT_LAYOUT(5POINT1),
-        DEFINE_OUTPUT_LAYOUT(5POINT1_SIDE),
-        DEFINE_OUTPUT_LAYOUT(5POINT1POINT2),
-        DEFINE_OUTPUT_LAYOUT(5POINT1POINT4),
-        DEFINE_OUTPUT_LAYOUT(6POINT1),
-        DEFINE_OUTPUT_LAYOUT(7POINT1),
-        DEFINE_OUTPUT_LAYOUT(7POINT1POINT2),
-        DEFINE_OUTPUT_LAYOUT(7POINT1POINT4),
-        DEFINE_OUTPUT_LAYOUT(13POINT_360RA),
-        DEFINE_OUTPUT_LAYOUT(22POINT2),
-        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_A),
-        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_A),
-        DEFINE_OUTPUT_LAYOUT(HAPTIC_AB),
-        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_AB),
-        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_AB)
-#undef DEFINE_OUTPUT_LAYOUT
-    };
-    return pairs;
-}
-
-const detail::AudioChannelPairs& getVoiceAudioChannelPairs() {
-    static const detail::AudioChannelPairs pairs = {
-#define DEFINE_VOICE_LAYOUT(n)                                                 \
-            {                                                                  \
-                AUDIO_CHANNEL_IN_VOICE_##n,                                    \
-                AudioChannelLayout::make<AudioChannelLayout::Tag::voiceMask>(  \
-                        AudioChannelLayout::VOICE_##n)                         \
-            }
-        DEFINE_VOICE_LAYOUT(UPLINK_MONO),
-        DEFINE_VOICE_LAYOUT(DNLINK_MONO),
-        DEFINE_VOICE_LAYOUT(CALL_MONO)
-#undef DEFINE_VOICE_LAYOUT
-    };
-    return pairs;
-}
-
-AudioDeviceDescription make_AudioDeviceDescription(AudioDeviceType type,
-        const std::string& connection = "") {
-    AudioDeviceDescription result;
-    result.type = type;
-    result.connection = connection;
-    return result;
-}
-
-void append_AudioDeviceDescription(detail::AudioDevicePairs& pairs,
-        audio_devices_t inputType, audio_devices_t outputType,
-        AudioDeviceType inType, AudioDeviceType outType,
-        const std::string& connection = "") {
-    pairs.push_back(std::make_pair(inputType, make_AudioDeviceDescription(inType, connection)));
-    pairs.push_back(std::make_pair(outputType, make_AudioDeviceDescription(outType, connection)));
-}
-
-const detail::AudioDevicePairs& getAudioDevicePairs() {
-    static const detail::AudioDevicePairs pairs = []() {
-        detail::AudioDevicePairs pairs = {{
-            {
-                AUDIO_DEVICE_NONE, AudioDeviceDescription{}
-            },
-            {
-                AUDIO_DEVICE_OUT_EARPIECE, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_SPEAKER_EARPIECE)
-            },
-            {
-                AUDIO_DEVICE_OUT_SPEAKER, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_SPEAKER)
-            },
-            {
-                AUDIO_DEVICE_OUT_WIRED_HEADPHONE, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_HEADPHONE,
-                        AudioDeviceDescription::CONNECTION_ANALOG())
-            },
-            {
-                AUDIO_DEVICE_OUT_BLUETOOTH_SCO, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_DEVICE,
-                        AudioDeviceDescription::CONNECTION_BT_SCO())
-            },
-            {
-                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_CARKIT,
-                        AudioDeviceDescription::CONNECTION_BT_SCO())
-            },
-            {
-                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_HEADPHONE,
-                        AudioDeviceDescription::CONNECTION_BT_A2DP())
-            },
-            {
-                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_SPEAKER,
-                        AudioDeviceDescription::CONNECTION_BT_A2DP())
-            },
-            {
-                AUDIO_DEVICE_OUT_TELEPHONY_TX, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_TELEPHONY_TX)
-            },
-            {
-                AUDIO_DEVICE_OUT_AUX_LINE, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_LINE_AUX)
-            },
-            {
-                AUDIO_DEVICE_OUT_SPEAKER_SAFE, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_SPEAKER_SAFE)
-            },
-            {
-                AUDIO_DEVICE_OUT_HEARING_AID, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_HEARING_AID,
-                        AudioDeviceDescription::CONNECTION_WIRELESS())
-            },
-            {
-                AUDIO_DEVICE_OUT_ECHO_CANCELLER, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_ECHO_CANCELLER)
-            },
-            {
-                AUDIO_DEVICE_OUT_BLE_SPEAKER, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_SPEAKER,
-                        AudioDeviceDescription::CONNECTION_BT_LE())
-            },
-            {
-                AUDIO_DEVICE_OUT_BLE_BROADCAST, make_AudioDeviceDescription(
-                        AudioDeviceType::OUT_BROADCAST,
-                        AudioDeviceDescription::CONNECTION_BT_LE())
-            },
-            // AUDIO_DEVICE_IN_AMBIENT and IN_COMMUNICATION are removed since they were deprecated.
-            {
-                AUDIO_DEVICE_IN_BUILTIN_MIC, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_MICROPHONE)
-            },
-            {
-                AUDIO_DEVICE_IN_BACK_MIC, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_MICROPHONE_BACK)
-            },
-            {
-                AUDIO_DEVICE_IN_TELEPHONY_RX, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_TELEPHONY_RX)
-            },
-            {
-                AUDIO_DEVICE_IN_TV_TUNER, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_TV_TUNER)
-            },
-            {
-                AUDIO_DEVICE_IN_LOOPBACK, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_LOOPBACK)
-            },
-            {
-                AUDIO_DEVICE_IN_BLUETOOTH_BLE, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_DEVICE,
-                        AudioDeviceDescription::CONNECTION_BT_LE())
-            },
-            {
-                AUDIO_DEVICE_IN_ECHO_REFERENCE, make_AudioDeviceDescription(
-                        AudioDeviceType::IN_ECHO_REFERENCE)
-            }
-        }};
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_DEFAULT, AUDIO_DEVICE_OUT_DEFAULT,
-                AudioDeviceType::IN_DEFAULT, AudioDeviceType::OUT_DEFAULT);
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADSET,
-                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
-                AudioDeviceDescription::CONNECTION_ANALOG());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
-                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
-                AudioDeviceDescription::CONNECTION_BT_SCO());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_HDMI, AUDIO_DEVICE_OUT_HDMI,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_HDMI());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
-                AudioDeviceType::IN_SUBMIX, AudioDeviceType::OUT_SUBMIX);
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,
-                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
-                AudioDeviceDescription::CONNECTION_ANALOG());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
-                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
-                AudioDeviceDescription::CONNECTION_USB());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_USB_ACCESSORY, AUDIO_DEVICE_OUT_USB_ACCESSORY,
-                AudioDeviceType::IN_ACCESSORY, AudioDeviceType::OUT_ACCESSORY,
-                AudioDeviceDescription::CONNECTION_USB());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_OUT_USB_DEVICE,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_USB());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_FM_TUNER, AUDIO_DEVICE_OUT_FM,
-                AudioDeviceType::IN_FM_TUNER, AudioDeviceType::OUT_FM);
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_LINE, AUDIO_DEVICE_OUT_LINE,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_ANALOG());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_SPDIF, AUDIO_DEVICE_OUT_SPDIF,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_SPDIF());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_BT_A2DP());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_IP, AUDIO_DEVICE_OUT_IP,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_IP_V4());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_BUS, AUDIO_DEVICE_OUT_BUS,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_BUS());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_PROXY, AUDIO_DEVICE_OUT_PROXY,
-                AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY);
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_OUT_USB_HEADSET,
-                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
-                AudioDeviceDescription::CONNECTION_USB());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_ARC,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_HDMI_ARC());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_HDMI_EARC, AUDIO_DEVICE_OUT_HDMI_EARC,
-                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
-                AudioDeviceDescription::CONNECTION_HDMI_EARC());
-        append_AudioDeviceDescription(pairs,
-                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_HEADSET,
-                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
-                AudioDeviceDescription::CONNECTION_BT_LE());
-        return pairs;
-    }();
-    return pairs;
-}
-
-AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
-    AudioFormatDescription result;
-    result.type = type;
-    return result;
-}
-
-AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
-    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
-    result.pcm = pcm;
-    return result;
-}
-
-AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
-    AudioFormatDescription result;
-    result.encoding = encoding;
-    return result;
-}
-
-AudioFormatDescription make_AudioFormatDescription(PcmType transport,
-        const std::string& encoding) {
-    auto result = make_AudioFormatDescription(encoding);
-    result.pcm = transport;
-    return result;
-}
-
-const detail::AudioFormatPairs& getAudioFormatPairs() {
-    static const detail::AudioFormatPairs pairs = {{
-        {
-            AUDIO_FORMAT_INVALID,
-            make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID)
-        },
-        {
-            AUDIO_FORMAT_DEFAULT, AudioFormatDescription{}
-        },
-        {
-            AUDIO_FORMAT_PCM_16_BIT, make_AudioFormatDescription(PcmType::INT_16_BIT)
-        },
-        {
-            AUDIO_FORMAT_PCM_8_BIT, make_AudioFormatDescription(PcmType::UINT_8_BIT)
-        },
-        {
-            AUDIO_FORMAT_PCM_32_BIT, make_AudioFormatDescription(PcmType::INT_32_BIT)
-        },
-        {
-            AUDIO_FORMAT_PCM_8_24_BIT, make_AudioFormatDescription(PcmType::FIXED_Q_8_24)
-        },
-        {
-            AUDIO_FORMAT_PCM_FLOAT, make_AudioFormatDescription(PcmType::FLOAT_32_BIT)
-        },
-        {
-            AUDIO_FORMAT_PCM_24_BIT_PACKED, make_AudioFormatDescription(PcmType::INT_24_BIT)
-        },
-        {
-            AUDIO_FORMAT_MP3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEG)
-        },
-        {
-            AUDIO_FORMAT_AMR_NB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_NB)
-        },
-        {
-            AUDIO_FORMAT_AMR_WB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_WB)
-        },
-        {
-            AUDIO_FORMAT_AAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_MP4)
-        },
-        {
-            AUDIO_FORMAT_AAC_MAIN, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_MAIN)
-        },
-        {
-            AUDIO_FORMAT_AAC_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LC)
-        },
-        {
-            AUDIO_FORMAT_AAC_SSR, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_SSR)
-        },
-        {
-            AUDIO_FORMAT_AAC_LTP, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LTP)
-        },
-        {
-            AUDIO_FORMAT_AAC_HE_V1, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_HE_V1)
-        },
-        {
-            AUDIO_FORMAT_AAC_SCALABLE,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE)
-        },
-        {
-            AUDIO_FORMAT_AAC_ERLC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ERLC)
-        },
-        {
-            AUDIO_FORMAT_AAC_LD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LD)
-        },
-        {
-            AUDIO_FORMAT_AAC_HE_V2, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_HE_V2)
-        },
-        {
-            AUDIO_FORMAT_AAC_ELD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ELD)
-        },
-        {
-            AUDIO_FORMAT_AAC_XHE, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_XHE)
-        },
-        // AUDIO_FORMAT_HE_AAC_V1 and HE_AAC_V2 are removed since they were deprecated long time
-        // ago.
-        {
-            AUDIO_FORMAT_VORBIS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_VORBIS)
-        },
-        {
-            AUDIO_FORMAT_OPUS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_OPUS)
-        },
-        {
-            AUDIO_FORMAT_AC3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AC3)
-        },
-        {
-            AUDIO_FORMAT_E_AC3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EAC3)
-        },
-        {
-            AUDIO_FORMAT_E_AC3_JOC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EAC3_JOC)
-        },
-        {
-            AUDIO_FORMAT_DTS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS)
-        },
-        {
-            AUDIO_FORMAT_DTS_HD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS_HD)
-        },
-        // In the future, we would like to represent encapsulated bitstreams as
-        // nested AudioFormatDescriptions. The legacy 'AUDIO_FORMAT_IEC61937' type doesn't
-        // specify the format of the encapsulated bitstream.
-        {
-            AUDIO_FORMAT_IEC61937,
-            make_AudioFormatDescription(PcmType::INT_16_BIT, MEDIA_MIMETYPE_AUDIO_IEC61937)
-        },
-        {
-            AUDIO_FORMAT_DOLBY_TRUEHD,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD)
-        },
-        {
-            AUDIO_FORMAT_EVRC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRC)
-        },
-        {
-            AUDIO_FORMAT_EVRCB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCB)
-        },
-        {
-            AUDIO_FORMAT_EVRCWB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCWB)
-        },
-        {
-            AUDIO_FORMAT_EVRCNW, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCNW)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADIF, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADIF)
-        },
-        {
-            AUDIO_FORMAT_WMA, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_WMA)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_WMA_PRO, make_AudioFormatDescription("audio/x-ms-wma.pro")
-        },
-        {
-            AUDIO_FORMAT_AMR_WB_PLUS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS)
-        },
-        {
-            AUDIO_FORMAT_MP2, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)
-        },
-        {
-            AUDIO_FORMAT_QCELP, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_QCELP)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_DSD, make_AudioFormatDescription("audio/vnd.sony.dsd")
-        },
-        {
-            AUDIO_FORMAT_FLAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_FLAC)
-        },
-        {
-            AUDIO_FORMAT_ALAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_ALAC)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_APE, make_AudioFormatDescription("audio/x-ape")
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_MAIN,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_SSR,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_LTP,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_HE_V1,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_SCALABLE,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_ERLC,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_LD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_HE_V2,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_ELD,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD)
-        },
-        {
-            AUDIO_FORMAT_AAC_ADTS_XHE,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE)
-        },
-        {
-            // Note: not in the IANA registry. "vnd.octel.sbc" is not BT SBC.
-            AUDIO_FORMAT_SBC, make_AudioFormatDescription("audio/x-sbc")
-        },
-        {
-            AUDIO_FORMAT_APTX, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_APTX)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_APTX_HD, make_AudioFormatDescription("audio/vnd.qcom.aptx.hd")
-        },
-        {
-            AUDIO_FORMAT_AC4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AC4)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_LDAC, make_AudioFormatDescription("audio/vnd.sony.ldac")
-        },
-        {
-            AUDIO_FORMAT_MAT, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_MAT_1_0,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".1.0"))
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_MAT_2_0,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".2.0"))
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_MAT_2_1,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".2.1"))
-        },
-        {
-            AUDIO_FORMAT_AAC_LATM, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC)
-        },
-        {
-            AUDIO_FORMAT_AAC_LATM_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC)
-        },
-        {
-            AUDIO_FORMAT_AAC_LATM_HE_V1,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1)
-        },
-        {
-            AUDIO_FORMAT_AAC_LATM_HE_V2,
-            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2)
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_CELT, make_AudioFormatDescription("audio/x-celt")
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_APTX_ADAPTIVE, make_AudioFormatDescription("audio/vnd.qcom.aptx.adaptive")
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_LHDC, make_AudioFormatDescription("audio/vnd.savitech.lhdc")
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_LHDC_LL, make_AudioFormatDescription("audio/vnd.savitech.lhdc.ll")
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_APTX_TWSP, make_AudioFormatDescription("audio/vnd.qcom.aptx.twsp")
-        },
-        {
-            // Note: not in the IANA registry.
-            AUDIO_FORMAT_LC3, make_AudioFormatDescription("audio/x-lc3")
-        },
-        {
-            AUDIO_FORMAT_MPEGH, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1)
-        },
-        {
-            AUDIO_FORMAT_MPEGH_BL_L3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3)
-        },
-        {
-            AUDIO_FORMAT_MPEGH_BL_L4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4)
-        },
-        {
-            AUDIO_FORMAT_MPEGH_LC_L3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3)
-        },
-        {
-            AUDIO_FORMAT_MPEGH_LC_L4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4)
-        },
-        {
-            AUDIO_FORMAT_IEC60958,
-            make_AudioFormatDescription(PcmType::INT_24_BIT, MEDIA_MIMETYPE_AUDIO_IEC60958)
-        },
-        {
-            AUDIO_FORMAT_DTS_UHD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS_UHD)
-        },
-        {
-            AUDIO_FORMAT_DRA, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DRA)
-        },
-    }};
-    return pairs;
-}
-
-template<typename S, typename T>
-std::unordered_map<S, T> make_DirectMap(const std::vector<std::pair<S, T>>& v) {
-    std::unordered_map<S, T> result(v.begin(), v.end());
-    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
-    return result;
-}
-
-template<typename S, typename T>
-std::unordered_map<S, T> make_DirectMap(
-        const std::vector<std::pair<S, T>>& v1, const std::vector<std::pair<S, T>>& v2) {
-    std::unordered_map<S, T> result(v1.begin(), v1.end());
-    LOG_ALWAYS_FATAL_IF(result.size() != v1.size(), "Duplicate key elements detected in v1");
-    result.insert(v2.begin(), v2.end());
-    LOG_ALWAYS_FATAL_IF(result.size() != v1.size() + v2.size(),
-            "Duplicate key elements detected in v1+v2");
-    return result;
-}
-
-template<typename S, typename T>
-std::unordered_map<T, S> make_ReverseMap(const std::vector<std::pair<S, T>>& v) {
-    std::unordered_map<T, S> result;
-    std::transform(v.begin(), v.end(), std::inserter(result, result.begin()),
-            [](const std::pair<S, T>& p) {
-                return std::make_pair(p.second, p.first);
-            });
-    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
-    return result;
-}
-
-}  // namespace
-
-audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
-        int aidlLayout, bool isInput) {
-    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
-    const int aidlLayoutInitial = aidlLayout; // for error message
-    audio_channel_mask_t legacy = AUDIO_CHANNEL_NONE;
-    for (const auto& bitPair : bitMapping) {
-        if ((aidlLayout & bitPair.second) == bitPair.second) {
-            legacy = static_cast<audio_channel_mask_t>(legacy | bitPair.first);
-            aidlLayout &= ~bitPair.second;
-            if (aidlLayout == 0) {
-                return legacy;
-            }
-        }
-    }
-    ALOGE("%s: aidl layout 0x%x contains bits 0x%x that have no match to legacy %s bits",
-            __func__, aidlLayoutInitial, aidlLayout, isInput ? "input" : "output");
-    return AUDIO_CHANNEL_NONE;
-}
-
-ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-        const AudioChannelLayout& aidl, bool isInput) {
-    using ReverseMap = std::unordered_map<AudioChannelLayout, audio_channel_mask_t>;
-    using Tag = AudioChannelLayout::Tag;
-    static const ReverseMap mIn = make_ReverseMap(getInAudioChannelPairs());
-    static const ReverseMap mOut = make_ReverseMap(getOutAudioChannelPairs());
-    static const ReverseMap mVoice = make_ReverseMap(getVoiceAudioChannelPairs());
-
-    auto convert = [](const AudioChannelLayout& aidl, const ReverseMap& m,
-            const char* func, const char* type) -> ConversionResult<audio_channel_mask_t> {
-        if (auto it = m.find(aidl); it != m.end()) {
-            return it->second;
-        } else {
-            ALOGW("%s: no legacy %s audio_channel_mask_t found for %s", func, type,
-                    aidl.toString().c_str());
-            return unexpected(BAD_VALUE);
-        }
-    };
-
-    switch (aidl.getTag()) {
-        case Tag::none:
-            return AUDIO_CHANNEL_NONE;
-        case Tag::invalid:
-            return AUDIO_CHANNEL_INVALID;
-        case Tag::indexMask:
-            // Index masks do not have pre-defined values.
-            if (const int bits = aidl.get<Tag::indexMask>();
-                    __builtin_popcount(bits) != 0 &&
-                    __builtin_popcount(bits) <= AUDIO_CHANNEL_COUNT_MAX) {
-                return audio_channel_mask_from_representation_and_bits(
-                        AUDIO_CHANNEL_REPRESENTATION_INDEX, bits);
-            } else {
-                ALOGE("%s: invalid indexMask value 0x%x in %s",
-                        __func__, bits, aidl.toString().c_str());
-                return unexpected(BAD_VALUE);
-            }
-        case Tag::layoutMask:
-            // The fast path is to find a direct match for some known layout mask.
-            if (const auto layoutMatch = convert(aidl, isInput ? mIn : mOut, __func__,
-                    isInput ? "input" : "output");
-                    layoutMatch.ok()) {
-                return layoutMatch;
-            }
-            // If a match for a predefined layout wasn't found, make a custom one from bits.
-            if (audio_channel_mask_t bitMask =
-                    aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
-                            aidl.get<Tag::layoutMask>(), isInput);
-                    bitMask != AUDIO_CHANNEL_NONE) {
-                return bitMask;
-            }
-            return unexpected(BAD_VALUE);
-        case Tag::voiceMask:
-            return convert(aidl, mVoice, __func__, "voice");
-    }
-    ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
-    return unexpected(BAD_VALUE);
-}
-
-int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
-        audio_channel_mask_t legacy, bool isInput) {
-    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
-    const int legacyInitial = legacy; // for error message
-    int aidlLayout = 0;
-    for (const auto& bitPair : bitMapping) {
-        if ((legacy & bitPair.first) == bitPair.first) {
-            aidlLayout |= bitPair.second;
-            legacy = static_cast<audio_channel_mask_t>(legacy & ~bitPair.first);
-            if (legacy == 0) {
-                return aidlLayout;
-            }
-        }
-    }
-    ALOGE("%s: legacy %s audio_channel_mask_t 0x%x contains unrecognized bits 0x%x",
-            __func__, isInput ? "input" : "output", legacyInitial, legacy);
-    return 0;
-}
-
-ConversionResult<AudioChannelLayout> legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
-        audio_channel_mask_t legacy, bool isInput) {
-    using DirectMap = std::unordered_map<audio_channel_mask_t, AudioChannelLayout>;
-    using Tag = AudioChannelLayout::Tag;
-    static const DirectMap mInAndVoice = make_DirectMap(
-            getInAudioChannelPairs(), getVoiceAudioChannelPairs());
-    static const DirectMap mOut = make_DirectMap(getOutAudioChannelPairs());
-
-    auto convert = [](const audio_channel_mask_t legacy, const DirectMap& m,
-            const char* func, const char* type) -> ConversionResult<AudioChannelLayout> {
-        if (auto it = m.find(legacy); it != m.end()) {
-            return it->second;
-        } else {
-            ALOGW("%s: no AudioChannelLayout found for legacy %s audio_channel_mask_t value 0x%x",
-                    func, type, legacy);
-            return unexpected(BAD_VALUE);
-        }
-    };
-
-    if (legacy == AUDIO_CHANNEL_NONE) {
-        return AudioChannelLayout{};
-    } else if (legacy == AUDIO_CHANNEL_INVALID) {
-        return AudioChannelLayout::make<Tag::invalid>(0);
-    }
-
-    const audio_channel_representation_t repr = audio_channel_mask_get_representation(legacy);
-    if (repr == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
-        if (audio_channel_mask_is_valid(legacy)) {
-            const int indexMask = VALUE_OR_RETURN(
-                    convertIntegral<int>(audio_channel_mask_get_bits(legacy)));
-            return AudioChannelLayout::make<Tag::indexMask>(indexMask);
-        } else {
-            ALOGE("%s: legacy audio_channel_mask_t value 0x%x is invalid", __func__, legacy);
-            return unexpected(BAD_VALUE);
-        }
-    } else if (repr == AUDIO_CHANNEL_REPRESENTATION_POSITION) {
-        // The fast path is to find a direct match for some known layout mask.
-        if (const auto layoutMatch = convert(legacy, isInput ? mInAndVoice : mOut, __func__,
-                isInput ? "input / voice" : "output");
-                layoutMatch.ok()) {
-            return layoutMatch;
-        }
-        // If a match for a predefined layout wasn't found, make a custom one from bits,
-        // rejecting those with voice channel bits.
-        if (!isInput ||
-                (legacy & (AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_VOICE_DNLINK)) == 0) {
-            if (int bitMaskLayout =
-                    legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
-                            legacy, isInput);
-                    bitMaskLayout != 0) {
-                return AudioChannelLayout::make<Tag::layoutMask>(bitMaskLayout);
-            }
-        } else {
-            ALOGE("%s: legacy audio_channel_mask_t value 0x%x contains voice bits",
-                    __func__, legacy);
-        }
-        return unexpected(BAD_VALUE);
-    }
-
-    ALOGE("%s: unknown representation %d in audio_channel_mask_t value 0x%x",
-            __func__, repr, legacy);
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
-        const AudioDeviceDescription& aidl) {
-    static const std::unordered_map<AudioDeviceDescription, audio_devices_t> m =
-            make_ReverseMap(getAudioDevicePairs());
-    if (auto it = m.find(aidl); it != m.end()) {
-        return it->second;
-    } else {
-        ALOGE("%s: no legacy audio_devices_t found for %s", __func__, aidl.toString().c_str());
-        return unexpected(BAD_VALUE);
-    }
-}
-
-ConversionResult<AudioDeviceDescription> legacy2aidl_audio_devices_t_AudioDeviceDescription(
-        audio_devices_t legacy) {
-    static const std::unordered_map<audio_devices_t, AudioDeviceDescription> m =
-            make_DirectMap(getAudioDevicePairs());
-    if (auto it = m.find(legacy); it != m.end()) {
-        return it->second;
-    } else {
-        ALOGE("%s: no AudioDeviceDescription found for legacy audio_devices_t value 0x%x",
-                __func__, legacy);
-        return unexpected(BAD_VALUE);
-    }
-}
-
-status_t aidl2legacy_AudioDevice_audio_device(
-        const AudioDevice& aidl,
-        audio_devices_t* legacyType, char* legacyAddress) {
-    *legacyType = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    return aidl2legacy_string(
-                    aidl.address.get<AudioDeviceAddress::id>(),
-                    legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
-}
-
-status_t aidl2legacy_AudioDevice_audio_device(
-        const AudioDevice& aidl,
-        audio_devices_t* legacyType, String8* legacyAddress) {
-    *legacyType = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(
-                    aidl.address.get<AudioDeviceAddress::id>()));
-    return OK;
-}
-
-status_t aidl2legacy_AudioDevice_audio_device(
-        const AudioDevice& aidl,
-        audio_devices_t* legacyType, std::string* legacyAddress) {
-    *legacyType = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
-    *legacyAddress = aidl.address.get<AudioDeviceAddress::id>();
-    return OK;
-}
-
-ConversionResult<AudioDevice> legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const char* legacyAddress) {
-    AudioDevice aidl;
-    aidl.type = VALUE_OR_RETURN(
-            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
-    const std::string aidl_id = VALUE_OR_RETURN(
-            legacy2aidl_string(legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN));
-    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
-    return aidl;
-}
-
-ConversionResult<AudioDevice>
-legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const String8& legacyAddress) {
-    AudioDevice aidl;
-    aidl.type = VALUE_OR_RETURN(
-            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
-    const std::string aidl_id = VALUE_OR_RETURN(
-            legacy2aidl_String8_string(legacyAddress));
-    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
-    return aidl;
-}
-
-ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
-        const AudioFormatDescription& aidl) {
-    static const std::unordered_map<AudioFormatDescription, audio_format_t> m =
-            make_ReverseMap(getAudioFormatPairs());
-    if (auto it = m.find(aidl); it != m.end()) {
-        return it->second;
-    } else {
-        ALOGE("%s: no legacy audio_format_t found for %s", __func__, aidl.toString().c_str());
-        return unexpected(BAD_VALUE);
-    }
-}
-
-ConversionResult<AudioFormatDescription> legacy2aidl_audio_format_t_AudioFormatDescription(
-        audio_format_t legacy) {
-    static const std::unordered_map<audio_format_t, AudioFormatDescription> m =
-            make_DirectMap(getAudioFormatPairs());
-    if (auto it = m.find(legacy); it != m.end()) {
-        return it->second;
-    } else {
-        ALOGE("%s: no AudioFormatDescription found for legacy audio_format_t value 0x%x",
-                __func__, legacy);
-        return unexpected(BAD_VALUE);
-    }
-}
-
-ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(
-        AudioGainMode aidl) {
+ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
+        media::AudioIoConfigEvent aidl) {
     switch (aidl) {
-        case AudioGainMode::JOINT:
-            return AUDIO_GAIN_MODE_JOINT;
-        case AudioGainMode::CHANNELS:
-            return AUDIO_GAIN_MODE_CHANNELS;
-        case AudioGainMode::RAMP:
-            return AUDIO_GAIN_MODE_RAMP;
+        case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+            return AUDIO_OUTPUT_REGISTERED;
+        case media::AudioIoConfigEvent::OUTPUT_OPENED:
+            return AUDIO_OUTPUT_OPENED;
+        case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+            return AUDIO_OUTPUT_CLOSED;
+        case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+            return AUDIO_OUTPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::INPUT_REGISTERED:
+            return AUDIO_INPUT_REGISTERED;
+        case media::AudioIoConfigEvent::INPUT_OPENED:
+            return AUDIO_INPUT_OPENED;
+        case media::AudioIoConfigEvent::INPUT_CLOSED:
+            return AUDIO_INPUT_CLOSED;
+        case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+            return AUDIO_INPUT_CONFIG_CHANGED;
+        case media::AudioIoConfigEvent::CLIENT_STARTED:
+            return AUDIO_CLIENT_STARTED;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(
-        audio_gain_mode_t legacy) {
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
+        audio_io_config_event_t legacy) {
     switch (legacy) {
-        case AUDIO_GAIN_MODE_JOINT:
-            return AudioGainMode::JOINT;
-        case AUDIO_GAIN_MODE_CHANNELS:
-            return AudioGainMode::CHANNELS;
-        case AUDIO_GAIN_MODE_RAMP:
-            return AudioGainMode::RAMP;
+        case AUDIO_OUTPUT_REGISTERED:
+            return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+        case AUDIO_OUTPUT_OPENED:
+            return media::AudioIoConfigEvent::OUTPUT_OPENED;
+        case AUDIO_OUTPUT_CLOSED:
+            return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+        case AUDIO_OUTPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+        case AUDIO_INPUT_REGISTERED:
+            return media::AudioIoConfigEvent::INPUT_REGISTERED;
+        case AUDIO_INPUT_OPENED:
+            return media::AudioIoConfigEvent::INPUT_OPENED;
+        case AUDIO_INPUT_CLOSED:
+            return media::AudioIoConfigEvent::INPUT_CLOSED;
+        case AUDIO_INPUT_CONFIG_CHANGED:
+            return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+        case AUDIO_CLIENT_STARTED:
+            return media::AudioIoConfigEvent::CLIENT_STARTED;
     }
     return unexpected(BAD_VALUE);
 }
-
-ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl) {
-    return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, AudioGainMode>(
-            aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
-            // AudioGainMode is index-based.
-            indexToEnum_index<AudioGainMode>,
-            // AUDIO_GAIN_MODE_* constants are mask-based.
-            enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
-    return convertBitmask<int32_t, audio_gain_mode_t, AudioGainMode, audio_gain_mode_t>(
-            legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
-            // AUDIO_GAIN_MODE_* constants are mask-based.
-            indexToEnum_bitmask<audio_gain_mode_t>,
-            // AudioGainMode is index-based.
-            enumToMask_index<int32_t, AudioGainMode>);
-}
-
-ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
-        const AudioGainConfig& aidl, bool isInput) {
-    audio_gain_config legacy;
-    legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
-    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
-    legacy.channel_mask = VALUE_OR_RETURN(
-            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
-    const bool isJoint = bitmaskIsSet(aidl.mode, AudioGainMode::JOINT);
-    size_t numValues = isJoint ? 1
-                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
-                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
-    if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
-        return unexpected(BAD_VALUE);
-    }
-    for (size_t i = 0; i < numValues; ++i) {
-        legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
-    }
-    legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.rampDurationMs));
-    return legacy;
-}
-
-ConversionResult<AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
-        const audio_gain_config& legacy, bool isInput) {
-    AudioGainConfig aidl;
-    aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
-    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
-    aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
-    const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
-    size_t numValues = isJoint ? 1
-                               : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
-                                         : audio_channel_count_from_out_mask(legacy.channel_mask);
-    aidl.values.resize(numValues);
-    for (size_t i = 0; i < numValues; ++i) {
-        aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
-    }
-    aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
-    return aidl;
-}
-
-ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
-        AudioInputFlags aidl) {
-    switch (aidl) {
-        case AudioInputFlags::FAST:
-            return AUDIO_INPUT_FLAG_FAST;
-        case AudioInputFlags::HW_HOTWORD:
-            return AUDIO_INPUT_FLAG_HW_HOTWORD;
-        case AudioInputFlags::RAW:
-            return AUDIO_INPUT_FLAG_RAW;
-        case AudioInputFlags::SYNC:
-            return AUDIO_INPUT_FLAG_SYNC;
-        case AudioInputFlags::MMAP_NOIRQ:
-            return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
-        case AudioInputFlags::VOIP_TX:
-            return AUDIO_INPUT_FLAG_VOIP_TX;
-        case AudioInputFlags::HW_AV_SYNC:
-            return AUDIO_INPUT_FLAG_HW_AV_SYNC;
-        case AudioInputFlags::DIRECT:
-            return AUDIO_INPUT_FLAG_DIRECT;
-        case AudioInputFlags::ULTRASOUND:
-            return AUDIO_INPUT_FLAG_ULTRASOUND;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
-        audio_input_flags_t legacy) {
-    switch (legacy) {
-        case AUDIO_INPUT_FLAG_NONE:
-            break; // shouldn't get here. must be listed  -Werror,-Wswitch
-        case AUDIO_INPUT_FLAG_FAST:
-            return AudioInputFlags::FAST;
-        case AUDIO_INPUT_FLAG_HW_HOTWORD:
-            return AudioInputFlags::HW_HOTWORD;
-        case AUDIO_INPUT_FLAG_RAW:
-            return AudioInputFlags::RAW;
-        case AUDIO_INPUT_FLAG_SYNC:
-            return AudioInputFlags::SYNC;
-        case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
-            return AudioInputFlags::MMAP_NOIRQ;
-        case AUDIO_INPUT_FLAG_VOIP_TX:
-            return AudioInputFlags::VOIP_TX;
-        case AUDIO_INPUT_FLAG_HW_AV_SYNC:
-            return AudioInputFlags::HW_AV_SYNC;
-        case AUDIO_INPUT_FLAG_DIRECT:
-            return AudioInputFlags::DIRECT;
-        case AUDIO_INPUT_FLAG_ULTRASOUND:
-            return AudioInputFlags::ULTRASOUND;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
-        AudioOutputFlags aidl) {
-    switch (aidl) {
-        case AudioOutputFlags::DIRECT:
-            return AUDIO_OUTPUT_FLAG_DIRECT;
-        case AudioOutputFlags::PRIMARY:
-            return AUDIO_OUTPUT_FLAG_PRIMARY;
-        case AudioOutputFlags::FAST:
-            return AUDIO_OUTPUT_FLAG_FAST;
-        case AudioOutputFlags::DEEP_BUFFER:
-            return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
-        case AudioOutputFlags::COMPRESS_OFFLOAD:
-            return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
-        case AudioOutputFlags::NON_BLOCKING:
-            return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
-        case AudioOutputFlags::HW_AV_SYNC:
-            return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
-        case AudioOutputFlags::TTS:
-            return AUDIO_OUTPUT_FLAG_TTS;
-        case AudioOutputFlags::RAW:
-            return AUDIO_OUTPUT_FLAG_RAW;
-        case AudioOutputFlags::SYNC:
-            return AUDIO_OUTPUT_FLAG_SYNC;
-        case AudioOutputFlags::IEC958_NONAUDIO:
-            return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
-        case AudioOutputFlags::DIRECT_PCM:
-            return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
-        case AudioOutputFlags::MMAP_NOIRQ:
-            return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
-        case AudioOutputFlags::VOIP_RX:
-            return AUDIO_OUTPUT_FLAG_VOIP_RX;
-        case AudioOutputFlags::INCALL_MUSIC:
-            return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
-        case AudioOutputFlags::GAPLESS_OFFLOAD:
-            return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
-        case AudioOutputFlags::ULTRASOUND:
-            return AUDIO_OUTPUT_FLAG_ULTRASOUND;
-        case AudioOutputFlags::SPATIALIZER:
-            return AUDIO_OUTPUT_FLAG_SPATIALIZER;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
-        audio_output_flags_t legacy) {
-    switch (legacy) {
-        case AUDIO_OUTPUT_FLAG_NONE:
-            break; // shouldn't get here. must be listed  -Werror,-Wswitch
-        case AUDIO_OUTPUT_FLAG_DIRECT:
-            return AudioOutputFlags::DIRECT;
-        case AUDIO_OUTPUT_FLAG_PRIMARY:
-            return AudioOutputFlags::PRIMARY;
-        case AUDIO_OUTPUT_FLAG_FAST:
-            return AudioOutputFlags::FAST;
-        case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
-            return AudioOutputFlags::DEEP_BUFFER;
-        case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
-            return AudioOutputFlags::COMPRESS_OFFLOAD;
-        case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
-            return AudioOutputFlags::NON_BLOCKING;
-        case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
-            return AudioOutputFlags::HW_AV_SYNC;
-        case AUDIO_OUTPUT_FLAG_TTS:
-            return AudioOutputFlags::TTS;
-        case AUDIO_OUTPUT_FLAG_RAW:
-            return AudioOutputFlags::RAW;
-        case AUDIO_OUTPUT_FLAG_SYNC:
-            return AudioOutputFlags::SYNC;
-        case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
-            return AudioOutputFlags::IEC958_NONAUDIO;
-        case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
-            return AudioOutputFlags::DIRECT_PCM;
-        case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
-            return AudioOutputFlags::MMAP_NOIRQ;
-        case AUDIO_OUTPUT_FLAG_VOIP_RX:
-            return AudioOutputFlags::VOIP_RX;
-        case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
-            return AudioOutputFlags::INCALL_MUSIC;
-        case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
-            return AudioOutputFlags::GAPLESS_OFFLOAD;
-        case AUDIO_OUTPUT_FLAG_ULTRASOUND:
-            return AudioOutputFlags::ULTRASOUND;
-        case AUDIO_OUTPUT_FLAG_SPATIALIZER:
-            return AudioOutputFlags::SPATIALIZER;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
-        int32_t aidl) {
-    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
-
-    LegacyMask converted = VALUE_OR_RETURN(
-            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, AudioInputFlags>(
-                    aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
-                    indexToEnum_index<AudioInputFlags>,
-                    enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
-    return static_cast<audio_input_flags_t>(converted);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
-        audio_input_flags_t legacy) {
-    using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
-
-    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
-    return convertBitmask<int32_t, LegacyMask, AudioInputFlags, audio_input_flags_t>(
-            legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
-            indexToEnum_bitmask<audio_input_flags_t>,
-            enumToMask_index<int32_t, AudioInputFlags>);
-}
-
-ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
-        int32_t aidl) {
-    return convertBitmask<audio_output_flags_t,
-            int32_t,
-            audio_output_flags_t,
-            AudioOutputFlags>(
-            aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
-            indexToEnum_index<AudioOutputFlags>,
-            enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
-        audio_output_flags_t legacy) {
-    using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
-
-    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
-    return convertBitmask<int32_t, LegacyMask, AudioOutputFlags, audio_output_flags_t>(
-            legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
-            indexToEnum_bitmask<audio_output_flags_t>,
-            enumToMask_index<int32_t, AudioOutputFlags>);
-}
-
-ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
-        const AudioIoFlags& aidl, bool isInput) {
-    audio_io_flags legacy;
-    if (isInput) {
-        legacy.input = VALUE_OR_RETURN(
-                aidl2legacy_int32_t_audio_input_flags_t_mask(
-                        VALUE_OR_RETURN(UNION_GET(aidl, input))));
-    } else {
-        legacy.output = VALUE_OR_RETURN(
-                aidl2legacy_int32_t_audio_output_flags_t_mask(
-                        VALUE_OR_RETURN(UNION_GET(aidl, output))));
-    }
-    return legacy;
-}
-
-ConversionResult<AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
-        const audio_io_flags& legacy, bool isInput) {
-    AudioIoFlags aidl;
-    if (isInput) {
-        UNION_SET(aidl, input,
-                VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(legacy.input)));
-    } else {
-        UNION_SET(aidl, output,
-                VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(legacy.output)));
-    }
-    return aidl;
-}
-
-ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
-        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
-    audio_port_config_device_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_module_handle_t(aidlDeviceExt.hwModule));
-    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
-                    aidl.device, &legacy.type, legacy.address));
-    return legacy;
-}
-
-status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
-        const audio_port_config_device_ext& legacy,
-        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
-    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl->device = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
-    return OK;
-}
-
-ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
-        AudioStreamType aidl) {
-    switch (aidl) {
-        case AudioStreamType::INVALID:
-            break;  // return error
-        case AudioStreamType::SYS_RESERVED_DEFAULT:
-            return AUDIO_STREAM_DEFAULT;
-        case AudioStreamType::VOICE_CALL:
-            return AUDIO_STREAM_VOICE_CALL;
-        case AudioStreamType::SYSTEM:
-            return AUDIO_STREAM_SYSTEM;
-        case AudioStreamType::RING:
-            return AUDIO_STREAM_RING;
-        case AudioStreamType::MUSIC:
-            return AUDIO_STREAM_MUSIC;
-        case AudioStreamType::ALARM:
-            return AUDIO_STREAM_ALARM;
-        case AudioStreamType::NOTIFICATION:
-            return AUDIO_STREAM_NOTIFICATION;
-        case AudioStreamType::BLUETOOTH_SCO:
-            return AUDIO_STREAM_BLUETOOTH_SCO;
-        case AudioStreamType::ENFORCED_AUDIBLE:
-            return AUDIO_STREAM_ENFORCED_AUDIBLE;
-        case AudioStreamType::DTMF:
-            return AUDIO_STREAM_DTMF;
-        case AudioStreamType::TTS:
-            return AUDIO_STREAM_TTS;
-        case AudioStreamType::ACCESSIBILITY:
-            return AUDIO_STREAM_ACCESSIBILITY;
-        case AudioStreamType::ASSISTANT:
-            return AUDIO_STREAM_ASSISTANT;
-        case AudioStreamType::SYS_RESERVED_REROUTING:
-            return AUDIO_STREAM_REROUTING;
-        case AudioStreamType::SYS_RESERVED_PATCH:
-            return AUDIO_STREAM_PATCH;
-        case AudioStreamType::CALL_ASSISTANT:
-            return AUDIO_STREAM_CALL_ASSISTANT;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
-        audio_stream_type_t legacy) {
-    switch (legacy) {
-        case AUDIO_STREAM_DEFAULT:
-            return AudioStreamType::SYS_RESERVED_DEFAULT;
-        case AUDIO_STREAM_VOICE_CALL:
-            return AudioStreamType::VOICE_CALL;
-        case AUDIO_STREAM_SYSTEM:
-            return AudioStreamType::SYSTEM;
-        case AUDIO_STREAM_RING:
-            return AudioStreamType::RING;
-        case AUDIO_STREAM_MUSIC:
-            return AudioStreamType::MUSIC;
-        case AUDIO_STREAM_ALARM:
-            return AudioStreamType::ALARM;
-        case AUDIO_STREAM_NOTIFICATION:
-            return AudioStreamType::NOTIFICATION;
-        case AUDIO_STREAM_BLUETOOTH_SCO:
-            return AudioStreamType::BLUETOOTH_SCO;
-        case AUDIO_STREAM_ENFORCED_AUDIBLE:
-            return AudioStreamType::ENFORCED_AUDIBLE;
-        case AUDIO_STREAM_DTMF:
-            return AudioStreamType::DTMF;
-        case AUDIO_STREAM_TTS:
-            return AudioStreamType::TTS;
-        case AUDIO_STREAM_ACCESSIBILITY:
-            return AudioStreamType::ACCESSIBILITY;
-        case AUDIO_STREAM_ASSISTANT:
-            return AudioStreamType::ASSISTANT;
-        case AUDIO_STREAM_REROUTING:
-            return AudioStreamType::SYS_RESERVED_REROUTING;
-        case AUDIO_STREAM_PATCH:
-            return AudioStreamType::SYS_RESERVED_PATCH;
-        case AUDIO_STREAM_CALL_ASSISTANT:
-            return AudioStreamType::CALL_ASSISTANT;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
-        AudioSource aidl) {
-    switch (aidl) {
-        case AudioSource::SYS_RESERVED_INVALID:
-            return AUDIO_SOURCE_INVALID;
-        case AudioSource::DEFAULT:
-            return AUDIO_SOURCE_DEFAULT;
-        case AudioSource::MIC:
-            return AUDIO_SOURCE_MIC;
-        case AudioSource::VOICE_UPLINK:
-            return AUDIO_SOURCE_VOICE_UPLINK;
-        case AudioSource::VOICE_DOWNLINK:
-            return AUDIO_SOURCE_VOICE_DOWNLINK;
-        case AudioSource::VOICE_CALL:
-            return AUDIO_SOURCE_VOICE_CALL;
-        case AudioSource::CAMCORDER:
-            return AUDIO_SOURCE_CAMCORDER;
-        case AudioSource::VOICE_RECOGNITION:
-            return AUDIO_SOURCE_VOICE_RECOGNITION;
-        case AudioSource::VOICE_COMMUNICATION:
-            return AUDIO_SOURCE_VOICE_COMMUNICATION;
-        case AudioSource::REMOTE_SUBMIX:
-            return AUDIO_SOURCE_REMOTE_SUBMIX;
-        case AudioSource::UNPROCESSED:
-            return AUDIO_SOURCE_UNPROCESSED;
-        case AudioSource::VOICE_PERFORMANCE:
-            return AUDIO_SOURCE_VOICE_PERFORMANCE;
-        case AudioSource::ULTRASOUND:
-            return AUDIO_SOURCE_ULTRASOUND;
-        case AudioSource::ECHO_REFERENCE:
-            return AUDIO_SOURCE_ECHO_REFERENCE;
-        case AudioSource::FM_TUNER:
-            return AUDIO_SOURCE_FM_TUNER;
-        case AudioSource::HOTWORD:
-            return AUDIO_SOURCE_HOTWORD;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioSource> legacy2aidl_audio_source_t_AudioSource(
-        audio_source_t legacy) {
-    switch (legacy) {
-        case AUDIO_SOURCE_INVALID:
-            return AudioSource::SYS_RESERVED_INVALID;
-        case AUDIO_SOURCE_DEFAULT:
-            return AudioSource::DEFAULT;
-        case AUDIO_SOURCE_MIC:
-            return AudioSource::MIC;
-        case AUDIO_SOURCE_VOICE_UPLINK:
-            return AudioSource::VOICE_UPLINK;
-        case AUDIO_SOURCE_VOICE_DOWNLINK:
-            return AudioSource::VOICE_DOWNLINK;
-        case AUDIO_SOURCE_VOICE_CALL:
-            return AudioSource::VOICE_CALL;
-        case AUDIO_SOURCE_CAMCORDER:
-            return AudioSource::CAMCORDER;
-        case AUDIO_SOURCE_VOICE_RECOGNITION:
-            return AudioSource::VOICE_RECOGNITION;
-        case AUDIO_SOURCE_VOICE_COMMUNICATION:
-            return AudioSource::VOICE_COMMUNICATION;
-        case AUDIO_SOURCE_REMOTE_SUBMIX:
-            return AudioSource::REMOTE_SUBMIX;
-        case AUDIO_SOURCE_UNPROCESSED:
-            return AudioSource::UNPROCESSED;
-        case AUDIO_SOURCE_VOICE_PERFORMANCE:
-            return AudioSource::VOICE_PERFORMANCE;
-        case AUDIO_SOURCE_ULTRASOUND:
-            return AudioSource::ULTRASOUND;
-        case AUDIO_SOURCE_ECHO_REFERENCE:
-            return AudioSource::ECHO_REFERENCE;
-        case AUDIO_SOURCE_FM_TUNER:
-            return AudioSource::FM_TUNER;
-        case AUDIO_SOURCE_HOTWORD:
-            return AudioSource::HOTWORD;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl) {
-    return convertReinterpret<audio_session_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-// This type is unnamed in the original definition, thus we name it here.
-using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
-
 ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortMixExtUseCase(
         const AudioPortMixExtUseCase& aidl, media::AudioPortRole role) {
-    audio_port_config_mix_ext_usecase legacy;
-
     switch (role) {
-        case media::AudioPortRole::NONE:
+        case media::AudioPortRole::NONE: {
+            audio_port_config_mix_ext_usecase legacy;
             // Just verify that the union is empty.
             VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
             return legacy;
-
+        }
         case media::AudioPortRole::SOURCE:
-            // This is not a bug. A SOURCE role corresponds to the stream field.
-            legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
-                    VALUE_OR_RETURN(UNION_GET(aidl, stream))));
-            return legacy;
-
+            return aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+                    aidl, false /*isInput*/);
         case media::AudioPortRole::SINK:
-            // This is not a bug. A SINK role corresponds to the source field.
-            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
-                    VALUE_OR_RETURN(UNION_GET(aidl, source))));
-            return legacy;
+            return aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+                    aidl, true /*isInput*/);
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
 
 ConversionResult<AudioPortMixExtUseCase> legacy2aidl_AudioPortMixExtUseCase(
         const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
-    AudioPortMixExtUseCase aidl;
-
     switch (role) {
-        case AUDIO_PORT_ROLE_NONE:
+        case AUDIO_PORT_ROLE_NONE: {
+            AudioPortMixExtUseCase aidl;
             UNION_SET(aidl, unspecified, false);
             return aidl;
+        }
         case AUDIO_PORT_ROLE_SOURCE:
-            // This is not a bug. A SOURCE role corresponds to the stream field.
-            UNION_SET(aidl, stream, VALUE_OR_RETURN(
-                    legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
-            return aidl;
+            return legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+                    legacy, false /*isInput*/);
         case AUDIO_PORT_ROLE_SINK:
-            // This is not a bug. A SINK role corresponds to the source field.
-            UNION_SET(aidl, source,
-                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source)));
-            return aidl;
+            return legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+                    legacy, true /*isInput*/);
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
@@ -1873,6 +220,8 @@
 ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
         const AudioPortMixExt& aidl, media::AudioPortRole role,
         const media::AudioPortMixExtSys& aidlMixExt) {
+    // Not using HAL-level 'aidl2legacy_AudioPortMixExt' as it does not support
+    // 'media::AudioPortRole::NONE'.
     audio_port_config_mix_ext legacy;
     legacy.hw_module = VALUE_OR_RETURN(
             aidl2legacy_int32_t_audio_module_handle_t(aidlMixExt.hwModule));
@@ -1884,6 +233,8 @@
 status_t legacy2aidl_AudioPortMixExt(
         const audio_port_config_mix_ext& legacy, audio_port_role_t role,
         AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+    // Not using HAL-level 'legacy2aidl_AudioPortMixExt' as it does not support
+    // 'AUDIO_PORT_ROLE_NONE'.
     aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
     aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
@@ -1905,12 +256,34 @@
     return legacy2aidl_audio_session_t_int32_t(legacy.session);
 }
 
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
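+    // Convert the common part first, then overlay the framework-side hw_module handle.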
+    audio_port_config_device_ext legacy = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(aidl));
+    legacy.hw_module = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidlDeviceExt.hwModule));
+    return legacy;
+}
+
+status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy,
+        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+    *aidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(legacy));
+    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    return OK;
+}
+
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_config_ext = decltype(audio_port_config::ext);
 
 ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortExt_audio_port_config_ext(
         const AudioPortExt& aidl, media::AudioPortType type,
         media::AudioPortRole role, const media::AudioPortExtSys& aidlSys) {
+    // Not using HAL-level 'aidl2legacy_AudioPortExt_audio_port_config_ext' as it does not support
+    // 'media::AudioPortType::SESSION'.
     audio_port_config_ext legacy;
     switch (type) {
         case media::AudioPortType::NONE:
@@ -1932,7 +305,7 @@
         case media::AudioPortType::SESSION:
             legacy.session = VALUE_OR_RETURN(
                     aidl2legacy_int32_t_audio_port_config_session_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, session))));
             return legacy;
 
     }
@@ -1942,6 +315,8 @@
 status_t legacy2aidl_AudioPortExt(
         const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role,
         AudioPortExt* aidl, media::AudioPortExtSys* aidlSys) {
+    // Not using the HAL-level conversion here as it does not support
+    // 'AUDIO_PORT_TYPE_SESSION'.
     switch (type) {
         case AUDIO_PORT_TYPE_NONE:
             UNION_SET(*aidl, unspecified, false);
@@ -1966,91 +341,47 @@
             return OK;
         }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, session, VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_port_config_session_ext_int32_t(legacy.session)));
-            UNION_SET(*aidlSys, unspecified, false);
             return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
 
-ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
-        const media::AudioPortConfig& aidl) {
-    audio_port_config legacy{};
-    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfigFw_audio_port_config(
+        const media::AudioPortConfigFw& aidl, int32_t* aidlPortId) {
+    const bool isInput = VALUE_OR_RETURN(
+            portDirection(aidl.sys.role, aidl.sys.type)) == AudioPortDirection::INPUT;
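+    // The common conversion below handles the HAL-visible fields; the framework-only
+    // role, type, and ext union are filled in afterwards.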
+    audio_port_config legacy;
+    int32_t aidlPortIdHolder;
+    RETURN_IF_ERROR(aidl2legacy_AudioPortConfig_audio_port_config(
+                    aidl.hal, isInput, &legacy, &aidlPortIdHolder));
+    if (aidlPortId != nullptr) *aidlPortId = aidlPortIdHolder;
     legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
     legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
-    const bool isInput =
-            VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
-    if (aidl.hal.sampleRate.has_value()) {
-        legacy.sample_rate = VALUE_OR_RETURN(
-                convertIntegral<unsigned int>(aidl.hal.sampleRate.value().value));
-        legacy.config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
-    }
-    if (aidl.hal.channelMask.has_value()) {
-        legacy.channel_mask =
-                VALUE_OR_RETURN(
-                        aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-                                aidl.hal.channelMask.value(), isInput));
-        legacy.config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
-    }
-    if (aidl.hal.format.has_value()) {
-        legacy.format = VALUE_OR_RETURN(
-                aidl2legacy_AudioFormatDescription_audio_format_t(aidl.hal.format.value()));
-        legacy.config_mask |= AUDIO_PORT_CONFIG_FORMAT;
-    }
-    if (aidl.hal.gain.has_value()) {
-        legacy.gain = VALUE_OR_RETURN(aidl2legacy_AudioGainConfig_audio_gain_config(
-                        aidl.hal.gain.value(), isInput));
-        legacy.config_mask |= AUDIO_PORT_CONFIG_GAIN;
-    }
-    if (aidl.hal.flags.has_value()) {
-        legacy.flags = VALUE_OR_RETURN(
-                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.hal.flags.value(), isInput));
-        legacy.config_mask |= AUDIO_PORT_CONFIG_FLAGS;
-    }
     legacy.ext = VALUE_OR_RETURN(
             aidl2legacy_AudioPortExt_audio_port_config_ext(
                     aidl.hal.ext, aidl.sys.type, aidl.sys.role, aidl.sys.ext));
     return legacy;
 }
 
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
-        const audio_port_config& legacy) {
-    media::AudioPortConfig aidl;
-    aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfigFw(
+        const audio_port_config& legacy, int32_t portId) {
+    const bool isInput = VALUE_OR_RETURN(
+            portDirection(legacy.role, legacy.type)) == AudioPortDirection::INPUT;
+    media::AudioPortConfigFw aidl;
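+    // The common conversion covers the HAL part; role, type, and ext are framework-only.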
+    aidl.hal = VALUE_OR_RETURN(
+            legacy2aidl_audio_port_config_AudioPortConfig(legacy, isInput, portId));
     aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
     aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
-    const bool isInput = VALUE_OR_RETURN(
-            direction(legacy.role, legacy.type)) == Direction::INPUT;
-    if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
-        Int aidl_sampleRate;
-        aidl_sampleRate.value = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
-        aidl.hal.sampleRate = aidl_sampleRate;
-    }
-    if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
-        aidl.hal.channelMask = VALUE_OR_RETURN(
-                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
-    }
-    if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
-        aidl.hal.format = VALUE_OR_RETURN(
-                legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
-    }
-    if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        aidl.hal.gain = VALUE_OR_RETURN(
-                legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
-    }
-    if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
-        aidl.hal.flags = VALUE_OR_RETURN(
-                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, isInput));
-    }
     RETURN_IF_ERROR(legacy2aidl_AudioPortExt(legacy.ext, legacy.type, legacy.role,
                     &aidl.hal.ext, &aidl.sys.ext));
     return aidl;
 }
 
-ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
-        const media::AudioPatch& aidl) {
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatchFw_audio_patch(
+        const media::AudioPatchFw& aidl) {
     struct audio_patch legacy;
     legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
     legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
@@ -2059,7 +390,7 @@
     }
     for (size_t i = 0; i < legacy.num_sinks; ++i) {
         legacy.sinks[i] =
-                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sinks[i]));
     }
     legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
     if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
@@ -2067,14 +398,14 @@
     }
     for (size_t i = 0; i < legacy.num_sources; ++i) {
         legacy.sources[i] =
-                VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+                VALUE_OR_RETURN(aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sources[i]));
     }
     return legacy;
 }
 
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatchFw(
         const struct audio_patch& legacy) {
-    media::AudioPatch aidl;
+    media::AudioPatchFw aidl;
     aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
 
     if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
@@ -2082,14 +413,14 @@
     }
     for (unsigned int i = 0; i < legacy.num_sinks; ++i) {
         aidl.sinks.push_back(
-                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[i])));
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.sinks[i])));
     }
     if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
         return unexpected(BAD_VALUE);
     }
     for (unsigned int i = 0; i < legacy.num_sources; ++i) {
         aidl.sources.push_back(
-                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[i])));
+                VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.sources[i])));
     }
     return aidl;
 }
@@ -2099,7 +430,7 @@
     const audio_io_handle_t io_handle = VALUE_OR_RETURN(
             aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
     const struct audio_patch patch = VALUE_OR_RETURN(
-            aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+            aidl2legacy_AudioPatchFw_audio_patch(aidl.patch));
     const bool isInput = aidl.isInput;
     const uint32_t sampling_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
     const audio_format_t format = VALUE_OR_RETURN(
@@ -2119,7 +450,7 @@
         const sp<AudioIoDescriptor>& legacy) {
     media::AudioIoDescriptor aidl;
     aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->getIoHandle()));
-    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->getPatch()));
+    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatchFw(legacy->getPatch()));
     aidl.isInput = legacy->getIsInput();
     aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->getSamplingRate()));
     aidl.format = VALUE_OR_RETURN(
@@ -2149,148 +480,6 @@
     return aidl;
 }
 
-ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(AudioContentType aidl) {
-    switch (aidl) {
-        case AudioContentType::UNKNOWN:
-            return AUDIO_CONTENT_TYPE_UNKNOWN;
-        case AudioContentType::SPEECH:
-            return AUDIO_CONTENT_TYPE_SPEECH;
-        case AudioContentType::MUSIC:
-            return AUDIO_CONTENT_TYPE_MUSIC;
-        case AudioContentType::MOVIE:
-            return AUDIO_CONTENT_TYPE_MOVIE;
-        case AudioContentType::SONIFICATION:
-            return AUDIO_CONTENT_TYPE_SONIFICATION;
-        case AudioContentType::ULTRASOUND:
-            return AUDIO_CONTENT_TYPE_ULTRASOUND;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioContentType>
-legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
-    switch (legacy) {
-        case AUDIO_CONTENT_TYPE_UNKNOWN:
-            return AudioContentType::UNKNOWN;
-        case AUDIO_CONTENT_TYPE_SPEECH:
-            return AudioContentType::SPEECH;
-        case AUDIO_CONTENT_TYPE_MUSIC:
-            return AudioContentType::MUSIC;
-        case AUDIO_CONTENT_TYPE_MOVIE:
-            return AudioContentType::MOVIE;
-        case AUDIO_CONTENT_TYPE_SONIFICATION:
-            return AudioContentType::SONIFICATION;
-        case AUDIO_CONTENT_TYPE_ULTRASOUND:
-            return AudioContentType::ULTRASOUND;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_usage_t>
-aidl2legacy_AudioUsage_audio_usage_t(AudioUsage aidl) {
-    switch (aidl) {
-        case AudioUsage::INVALID:
-            break;  // return error
-        case AudioUsage::UNKNOWN:
-            return AUDIO_USAGE_UNKNOWN;
-        case AudioUsage::MEDIA:
-            return AUDIO_USAGE_MEDIA;
-        case AudioUsage::VOICE_COMMUNICATION:
-            return AUDIO_USAGE_VOICE_COMMUNICATION;
-        case AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
-            return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
-        case AudioUsage::ALARM:
-            return AUDIO_USAGE_ALARM;
-        case AudioUsage::NOTIFICATION:
-            return AUDIO_USAGE_NOTIFICATION;
-        case AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
-            return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
-        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST:
-            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
-        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT:
-            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
-        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED:
-            return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
-        case AudioUsage::NOTIFICATION_EVENT:
-            return AUDIO_USAGE_NOTIFICATION_EVENT;
-        case AudioUsage::ASSISTANCE_ACCESSIBILITY:
-            return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
-        case AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
-            return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
-        case AudioUsage::ASSISTANCE_SONIFICATION:
-            return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
-        case AudioUsage::GAME:
-            return AUDIO_USAGE_GAME;
-        case AudioUsage::VIRTUAL_SOURCE:
-            return AUDIO_USAGE_VIRTUAL_SOURCE;
-        case AudioUsage::ASSISTANT:
-            return AUDIO_USAGE_ASSISTANT;
-        case AudioUsage::CALL_ASSISTANT:
-            return AUDIO_USAGE_CALL_ASSISTANT;
-        case AudioUsage::EMERGENCY:
-            return AUDIO_USAGE_EMERGENCY;
-        case AudioUsage::SAFETY:
-            return AUDIO_USAGE_SAFETY;
-        case AudioUsage::VEHICLE_STATUS:
-            return AUDIO_USAGE_VEHICLE_STATUS;
-        case AudioUsage::ANNOUNCEMENT:
-            return AUDIO_USAGE_ANNOUNCEMENT;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioUsage>
-legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
-    switch (legacy) {
-        case AUDIO_USAGE_UNKNOWN:
-            return AudioUsage::UNKNOWN;
-        case AUDIO_USAGE_MEDIA:
-            return AudioUsage::MEDIA;
-        case AUDIO_USAGE_VOICE_COMMUNICATION:
-            return AudioUsage::VOICE_COMMUNICATION;
-        case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
-            return AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
-        case AUDIO_USAGE_ALARM:
-            return AudioUsage::ALARM;
-        case AUDIO_USAGE_NOTIFICATION:
-            return AudioUsage::NOTIFICATION;
-        case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
-            return AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
-        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
-            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST;
-        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
-            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT;
-        case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
-            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED;
-        case AUDIO_USAGE_NOTIFICATION_EVENT:
-            return AudioUsage::NOTIFICATION_EVENT;
-        case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
-            return AudioUsage::ASSISTANCE_ACCESSIBILITY;
-        case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
-            return AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
-        case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
-            return AudioUsage::ASSISTANCE_SONIFICATION;
-        case AUDIO_USAGE_GAME:
-            return AudioUsage::GAME;
-        case AUDIO_USAGE_VIRTUAL_SOURCE:
-            return AudioUsage::VIRTUAL_SOURCE;
-        case AUDIO_USAGE_ASSISTANT:
-            return AudioUsage::ASSISTANT;
-        case AUDIO_USAGE_CALL_ASSISTANT:
-            return AudioUsage::CALL_ASSISTANT;
-        case AUDIO_USAGE_EMERGENCY:
-            return AudioUsage::EMERGENCY;
-        case AUDIO_USAGE_SAFETY:
-            return AudioUsage::SAFETY;
-        case AUDIO_USAGE_VEHICLE_STATUS:
-            return AudioUsage::VEHICLE_STATUS;
-        case AUDIO_USAGE_ANNOUNCEMENT:
-            return AudioUsage::ANNOUNCEMENT;
-    }
-    return unexpected(BAD_VALUE);
-}
-
 ConversionResult<audio_flags_mask_t>
 aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl) {
     switch (aidl) {
@@ -2414,140 +603,6 @@
     return aidl;
 }
 
-ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(AudioEncapsulationMode aidl) {
-    switch (aidl) {
-        case AudioEncapsulationMode::INVALID:
-            break;  // return error
-        case AudioEncapsulationMode::NONE:
-            return AUDIO_ENCAPSULATION_MODE_NONE;
-        case AudioEncapsulationMode::ELEMENTARY_STREAM:
-            return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
-        case AudioEncapsulationMode::HANDLE:
-            return AUDIO_ENCAPSULATION_MODE_HANDLE;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioEncapsulationMode>
-legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy) {
-    switch (legacy) {
-        case AUDIO_ENCAPSULATION_MODE_NONE:
-            return AudioEncapsulationMode::NONE;
-        case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
-            return AudioEncapsulationMode::ELEMENTARY_STREAM;
-        case AUDIO_ENCAPSULATION_MODE_HANDLE:
-            return AudioEncapsulationMode::HANDLE;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const AudioOffloadInfo& aidl) {
-    audio_offload_info_t legacy = AUDIO_INFO_INITIALIZER;
-    audio_config_base_t base = VALUE_OR_RETURN(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, false /*isInput*/));
-    legacy.sample_rate = base.sample_rate;
-    legacy.channel_mask = base.channel_mask;
-    legacy.format = base.format;
-    legacy.stream_type = VALUE_OR_RETURN(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
-    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRatePerSecond));
-    legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
-    legacy.has_video = aidl.hasVideo;
-    legacy.is_streaming = aidl.isStreaming;
-    legacy.bit_width = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitWidth));
-    legacy.offload_buffer_size = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.offloadBufferSize));
-    legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
-    legacy.encapsulation_mode = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(aidl.encapsulationMode));
-    legacy.content_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.contentId));
-    legacy.sync_id = VALUE_OR_RETURN(convertReinterpret<int32_t>(aidl.syncId));
-    return legacy;
-}
-
-ConversionResult<AudioOffloadInfo>
-legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
-    AudioOffloadInfo aidl;
-    // Version 0.1 fields.
-    if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
-        return unexpected(BAD_VALUE);
-    }
-    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
-        .channel_mask = legacy.channel_mask, .format = legacy.format };
-    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(
-                    base, false /*isInput*/));
-    aidl.streamType = VALUE_OR_RETURN(
-            legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
-    aidl.bitRatePerSecond = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
-    aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
-    aidl.hasVideo = legacy.has_video;
-    aidl.isStreaming = legacy.is_streaming;
-    aidl.bitWidth = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_width));
-    aidl.offloadBufferSize = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.offload_buffer_size));
-    aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
-
-    // Version 0.2 fields.
-    if (legacy.version >= AUDIO_OFFLOAD_INFO_VERSION_0_2) {
-        if (legacy.size <
-            offsetof(audio_offload_info_t, sync_id) + sizeof(audio_offload_info_t::sync_id)) {
-            return unexpected(BAD_VALUE);
-        }
-        aidl.encapsulationMode = VALUE_OR_RETURN(
-                legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(
-                        legacy.encapsulation_mode));
-        aidl.contentId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.content_id));
-        aidl.syncId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.sync_id));
-    }
-    return aidl;
-}
-
-ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const AudioConfig& aidl, bool isInput) {
-    const audio_config_base_t legacyBase = VALUE_OR_RETURN(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, isInput));
-    audio_config_t legacy = AUDIO_CONFIG_INITIALIZER;
-    legacy.sample_rate = legacyBase.sample_rate;
-    legacy.channel_mask = legacyBase.channel_mask;
-    legacy.format = legacyBase.format;
-    legacy.offload_info = VALUE_OR_RETURN(
-            aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
-    legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
-    return legacy;
-}
-
-ConversionResult<AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput) {
-    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
-        .channel_mask = legacy.channel_mask, .format = legacy.format };
-    AudioConfig aidl;
-    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(base, isInput));
-    aidl.offloadInfo = VALUE_OR_RETURN(
-            legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
-    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
-    return aidl;
-}
-
-ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(const AudioConfigBase& aidl, bool isInput) {
-    audio_config_base_t legacy;
-    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
-    legacy.channel_mask = VALUE_OR_RETURN(
-            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
-    return legacy;
-}
-
-ConversionResult<AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput) {
-    AudioConfigBase aidl;
-    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
-    aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
-    return aidl;
-}
-
 ConversionResult<sp<IMemory>>
 aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl) {
     sp<IMemory> legacy;
@@ -2566,8 +621,8 @@
     return aidl;
 }
 
-ConversionResult<sp<IMemory>>
-aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl) {
+ConversionResult<sp<IMemory>> aidl2legacy_NullableSharedFileRegion_IMemory(
+        const std::optional<media::SharedFileRegion>& aidl) {
     sp<IMemory> legacy;
     if (!convertNullableSharedFileRegionToIMemory(aidl, &legacy)) {
         return unexpected(BAD_VALUE);
@@ -2602,31 +657,6 @@
     return aidl;
 }
 
-ConversionResult<audio_uuid_t>
-aidl2legacy_AudioUuid_audio_uuid_t(const AudioUuid& aidl) {
-    audio_uuid_t legacy;
-    legacy.timeLow = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.timeLow));
-    legacy.timeMid = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeMid));
-    legacy.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeHiAndVersion));
-    legacy.clockSeq = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.clockSeq));
-    if (aidl.node.size() != std::size(legacy.node)) {
-        return unexpected(BAD_VALUE);
-    }
-    std::copy(aidl.node.begin(), aidl.node.end(), legacy.node);
-    return legacy;
-}
-
-ConversionResult<AudioUuid>
-legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy) {
-    AudioUuid aidl;
-    aidl.timeLow = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.timeLow));
-    aidl.timeMid = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeMid));
-    aidl.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeHiAndVersion));
-    aidl.clockSeq = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.clockSeq));
-    std::copy(legacy.node, legacy.node + std::size(legacy.node), std::back_inserter(aidl.node));
-    return aidl;
-}
-
 ConversionResult<effect_descriptor_t>
 aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl) {
     effect_descriptor_t legacy;
@@ -2657,123 +687,21 @@
     return aidl;
 }
 
-ConversionResult<audio_encapsulation_metadata_type_t>
-aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
-        AudioEncapsulationMetadataType aidl) {
-    switch (aidl) {
-        case AudioEncapsulationMetadataType::NONE:
-            return AUDIO_ENCAPSULATION_METADATA_TYPE_NONE;
-        case AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
-            return AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER;
-        case AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
-            return AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioEncapsulationMetadataType>
-legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
-        audio_encapsulation_metadata_type_t legacy) {
-    switch (legacy) {
-        case AUDIO_ENCAPSULATION_METADATA_TYPE_NONE:
-            return AudioEncapsulationMetadataType::NONE;
-        case AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER:
-            return AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
-        case AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR:
-            return AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<uint32_t>
-aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl) {
-    return convertBitmask<uint32_t,
-            int32_t,
-            audio_encapsulation_mode_t,
-            AudioEncapsulationMode>(
-            aidl, aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t,
-            indexToEnum_index<AudioEncapsulationMode>,
-            enumToMask_index<uint32_t, audio_encapsulation_mode_t>);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy) {
-    return convertBitmask<int32_t,
-            uint32_t,
-            AudioEncapsulationMode,
-            audio_encapsulation_mode_t>(
-            legacy, legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode,
-            indexToEnum_index<audio_encapsulation_mode_t>,
-            enumToMask_index<int32_t, AudioEncapsulationMode>);
-}
-
-ConversionResult<uint32_t>
-aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl) {
-    return convertBitmask<uint32_t,
-            int32_t,
-            audio_encapsulation_metadata_type_t,
-            AudioEncapsulationMetadataType>(
-            aidl, aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t,
-            indexToEnum_index<AudioEncapsulationMetadataType>,
-            enumToMask_index<uint32_t, audio_encapsulation_metadata_type_t>);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy) {
-    return convertBitmask<int32_t,
-            uint32_t,
-            AudioEncapsulationMetadataType,
-            audio_encapsulation_metadata_type_t>(
-            legacy, legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType,
-            indexToEnum_index<audio_encapsulation_metadata_type_t>,
-            enumToMask_index<int32_t, AudioEncapsulationMetadataType>);
-}
-
-ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
-        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
-    audio_port_device_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
-    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
-                    aidl.device, &legacy.type, legacy.address));
-    legacy.encapsulation_modes = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMode_mask(aidlSys.encapsulationModes));
-    legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(
-                    aidlSys.encapsulationMetadataTypes));
-    return legacy;
-}
-
-status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
-        const audio_port_device_ext& legacy,
-        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
-    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl->device = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
-    aidlDeviceExt->encapsulationModes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
-    aidlDeviceExt->encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
-    return OK;
-}
-
 ConversionResult<audio_port_mix_ext>
 aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
         const AudioPortMixExt& aidl, const media::AudioPortMixExtSys& aidlSys) {
-    audio_port_mix_ext legacy{};
+    audio_port_mix_ext legacy = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortMixExt_audio_port_mix_ext(aidl));
     legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
-    legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
     return legacy;
 }
 
 status_t
 legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy,
         AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+    *aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy));
     aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
     return OK;
 }
 
@@ -2789,6 +717,25 @@
     return legacy2aidl_audio_session_t_int32_t(legacy.session);
 }
 
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
+    audio_port_device_ext legacy = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(aidl));
+    legacy.hw_module = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
+    return legacy;
+}
+
+status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy,
+        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+    *aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy));
+    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    return OK;
+}
+
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_v7_ext = decltype(audio_port_v7::ext);
 
@@ -2816,7 +763,7 @@
         case media::AudioPortType::SESSION:
             legacy.session = VALUE_OR_RETURN(
                     aidl2legacy_int32_t_audio_port_session_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, session))));
             return legacy;
 
     }
@@ -2852,247 +799,48 @@
             return OK;
         }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, session, VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_port_session_ext_int32_t(legacy.session)));
-            UNION_SET(*aidlSys, unspecified, false);
             return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
 
-ConversionResult<audio_profile>
-aidl2legacy_AudioProfile_audio_profile(const AudioProfile& aidl, bool isInput) {
-    audio_profile legacy;
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
-
-    if (aidl.sampleRates.size() > std::size(legacy.sample_rates)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(aidl.sampleRates.begin(), aidl.sampleRates.end(), legacy.sample_rates,
-                         convertIntegral<int32_t, unsigned int>));
-    legacy.num_sample_rates = aidl.sampleRates.size();
-
-    if (aidl.channelMasks.size() > std::size(legacy.channel_masks)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
-                    [isInput](const AudioChannelLayout& l) {
-                        return aidl2legacy_AudioChannelLayout_audio_channel_mask_t(l, isInput);
-                    }));
-    legacy.num_channel_masks = aidl.channelMasks.size();
-
-    legacy.encapsulation_type = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(aidl.encapsulationType));
-    return legacy;
-}
-
-ConversionResult<AudioProfile>
-legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy, bool isInput) {
-    AudioProfile aidl;
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
-
-    if (legacy.num_sample_rates > std::size(legacy.sample_rates)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(legacy.sample_rates, legacy.sample_rates + legacy.num_sample_rates,
-                         std::back_inserter(aidl.sampleRates),
-                         convertIntegral<unsigned int, int32_t>));
-
-    if (legacy.num_channel_masks > std::size(legacy.channel_masks)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
-                         std::back_inserter(aidl.channelMasks),
-                    [isInput](audio_channel_mask_t m) {
-                        return legacy2aidl_audio_channel_mask_t_AudioChannelLayout(m, isInput);
-                    }));
-
-    aidl.encapsulationType = VALUE_OR_RETURN(
-            legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
-                    legacy.encapsulation_type));
-    return aidl;
-}
-
-ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const AudioGain& aidl, bool isInput) {
-    audio_gain legacy;
-    legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
-    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-                    aidl.channelMask, isInput));
-    legacy.min_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.minValue));
-    legacy.max_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.maxValue));
-    legacy.default_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.defaultValue));
-    legacy.step_value = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.stepValue));
-    legacy.min_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.minRampMs));
-    legacy.max_ramp_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.maxRampMs));
-    return legacy;
-}
-
-ConversionResult<AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput) {
-    AudioGain aidl;
-    aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
-    aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
-    aidl.minValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_value));
-    aidl.maxValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_value));
-    aidl.defaultValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.default_value));
-    aidl.stepValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.step_value));
-    aidl.minRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_ramp_ms));
-    aidl.maxRampMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_ramp_ms));
-    return aidl;
-}
-
 ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
-    audio_port_v7 legacy;
-    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+aidl2legacy_AudioPortFw_audio_port_v7(const media::AudioPortFw& aidl) {
+    const bool isInput = VALUE_OR_RETURN(
+            portDirection(aidl.sys.role, aidl.sys.type)) == AudioPortDirection::INPUT;
+    audio_port_v7 legacy = VALUE_OR_RETURN(aidl2legacy_AudioPort_audio_port_v7(aidl.hal, isInput));
     legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
     legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
-    RETURN_IF_ERROR(aidl2legacy_string(aidl.hal.name, legacy.name, sizeof(legacy.name)));
-
-    if (aidl.hal.profiles.size() > std::size(legacy.audio_profiles)) {
-        return unexpected(BAD_VALUE);
-    }
-    const bool isInput =
-            VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
-    RETURN_IF_ERROR(convertRange(
-                    aidl.hal.profiles.begin(), aidl.hal.profiles.end(), legacy.audio_profiles,
-                    [isInput](const AudioProfile& p) {
-                        return aidl2legacy_AudioProfile_audio_profile(p, isInput);
-                    }));
-    legacy.num_audio_profiles = aidl.hal.profiles.size();
-
-    if (aidl.hal.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(
-                    aidl.hal.extraAudioDescriptors.begin(), aidl.hal.extraAudioDescriptors.end(),
-                    legacy.extra_audio_descriptors,
-                    aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
-    legacy.num_extra_audio_descriptors = aidl.hal.extraAudioDescriptors.size();
-
-    if (aidl.hal.gains.size() > std::size(legacy.gains)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(convertRange(aidl.hal.gains.begin(), aidl.hal.gains.end(), legacy.gains,
-                                 [isInput](const AudioGain& g) {
-                                     return aidl2legacy_AudioGain_audio_gain(g, isInput);
-                                 }));
-    legacy.num_gains = aidl.hal.gains.size();
 
     legacy.active_config = VALUE_OR_RETURN(
-            aidl2legacy_AudioPortConfig_audio_port_config(aidl.sys.activeConfig));
+            aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sys.activeConfig));
     legacy.ext = VALUE_OR_RETURN(
             aidl2legacy_AudioPortExt_audio_port_v7_ext(aidl.hal.ext, aidl.sys.type, aidl.sys.ext));
     return legacy;
 }
 
-ConversionResult<media::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
-    media::AudioPort aidl;
-    aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ConversionResult<media::AudioPortFw>
+legacy2aidl_audio_port_v7_AudioPortFw(const audio_port_v7& legacy) {
+    const bool isInput = VALUE_OR_RETURN(
+            portDirection(legacy.role, legacy.type)) == AudioPortDirection::INPUT;
+    media::AudioPortFw aidl;
+    aidl.hal = VALUE_OR_RETURN(legacy2aidl_audio_port_v7_AudioPort(legacy, isInput));
     aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
     aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
-    aidl.hal.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
-
-    if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
-        return unexpected(BAD_VALUE);
-    }
-    const bool isInput = VALUE_OR_RETURN(direction(legacy.role, legacy.type)) == Direction::INPUT;
-    RETURN_IF_ERROR(
-            convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
-                         std::back_inserter(aidl.hal.profiles),
-                         [isInput](const audio_profile& p) {
-                             return legacy2aidl_audio_profile_AudioProfile(p, isInput);
-                         }));
-
-    if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
-        return unexpected(BAD_VALUE);
-    }
+    // These get filled by the call to 'legacy2aidl_AudioPortExt' below.
     aidl.sys.profiles.resize(legacy.num_audio_profiles);
-    RETURN_IF_ERROR(
-            convertRange(legacy.extra_audio_descriptors,
-                    legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
-                    std::back_inserter(aidl.hal.extraAudioDescriptors),
-                    legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
-
-    if (legacy.num_gains > std::size(legacy.gains)) {
-        return unexpected(BAD_VALUE);
-    }
-    RETURN_IF_ERROR(
-            convertRange(legacy.gains, legacy.gains + legacy.num_gains,
-                         std::back_inserter(aidl.hal.gains),
-                         [isInput](const audio_gain& g) {
-                             return legacy2aidl_audio_gain_AudioGain(g, isInput);
-                         }));
     aidl.sys.gains.resize(legacy.num_gains);
-
     aidl.sys.activeConfig = VALUE_OR_RETURN(
-            legacy2aidl_audio_port_config_AudioPortConfig(legacy.active_config));
+            legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.active_config, legacy.id));
     aidl.sys.activeConfig.hal.portId = aidl.hal.id;
     RETURN_IF_ERROR(
             legacy2aidl_AudioPortExt(legacy.ext, legacy.type, &aidl.hal.ext, &aidl.sys.ext));
     return aidl;
 }
 
-ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(AudioMode aidl) {
-    switch (aidl) {
-        case AudioMode::SYS_RESERVED_INVALID:
-            return AUDIO_MODE_INVALID;
-        case AudioMode::SYS_RESERVED_CURRENT:
-            return AUDIO_MODE_CURRENT;
-        case AudioMode::NORMAL:
-            return AUDIO_MODE_NORMAL;
-        case AudioMode::RINGTONE:
-            return AUDIO_MODE_RINGTONE;
-        case AudioMode::IN_CALL:
-            return AUDIO_MODE_IN_CALL;
-        case AudioMode::IN_COMMUNICATION:
-            return AUDIO_MODE_IN_COMMUNICATION;
-        case AudioMode::CALL_SCREEN:
-            return AUDIO_MODE_CALL_SCREEN;
-        case AudioMode::SYS_RESERVED_CALL_REDIRECT:
-            return AUDIO_MODE_CALL_REDIRECT;
-        case AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT:
-            return AUDIO_MODE_COMMUNICATION_REDIRECT;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioMode>
-legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy) {
-    switch (legacy) {
-        case AUDIO_MODE_INVALID:
-            return AudioMode::SYS_RESERVED_INVALID;
-        case AUDIO_MODE_CURRENT:
-            return AudioMode::SYS_RESERVED_CURRENT;
-        case AUDIO_MODE_NORMAL:
-            return AudioMode::NORMAL;
-        case AUDIO_MODE_RINGTONE:
-            return AudioMode::RINGTONE;
-        case AUDIO_MODE_IN_CALL:
-            return AudioMode::IN_CALL;
-        case AUDIO_MODE_IN_COMMUNICATION:
-            return AudioMode::IN_COMMUNICATION;
-        case AUDIO_MODE_CALL_SCREEN:
-            return AudioMode::CALL_SCREEN;
-        case AUDIO_MODE_CALL_REDIRECT:
-            return AudioMode::SYS_RESERVED_CALL_REDIRECT;
-        case AUDIO_MODE_COMMUNICATION_REDIRECT:
-            return AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT;
-        case AUDIO_MODE_CNT:
-            break;
-    }
-    return unexpected(BAD_VALUE);
-}
-
 ConversionResult<audio_unique_id_use_t>
 aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl) {
     switch (aidl) {
@@ -3161,160 +909,6 @@
     return convertReinterpret<int32_t>(legacy);
 }
 
-ConversionResult<audio_dual_mono_mode_t>
-aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::AudioDualMonoMode aidl) {
-    switch (aidl) {
-        case media::AudioDualMonoMode::OFF:
-            return AUDIO_DUAL_MONO_MODE_OFF;
-        case media::AudioDualMonoMode::LR:
-            return AUDIO_DUAL_MONO_MODE_LR;
-        case media::AudioDualMonoMode::LL:
-            return AUDIO_DUAL_MONO_MODE_LL;
-        case media::AudioDualMonoMode::RR:
-            return AUDIO_DUAL_MONO_MODE_RR;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioDualMonoMode>
-legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy) {
-    switch (legacy) {
-        case AUDIO_DUAL_MONO_MODE_OFF:
-            return media::AudioDualMonoMode::OFF;
-        case AUDIO_DUAL_MONO_MODE_LR:
-            return media::AudioDualMonoMode::LR;
-        case AUDIO_DUAL_MONO_MODE_LL:
-            return media::AudioDualMonoMode::LL;
-        case AUDIO_DUAL_MONO_MODE_RR:
-            return media::AudioDualMonoMode::RR;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_timestretch_fallback_mode_t>
-aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(int32_t aidl) {
-    return convertReinterpret<audio_timestretch_fallback_mode_t>(aidl);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(audio_timestretch_fallback_mode_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_timestretch_stretch_mode_t>
-aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(int32_t aidl) {
-    return convertReinterpret<audio_timestretch_stretch_mode_t>(aidl);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(audio_timestretch_stretch_mode_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_playback_rate_t>
-aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(const media::AudioPlaybackRate& aidl) {
-    audio_playback_rate_t legacy;
-    legacy.mSpeed = aidl.speed;
-    legacy.mPitch = aidl.pitch;
-    legacy.mFallbackMode = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(aidl.fallbackMode));
-    legacy.mStretchMode = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(aidl.stretchMode));
-    return legacy;
-}
-
-ConversionResult<media::AudioPlaybackRate>
-legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy) {
-    media::AudioPlaybackRate aidl;
-    aidl.speed = legacy.mSpeed;
-    aidl.pitch = legacy.mPitch;
-    aidl.fallbackMode = VALUE_OR_RETURN(
-            legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(legacy.mFallbackMode));
-    aidl.stretchMode = VALUE_OR_RETURN(
-            legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(legacy.mStretchMode));
-    return aidl;
-}
-
-ConversionResult<audio_standard_t>
-aidl2legacy_AudioStandard_audio_standard_t(AudioStandard aidl) {
-    switch (aidl) {
-        case AudioStandard::NONE:
-            return AUDIO_STANDARD_NONE;
-        case AudioStandard::EDID:
-            return AUDIO_STANDARD_EDID;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioStandard>
-legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy) {
-    switch (legacy) {
-        case AUDIO_STANDARD_NONE:
-            return AudioStandard::NONE;
-        case AUDIO_STANDARD_EDID:
-            return AudioStandard::EDID;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_extra_audio_descriptor>
-aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
-        const ExtraAudioDescriptor& aidl) {
-    audio_extra_audio_descriptor legacy;
-    legacy.standard = VALUE_OR_RETURN(aidl2legacy_AudioStandard_audio_standard_t(aidl.standard));
-    if (aidl.audioDescriptor.size() > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
-        return unexpected(BAD_VALUE);
-    }
-    legacy.descriptor_length = aidl.audioDescriptor.size();
-    std::copy(aidl.audioDescriptor.begin(), aidl.audioDescriptor.end(),
-              std::begin(legacy.descriptor));
-    legacy.encapsulation_type =
-            VALUE_OR_RETURN(aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-                    aidl.encapsulationType));
-    return legacy;
-}
-
-ConversionResult<ExtraAudioDescriptor>
-legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
-        const audio_extra_audio_descriptor& legacy) {
-    ExtraAudioDescriptor aidl;
-    aidl.standard = VALUE_OR_RETURN(legacy2aidl_audio_standard_t_AudioStandard(legacy.standard));
-    if (legacy.descriptor_length > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
-        return unexpected(BAD_VALUE);
-    }
-    aidl.audioDescriptor.resize(legacy.descriptor_length);
-    std::copy(legacy.descriptor, legacy.descriptor + legacy.descriptor_length,
-              aidl.audioDescriptor.begin());
-    aidl.encapsulationType =
-            VALUE_OR_RETURN(legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
-                    legacy.encapsulation_type));
-    return aidl;
-}
-
-ConversionResult<audio_encapsulation_type_t>
-aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-        const AudioEncapsulationType& aidl) {
-    switch (aidl) {
-        case AudioEncapsulationType::NONE:
-            return AUDIO_ENCAPSULATION_TYPE_NONE;
-        case AudioEncapsulationType::IEC61937:
-            return AUDIO_ENCAPSULATION_TYPE_IEC61937;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<AudioEncapsulationType>
-legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
-        const audio_encapsulation_type_t & legacy) {
-    switch (legacy) {
-        case AUDIO_ENCAPSULATION_TYPE_NONE:
-            return AudioEncapsulationType::NONE;
-        case AUDIO_ENCAPSULATION_TYPE_IEC61937:
-            return AudioEncapsulationType::IEC61937;
-    }
-    return unexpected(BAD_VALUE);
-}
-
 ConversionResult<TrackSecondaryOutputInfoPair>
 aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair(
         const media::TrackSecondaryOutputInfo& aidl) {
@@ -3388,4 +982,23 @@
             enumToMask_index<int32_t, media::AudioDirectMode>);
 }
 
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfoFw_audio_microphone_characteristic_t(
+        const media::MicrophoneInfoFw& aidl) {
+    audio_microphone_characteristic_t legacy =
+            VALUE_OR_RETURN(aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+                            aidl.info, aidl.dynamic));
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return legacy;
+}
+
+ConversionResult<media::MicrophoneInfoFw>
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
+        const audio_microphone_characteristic_t& legacy) {
+    media::MicrophoneInfoFw aidl;
+    RETURN_IF_ERROR(legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
+                    legacy, &aidl.info, &aidl.dynamic));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    return aidl;
+}
+
 }  // namespace android
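Note (illustrative, not part of the change): the converters kept in this file now layer the framework-only pieces (the *Fw/*Sys types, hw_module, portId) on top of the shared HAL converters instead of duplicating them. A minimal round-trip sketch under that assumption, with error propagation left to the ConversionResult macros used above:

ConversionResult<audio_port_v7> roundTripPort(const audio_port_v7& legacyPort) {
    // legacy -> framework AIDL: .hal carries the HAL-visible fields, .sys the framework-only ones.
    const media::AudioPortFw aidlPort = VALUE_OR_RETURN(
            legacy2aidl_audio_port_v7_AudioPortFw(legacyPort));
    // framework AIDL -> legacy again, re-deriving the direction from sys.role/sys.type.
    return aidl2legacy_AudioPortFw_audio_port_v7(aidlPort);
}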
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 69a9c68..7122af1 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -48,14 +48,16 @@
 cc_library {
     name: "libaudiopolicy",
     srcs: [
-        "AudioAttributes.cpp",
+        "VolumeGroupAttributes.cpp",
         "AudioPolicy.cpp",
         "AudioProductStrategy.cpp",
         "AudioVolumeGroup.cpp",
         "PolicyAidlConversion.cpp"
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_export_shared",
+    ],
     shared_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -76,7 +78,6 @@
     include_dirs: ["system/media/audio_utils/include"],
     export_include_dirs: ["include"],
     export_shared_lib_headers: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -116,8 +117,10 @@
         "RecordingActivityTracker.cpp",
         "TrackPlayerBase.cpp",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     shared_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -125,6 +128,7 @@
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "libaudio_aidl_conversion_common_cpp",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudioutils",
@@ -190,87 +194,31 @@
     },
 }
 
-// This is intended for clients needing to include AidlConversionUtil.h, without dragging in a lot of extra
-// dependencies.
-cc_library_headers {
-    name: "libaudioclient_aidl_conversion_util",
-    host_supported: true,
-    vendor_available: true,
-    double_loadable: true,
-    min_sdk_version: "29",
-    export_include_dirs: [
-        "include",
-    ],
-    header_libs: [
-        "libbase_headers",
-        "liberror_headers",
-    ],
-    export_header_lib_headers: [
-        "libbase_headers",
-        "liberror_headers",
-    ],
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.bluetooth",
-        "com.android.media",
-        "com.android.media.swcodec",
-    ],
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
-
 cc_library {
     name: "libaudioclient_aidl_conversion",
     srcs: ["AidlConversion.cpp"],
+    shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+    ],
+    static_libs: [
+        "libaudio_aidl_conversion_common_cpp",
+    ],
     export_include_dirs: ["include"],
-    host_supported: true,
-    vendor_available: true,
-    double_loadable: true,
-    min_sdk_version: "29",
     header_libs: [
-        "libaudioclient_aidl_conversion_util",
-        "libaudio_system_headers",
+        "libaudio_aidl_conversion_common_util_cpp",
     ],
     export_header_lib_headers: [
-        "libaudioclient_aidl_conversion_util",
-    ],
-    shared_libs: [
-        "android.media.audio.common.types-V1-cpp",
-        "audioclient-types-aidl-cpp",
-        "libbase",
-        "libbinder",
-        "liblog",
-        "libshmemcompat",
-        "libstagefright_foundation",
-        "libutils",
-        "shared-file-region-aidl-cpp",
-        "framework-permission-aidl-cpp",
+        "libaudio_aidl_conversion_common_util_cpp",
     ],
     export_shared_lib_headers: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
-        "libbase",
-        "shared-file-region-aidl-cpp",
+        "av-types-aidl-cpp",
     ],
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wno-error=deprecated-declarations",
+    defaults: [
+        "audio_aidl_conversion_common_default",
+        "latest_android_media_audio_common_types_cpp_export_shared",
     ],
-    sanitize: {
-        misc_undefined: [
-            "unsigned-integer-overflow",
-            "signed-integer-overflow",
-        ],
-    },
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
 }
 
 // AIDL interface between libaudioclient and framework.jar
@@ -278,6 +226,7 @@
     name: "libaudioclient_aidl",
     srcs: [
         "aidl/android/media/IPlayer.aidl",
+        "aidl/android/media/AudioHalVersion.aidl",
     ],
     path: "aidl",
 }
@@ -302,12 +251,26 @@
     double_loadable: true,
     vendor_available: true,
     srcs: [
+        "aidl/android/media/EffectConfig.aidl",
         "aidl/android/media/IEffect.aidl",
         "aidl/android/media/IEffectClient.aidl",
     ],
     imports: [
+        "android.media.audio.common.types-V2",
         "shared-file-region-aidl",
     ],
+    backend: {
+        cpp: {
+            min_sdk_version: "29",
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.media",
+            ],
+        },
+        java: {
+            sdk_version: "module_current",
+        },
+    },
 }
 
 aidl_interface {
@@ -321,16 +284,17 @@
         "aidl/android/media/AudioAttributesInternal.aidl",
         "aidl/android/media/AudioClient.aidl",
         "aidl/android/media/AudioDirectMode.aidl",
-        "aidl/android/media/AudioDualMonoMode.aidl",
         "aidl/android/media/AudioFlag.aidl",
         "aidl/android/media/AudioGainSys.aidl",
+        "aidl/android/media/AudioHalVersion.aidl",
+        "aidl/android/media/AudioHwModule.aidl",
         "aidl/android/media/AudioIoConfigEvent.aidl",
         "aidl/android/media/AudioIoDescriptor.aidl",
-        "aidl/android/media/AudioPatch.aidl",
-        "aidl/android/media/AudioPlaybackRate.aidl",
-        "aidl/android/media/AudioPort.aidl",
+        "aidl/android/media/AudioPatchFw.aidl",
+        "aidl/android/media/AudioPolicyConfig.aidl",
+        "aidl/android/media/AudioPortFw.aidl",
         "aidl/android/media/AudioPortSys.aidl",
-        "aidl/android/media/AudioPortConfig.aidl",
+        "aidl/android/media/AudioPortConfigFw.aidl",
         "aidl/android/media/AudioPortConfigSys.aidl",
         "aidl/android/media/AudioPortDeviceExtSys.aidl",
         "aidl/android/media/AudioPortExtSys.aidl",
@@ -338,14 +302,17 @@
         "aidl/android/media/AudioPortRole.aidl",
         "aidl/android/media/AudioPortType.aidl",
         "aidl/android/media/AudioProfileSys.aidl",
+        "aidl/android/media/AudioRoute.aidl",
         "aidl/android/media/AudioTimestampInternal.aidl",
         "aidl/android/media/AudioUniqueIdUse.aidl",
         "aidl/android/media/AudioVibratorInfo.aidl",
+        "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
+        "aidl/android/media/SurroundSoundConfig.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V1",
+        "android.media.audio.common.types-V2",
         "framework-permission-aidl",
     ],
     backend: {
@@ -371,6 +338,8 @@
     srcs: [
         "aidl/android/media/AudioAttributesEx.aidl",
         "aidl/android/media/AudioMix.aidl",
+        "aidl/android/media/AudioMixerAttributesInternal.aidl",
+        "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioMixCallbackFlag.aidl",
         "aidl/android/media/AudioMixMatchCriterion.aidl",
         "aidl/android/media/AudioMixMatchCriterionValue.aidl",
@@ -389,7 +358,7 @@
         "aidl/android/media/SpatializerHeadTrackingMode.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V1",
+        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
     ],
     backend: {
@@ -430,9 +399,13 @@
         "aidl/android/media/IAudioRecord.aidl",
         "aidl/android/media/IAudioTrack.aidl",
         "aidl/android/media/IAudioTrackCallback.aidl",
+
+        "aidl/android/media/ISoundDose.aidl",
+        "aidl/android/media/ISoundDoseCallback.aidl",
+        "aidl/android/media/SoundDoseRecord.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V1",
+        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "av-types-aidl",
         "effect-aidl",
@@ -469,7 +442,7 @@
         "aidl/android/media/IAudioPolicyServiceClient.aidl",
     ],
     imports: [
-        "android.media.audio.common.types-V1",
+        "android.media.audio.common.types-V2",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
@@ -521,3 +494,21 @@
         },
     },
 }
+
+aidl_interface {
+    name: "sounddose-aidl",
+    unstable: true,
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/android/media/ISoundDose.aidl",
+        "aidl/android/media/ISoundDoseCallback.aidl",
+        "aidl/android/media/SoundDoseRecord.aidl",
+    ],
+
+    double_loadable: true,
+    backend: {
+        java: {
+            sdk_version: "module_current",
+        },
+    },
+}
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 7b273ec..2870c4c 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -89,7 +89,7 @@
         return NO_INIT;
     }
 
-    if (type == NULL && uuid == NULL) {
+    if (type == nullptr && uuid == nullptr) {
         ALOGW("Must specify at least type or uuid");
         return BAD_VALUE;
     }
@@ -99,8 +99,8 @@
     mCallback = callback;
 
     memset(&mDescriptor, 0, sizeof(effect_descriptor_t));
-    mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
-    mDescriptor.uuid = *(uuid != NULL ? uuid : EFFECT_UUID_NULL);
+    mDescriptor.type = *(type != nullptr ? type : EFFECT_UUID_NULL);
+    mDescriptor.uuid = *(uuid != nullptr ? uuid : EFFECT_UUID_NULL);
 
     // TODO b/182392769: use attribution source util
     mIEffectClient = new EffectClient(this);
@@ -292,7 +292,7 @@
             AudioSystem::releaseAudioSessionId(mSessionId,
                 VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid)));
         }
-        if (mIEffect != NULL) {
+        if (mIEffect != nullptr) {
             mIEffect->disconnect();
             IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
         }
@@ -370,7 +370,7 @@
         if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
             return NO_ERROR;
         }
-        if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+        if (replySize == nullptr || *replySize != sizeof(status_t) || replyData == nullptr) {
             return BAD_VALUE;
         }
         mLock.lock();
@@ -413,7 +413,7 @@
         return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
     }
 
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -448,8 +448,7 @@
     if (mStatus != NO_ERROR) {
         return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
     }
-
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -504,8 +503,7 @@
     if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
         return mStatus;
     }
-
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -529,6 +527,36 @@
     return status;
 }
 
+status_t AudioEffect::getConfigs(
+        audio_config_base_t *inputCfg, audio_config_base_t *outputCfg)
+{
+    if (mProbe) {
+        return INVALID_OPERATION;
+    }
+    if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
+        return mStatus;
+    }
+    if (inputCfg == nullptr || outputCfg == nullptr) {
+        return BAD_VALUE;
+    }
+    status_t status;
+    media::EffectConfig cfg;
+    Status bs = mIEffect->getConfig(&cfg, &status);
+    if (!bs.isOk()) {
+        status = statusTFromBinderStatus(bs);
+        ALOGW("%s received status %d from binder transaction", __func__, status);
+        return status;
+    }
+    if (status == NO_ERROR) {
+        *inputCfg = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioConfigBase_audio_config_base_t(
+                        cfg.inputCfg, cfg.isOnInputStream));
+        *outputCfg = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioConfigBase_audio_config_base_t(
+                        cfg.outputCfg, cfg.isOnInputStream));
+    } else {
+        ALOGW("%s received status %d from the effect", __func__, status);
+    }
+    return status;
+}
 
 // -------------------------------------------------------------------------
 
@@ -540,7 +568,6 @@
     if (cb != nullptr) {
         cb->onError(mStatus);
     }
-    mIEffect.clear();
 }
 
 // -------------------------------------------------------------------------
@@ -603,6 +630,9 @@
 
 status_t AudioEffect::queryNumberEffects(uint32_t *numEffects)
 {
+    if (numEffects == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->queryNumberEffects(numEffects);
@@ -610,6 +640,9 @@
 
 status_t AudioEffect::queryEffect(uint32_t index, effect_descriptor_t *descriptor)
 {
+    if (descriptor == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->queryEffect(index, descriptor);
@@ -620,6 +653,9 @@
                                           uint32_t preferredTypeFlag,
                                           effect_descriptor_t *descriptor)
 {
+    if (uuid == nullptr || type == nullptr || descriptor == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getEffectDescriptor(uuid, type, preferredTypeFlag, descriptor);
@@ -650,6 +686,9 @@
 
 status_t AudioEffect::newEffectUniqueId(audio_unique_id_t* id)
 {
+    if (id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *id = af->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
@@ -663,14 +702,15 @@
                                              audio_source_t source,
                                              audio_unique_id_t *id)
 {
+    if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
     // Convert type & uuid from string to effect_uuid_t.
     effect_uuid_t type;
-    if (typeStr != NULL) {
+    if (typeStr != nullptr) {
         status_t res = stringToGuid(typeStr, &type);
         if (res != OK) return res;
     } else {
@@ -678,7 +718,7 @@
     }
 
     effect_uuid_t uuid;
-    if (uuidStr != NULL) {
+    if (uuidStr != nullptr) {
         status_t res = stringToGuid(uuidStr, &uuid);
         if (res != OK) return res;
     } else {
@@ -706,14 +746,15 @@
                                              audio_usage_t usage,
                                              audio_unique_id_t *id)
 {
+    if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
     // Convert type & uuid from string to effect_uuid_t.
     effect_uuid_t type;
-    if (typeStr != NULL) {
+    if (typeStr != nullptr) {
         status_t res = stringToGuid(typeStr, &type);
         if (res != OK) return res;
     } else {
@@ -721,7 +762,7 @@
     }
 
     effect_uuid_t uuid;
-    if (uuidStr != NULL) {
+    if (uuidStr != nullptr) {
         status_t res = stringToGuid(uuidStr, &uuid);
         if (res != OK) return res;
     } else {
@@ -764,7 +805,7 @@
 
 status_t AudioEffect::stringToGuid(const char *str, effect_uuid_t *guid)
 {
-    if (str == NULL || guid == NULL) {
+    if (str == nullptr || guid == nullptr) {
         return BAD_VALUE;
     }
 
@@ -790,7 +831,7 @@
 
 status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t maxLen)
 {
-    if (guid == NULL || str == NULL) {
+    if (guid == nullptr || str == nullptr) {
         return BAD_VALUE;
     }
 
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index c2f7229..1b9936f 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -57,6 +57,10 @@
     case RULE_EXCLUDE_USERID:
         mValue.mUserId = (int) parcel->readInt32();
         break;
+    case RULE_MATCH_AUDIO_SESSION_ID:
+    case RULE_EXCLUDE_AUDIO_SESSION_ID:
+        mValue.mAudioSessionId = (audio_session_t) parcel->readInt32();
+        break;
     default:
         ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
         return BAD_VALUE;
@@ -71,6 +75,10 @@
     return NO_ERROR;
 }
 
+bool AudioMixMatchCriterion::isExcludeCriterion() const {
+    return mRule & RULE_EXCLUSION_MASK;
+}
+
 //
 //  AudioMix implementation
 //
@@ -91,10 +99,11 @@
     if (size > MAX_CRITERIA_PER_MIX) {
         size = MAX_CRITERIA_PER_MIX;
     }
+    mCriteria.reserve(size);
     for (size_t i = 0; i < size; i++) {
         AudioMixMatchCriterion criterion;
         if (criterion.readFromParcel(parcel) == NO_ERROR) {
-            mCriteria.add(criterion);
+            mCriteria.push_back(criterion);
         }
     }
     return NO_ERROR;
@@ -135,18 +144,18 @@
     return NO_ERROR;
 }
 
-void AudioMix::setExcludeUid(uid_t uid) const {
+void AudioMix::setExcludeUid(uid_t uid) {
     AudioMixMatchCriterion crit;
     crit.mRule = RULE_EXCLUDE_UID;
     crit.mValue.mUid = uid;
-    mCriteria.add(crit);
+    mCriteria.push_back(crit);
 }
 
-void AudioMix::setMatchUid(uid_t uid) const {
+void AudioMix::setMatchUid(uid_t uid) {
     AudioMixMatchCriterion crit;
     crit.mRule = RULE_MATCH_UID;
     crit.mValue.mUid = uid;
-    mCriteria.add(crit);
+    mCriteria.push_back(crit);
 }
 
 bool AudioMix::hasUidRule(bool match, uid_t uid) const {
@@ -169,18 +178,18 @@
     return false;
 }
 
-void AudioMix::setExcludeUserId(int userId) const {
+void AudioMix::setExcludeUserId(int userId) {
     AudioMixMatchCriterion crit;
     crit.mRule = RULE_EXCLUDE_USERID;
     crit.mValue.mUserId = userId;
-    mCriteria.add(crit);
+    mCriteria.push_back(crit);
 }
 
-void AudioMix::setMatchUserId(int userId) const {
+void AudioMix::setMatchUserId(int userId) {
     AudioMixMatchCriterion crit;
     crit.mRule = RULE_MATCH_USERID;
     crit.mValue.mUserId = userId;
-    mCriteria.add(crit);
+    mCriteria.push_back(crit);
 }
 
 bool AudioMix::hasUserIdRule(bool match, int userId) const {
@@ -194,9 +203,10 @@
     return false;
 }
 
-bool AudioMix::hasMatchUserIdRule() const {
+bool AudioMix::hasUserIdRule(bool match) const {
+    const uint32_t rule = match ? RULE_MATCH_USERID : RULE_EXCLUDE_USERID;
     for (size_t i = 0; i < mCriteria.size(); i++) {
-        if (mCriteria[i].mRule == RULE_MATCH_USERID) {
+        if (mCriteria[i].mRule == rule) {
             return true;
         }
     }
@@ -205,7 +215,7 @@
 
 bool AudioMix::isDeviceAffinityCompatible() const {
     return ((mMixType == MIX_TYPE_PLAYERS)
-            && (mRouteFlags == MIX_ROUTE_FLAG_RENDER));
+            && ((mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER));
 }
 
 } // namespace android
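Illustrative only: building one of the new audio-session criteria and using the new isExcludeCriterion() helper (a sketch; the mRule/mValue accesses follow the uses shown above, and the session id is hypothetical):

AudioMixMatchCriterion crit;
crit.mRule = RULE_MATCH_AUDIO_SESSION_ID;
crit.mValue.mAudioSessionId = static_cast<audio_session_t>(101);  // hypothetical session id
// Exclude rules are now detected via the exclusion bit instead of enumerating every rule.
const bool isExclude = crit.isExcludeCriterion();  // false for a RULE_MATCH_* rule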
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index ecd423a..d9fd58c 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -18,7 +18,7 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 #include <media/AudioProductStrategy.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
@@ -42,8 +42,8 @@
     aidl.name = legacy.getName();
     aidl.audioAttributes = VALUE_OR_RETURN(
             convertContainer<std::vector<media::AudioAttributesEx>>(
-                    legacy.getAudioAttributes(),
-                    legacy2aidl_AudioAttributes_AudioAttributesEx));
+                    legacy.getVolumeGroupAttributes(),
+                    legacy2aidl_VolumeGroupAttributes_AudioAttributesEx));
     aidl.id = VALUE_OR_RETURN(legacy2aidl_product_strategy_t_int32_t(legacy.getId()));
     return aidl;
 }
@@ -53,32 +53,57 @@
     return AudioProductStrategy(
             aidl.name,
             VALUE_OR_RETURN(
-                    convertContainer<std::vector<AudioAttributes>>(
+                    convertContainer<std::vector<VolumeGroupAttributes>>(
                             aidl.audioAttributes,
-                            aidl2legacy_AudioAttributesEx_AudioAttributes)),
+                            aidl2legacy_AudioAttributesEx_VolumeGroupAttributes)),
             VALUE_OR_RETURN(aidl2legacy_int32_t_product_strategy_t(aidl.id)));
 }
 
 // Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
-bool AudioProductStrategy::attributesMatches(const audio_attributes_t refAttributes,
-                                        const audio_attributes_t clientAttritubes)
+int AudioProductStrategy::attributesMatchesScore(const audio_attributes_t refAttributes,
+                                                 const audio_attributes_t clientAttritubes)
 {
+    if (refAttributes == clientAttritubes) {
+        return MATCH_EQUALS;
+    }
     if (refAttributes == AUDIO_ATTRIBUTES_INITIALIZER) {
         // The default product strategy is the strategy that holds default attributes by convention.
         // All attributes that fail to match will follow the default strategy for routing.
-        // Choosing the default must be done as a fallback, the attributes match shall not
-        // select the default.
-        return false;
+        // Choosing the default must be done as a fallback, so return a default (zero) score
+        // to allow identifying the fallback.
+        return MATCH_ON_DEFAULT_SCORE;
     }
-    return ((refAttributes.usage == AUDIO_USAGE_UNKNOWN) ||
-            (clientAttritubes.usage == refAttributes.usage)) &&
-            ((refAttributes.content_type == AUDIO_CONTENT_TYPE_UNKNOWN) ||
-             (clientAttritubes.content_type == refAttributes.content_type)) &&
-            ((refAttributes.flags == AUDIO_FLAG_NONE) ||
-             (clientAttritubes.flags != AUDIO_FLAG_NONE &&
-            (clientAttritubes.flags & refAttributes.flags) == refAttributes.flags)) &&
-            ((strlen(refAttributes.tags) == 0) ||
-             (std::strcmp(clientAttritubes.tags, refAttributes.tags) == 0));
+    int score = MATCH_ON_DEFAULT_SCORE;
+    if (refAttributes.usage == AUDIO_USAGE_UNKNOWN) {
+        score |= MATCH_ON_DEFAULT_SCORE;
+    } else if (clientAttritubes.usage == refAttributes.usage) {
+        score |= MATCH_ON_USAGE_SCORE;
+    } else {
+        return NO_MATCH;
+    }
+    if (refAttributes.content_type == AUDIO_CONTENT_TYPE_UNKNOWN) {
+        score |= MATCH_ON_DEFAULT_SCORE;
+    } else if (clientAttritubes.content_type == refAttributes.content_type) {
+        score |= MATCH_ON_CONTENT_TYPE_SCORE;
+    } else {
+        return NO_MATCH;
+    }
+    if (strlen(refAttributes.tags) == 0) {
+        score |= MATCH_ON_DEFAULT_SCORE;
+    } else if (std::strcmp(clientAttritubes.tags, refAttributes.tags) == 0) {
+        score |= MATCH_ON_TAGS_SCORE;
+    } else {
+        return NO_MATCH;
+    }
+    if (refAttributes.flags == AUDIO_FLAG_NONE) {
+        score |= MATCH_ON_DEFAULT_SCORE;
+    } else if ((clientAttritubes.flags != AUDIO_FLAG_NONE)
+            && ((clientAttritubes.flags & refAttributes.flags) == refAttributes.flags)) {
+        score |= MATCH_ON_FLAGS_SCORE;
+    } else {
+        return NO_MATCH;
+    }
+    return score;
 }
 
 } // namespace android
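Illustrative only: ranking candidate strategy attributes with the new score-based matcher (a sketch; it assumes attributesMatchesScore and the MATCH_*/NO_MATCH constants are class-scope as the unqualified uses above suggest, that NO_MATCH orders below every real score, and that `candidates` and `clientAttributes` are hypothetical inputs):

int bestScore = AudioProductStrategy::NO_MATCH;
audio_attributes_t bestRef = AUDIO_ATTRIBUTES_INITIALIZER;
for (const audio_attributes_t& ref : candidates) {
    const int score = AudioProductStrategy::attributesMatchesScore(ref, clientAttributes);
    if (score == AudioProductStrategy::MATCH_EQUALS) {  // exact match short-circuits
        bestRef = ref;
        break;
    }
    if (score > bestScore) {  // otherwise keep the best partial match; the default score is the fallback
        bestScore = score;
        bestRef = ref;
    }
}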
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 15203d6..f07ea1d 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -146,39 +146,6 @@
         audio_channel_mask_t channelMask,
         const AttributionSourceState& client,
         size_t frameCount,
-        legacy_callback_t callback,
-        void* user,
-        uint32_t notificationFrames,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        audio_input_flags_t flags,
-        const audio_attributes_t* pAttributes,
-        audio_port_handle_t selectedDeviceId,
-        audio_microphone_direction_t selectedMicDirection,
-        float microphoneFieldDimension)
-    : mActive(false),
-      mStatus(NO_INIT),
-      mClientAttributionSource(client),
-      mSessionId(AUDIO_SESSION_ALLOCATE),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mProxy(NULL)
-{
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
-    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
-    (void)set(inputSource, sampleRate, format, channelMask, frameCount, callback, user,
-            notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
-            uid, pid, pAttributes, selectedDeviceId, selectedMicDirection,
-            microphoneFieldDimension);
-}
-
-AudioRecord::AudioRecord(
-        audio_source_t inputSource,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        const AttributionSourceState& client,
-        size_t frameCount,
         const wp<IAudioRecordCallback>& callback,
         uint32_t notificationFrames,
         audio_session_t sessionId,
@@ -255,37 +222,6 @@
         mDeviceCallback.clear();
     }
 }
-namespace {
-class LegacyCallbackWrapper : public AudioRecord::IAudioRecordCallback {
-    const AudioRecord::legacy_callback_t mCallback;
-    void* const mData;
-
-  public:
-    LegacyCallbackWrapper(AudioRecord::legacy_callback_t callback, void* user)
-        : mCallback(callback), mData(user) {}
-
-    size_t onMoreData(const AudioRecord::Buffer& buffer) override {
-        AudioRecord::Buffer copy = buffer;
-        mCallback(AudioRecord::EVENT_MORE_DATA, mData, &copy);
-        return copy.size();
-    }
-
-    void onOverrun() override { mCallback(AudioRecord::EVENT_OVERRUN, mData, nullptr); }
-
-    void onMarker(uint32_t markerPosition) override {
-        mCallback(AudioRecord::EVENT_MARKER, mData, &markerPosition);
-    }
-
-    void onNewPos(uint32_t newPos) override {
-        mCallback(AudioRecord::EVENT_NEW_POS, mData, &newPos);
-    }
-
-    void onNewIAudioRecord() override {
-        mCallback(AudioRecord::EVENT_NEW_IAUDIORECORD, mData, nullptr);
-    }
-};
-}  // namespace
-
 status_t AudioRecord::set(
         audio_source_t inputSource,
         uint32_t sampleRate,
@@ -479,37 +415,6 @@
     return status;
 }
 
-status_t AudioRecord::set(
-        audio_source_t inputSource,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        size_t frameCount,
-        legacy_callback_t callback,
-        void* user,
-        uint32_t notificationFrames,
-        bool threadCanCallJava,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        audio_input_flags_t flags,
-        uid_t uid,
-        pid_t pid,
-        const audio_attributes_t* pAttributes,
-        audio_port_handle_t selectedDeviceId,
-        audio_microphone_direction_t selectedMicDirection,
-        float microphoneFieldDimension,
-        int32_t maxSharedAudioHistoryMs)
-{
-    if (callback != nullptr) {
-        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
-    } else if (user) {
-        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
-    }
-    return set(inputSource, sampleRate, format, channelMask, frameCount, mLegacyCallbackWrapper,
-        notificationFrames, threadCanCallJava, sessionId, transferType, flags, uid, pid,
-        pAttributes, selectedDeviceId, selectedMicDirection, microphoneFieldDimension,
-        maxSharedAudioHistoryMs);
-}
 // -------------------------------------------------------------------------
 
 status_t AudioRecord::start(AudioSystem::sync_event_t event, audio_session_t triggerSession)
@@ -655,6 +560,21 @@
     return NO_ERROR;
 }
 
+uint32_t AudioRecord::getHalSampleRate() const
+{
+    return mHalSampleRate;
+}
+
+uint32_t AudioRecord::getHalChannelCount() const
+{
+    return mHalChannelCount;
+}
+
+audio_format_t AudioRecord::getHalFormat() const
+{
+    return mHalFormat;
+}
+
 status_t AudioRecord::getMarkerPosition(uint32_t *marker) const
 {
     if (marker == NULL) {
@@ -973,6 +893,9 @@
     mServerFrameSize = audio_bytes_per_frame(
             audio_channel_count_from_in_mask(mServerConfig.channel_mask), mServerConfig.format);
     mServerSampleSize = audio_bytes_per_sample(mServerConfig.format);
+    mHalSampleRate = output.halConfig.sample_rate;
+    mHalChannelCount = audio_channel_count_from_in_mask(output.halConfig.channel_mask);
+    mHalFormat = output.halConfig.format;
 
     if (output.cblk == 0) {
         errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
@@ -1712,16 +1635,10 @@
 
 // -------------------------------------------------------------------------
 
-status_t AudioRecord::getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones)
+status_t AudioRecord::getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones)
 {
     AutoMutex lock(mLock);
-    std::vector<media::MicrophoneInfoData> mics;
-    status_t status = statusTFromBinderStatus(mAudioRecord->getActiveMicrophones(&mics));
-    activeMicrophones->resize(mics.size());
-    for (size_t i = 0; status == OK && i < mics.size(); ++i) {
-        status = activeMicrophones->at(i).readFromParcelable(mics[i]);
-    }
-    return status;
+    return statusTFromBinderStatus(mAudioRecord->getActiveMicrophones(activeMicrophones));
 }
 
 status_t AudioRecord::setPreferredMicrophoneDirection(audio_microphone_direction_t direction)
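Illustrative only: querying the new HAL-side stream parameters and the simplified active-microphone API (a sketch, not part of the diff; `record` is an assumed, successfully initialized android::AudioRecord):

std::vector<media::MicrophoneInfoFw> mics;
if (record->getActiveMicrophones(&mics) == NO_ERROR) {
    // The AIDL parcelables are now returned directly, with no per-entry readFromParcelable step.
    ALOGD("capturing via %zu mic(s); HAL runs at %u Hz, %u channel(s), format %#x",
          mics.size(), record->getHalSampleRate(), record->getHalChannelCount(),
          record->getHalFormat());
}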
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index de8c298..4527e0f 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -34,6 +34,7 @@
 
 #include <system/audio.h>
 #include <android/media/GetInputForAttrResponse.h>
+#include <android/media/AudioMixerAttributesInternal.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -81,7 +82,7 @@
 // Binder for the AudioFlinger service that's passed to this client process from the system server.
 // This allows specific isolated processes to access the audio system. Currently used only for the
 // HotwordDetectionService.
-sp<IBinder> gAudioFlingerBinder = nullptr;
+static sp<IBinder> gAudioFlingerBinder = nullptr;
 
 void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
     if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
@@ -97,6 +98,15 @@
     gAudioFlingerBinder = audioFlinger;
 }
 
+static sp<IAudioFlinger> gLocalAudioFlinger; // set if we are local.
+
+status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
+    Mutex::Autolock _l(gLock);
+    if (gAudioFlinger != nullptr) return INVALID_OPERATION;
+    gLocalAudioFlinger = af;
+    return OK;
+}
+
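// Illustrative only, not part of the diff: a process hosting AudioFlinger in-process (for
// example a test binary) can hand its instance to AudioSystem before any client call, so that
// get_audio_flinger() below returns it and skips the binder lookup:
//     sp<IAudioFlinger> af = makeInProcessAudioFlinger();  // hypothetical factory
//     AudioSystem::setLocalAudioFlinger(af);               // must happen before first use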
 // establish binder interface to AudioFlinger service
 const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
     sp<IAudioFlinger> af;
@@ -104,7 +114,19 @@
     bool reportNoError = false;
     {
         Mutex::Autolock _l(gLock);
-        if (gAudioFlinger == 0) {
+        if (gAudioFlinger != nullptr) {
+            return gAudioFlinger;
+        }
+
+        if (gAudioFlingerClient == nullptr) {
+            gAudioFlingerClient = sp<AudioFlingerClient>::make();
+        } else {
+            reportNoError = true;
+        }
+
+        if (gLocalAudioFlinger != nullptr) {
+            gAudioFlinger = gLocalAudioFlinger;
+        } else {
             sp<IBinder> binder;
             if (gAudioFlingerBinder != nullptr) {
                 binder = gAudioFlingerBinder;
@@ -112,32 +134,24 @@
                 sp<IServiceManager> sm = defaultServiceManager();
                 do {
                     binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
-                    if (binder != 0)
-                        break;
+                    if (binder != nullptr) break;
                     ALOGW("AudioFlinger not published, waiting...");
                     usleep(500000); // 0.5 s
                 } while (true);
             }
-            if (gAudioFlingerClient == NULL) {
-                gAudioFlingerClient = new AudioFlingerClient();
-            } else {
-                reportNoError = true;
-            }
             binder->linkToDeath(gAudioFlingerClient);
-            gAudioFlinger = new AudioFlingerClientAdapter(
-                    interface_cast<media::IAudioFlingerService>(binder));
-            LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
-            afc = gAudioFlingerClient;
-            // Make sure callbacks can be received by gAudioFlingerClient
-            ProcessState::self()->startThreadPool();
+            const auto afs = interface_cast<media::IAudioFlingerService>(binder);
+            LOG_ALWAYS_FATAL_IF(afs == nullptr);
+            gAudioFlinger = sp<AudioFlingerClientAdapter>::make(afs);
         }
+        afc = gAudioFlingerClient;
         af = gAudioFlinger;
+        // Make sure callbacks can be received by gAudioFlingerClient
+        ProcessState::self()->startThreadPool();
     }
-    if (afc != 0) {
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        af->registerClient(afc);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    }
+    const int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    af->registerClient(afc);
+    IPCThreadState::self()->restoreCallingIdentity(token);
     if (reportNoError) reportError(NO_ERROR);
     return af;
 }
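A brief sketch (not part of the patch) of how the new in-process path could be exercised; `localAf` stands for a hypothetical IAudioFlinger implementation living in the same process (for example inside audioserver):

#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>

using namespace android;

status_t publishLocalAudioFlinger(const sp<IAudioFlinger>& localAf) {
    // Must run before the first get_audio_flinger(): once gAudioFlinger has been
    // cached, setLocalAudioFlinger() returns INVALID_OPERATION.
    const status_t status = AudioSystem::setLocalAudioFlinger(localAf);
    if (status != OK) {
        return status;
    }
    // Later lookups resolve to the local instance instead of a binder proxy.
    return AudioSystem::get_audio_flinger() != nullptr ? OK : NO_INIT;
}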
@@ -250,6 +264,12 @@
     return af->setMode(mode);
 }
 
+status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == 0) return PERMISSION_DENIED;
+    return af->setSimulateDeviceConnections(enabled);
+}
+
 status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
@@ -670,6 +690,30 @@
     return Status::ok();
 }
 
+Status AudioSystem::AudioFlingerClient::onSupportedLatencyModesChanged(
+        int output, const std::vector<media::audio::common::AudioLatencyMode>& latencyModes) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    std::vector<audio_latency_mode_t> modesLegacy = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<audio_latency_mode_t>>(
+                    latencyModes, aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
+
+    std::vector<sp<SupportedLatencyModesCallback>> callbacks;
+    {
+        Mutex::Autolock _l(mLock);
+        for (auto callback : mSupportedLatencyModesCallbacks) {
+            if (auto ref = callback.promote(); ref != nullptr) {
+                callbacks.push_back(ref);
+            }
+        }
+    }
+    for (const auto& callback : callbacks) {
+        callback->onSupportedLatencyModesChanged(outputLegacy, modesLegacy);
+    }
+
+    return Status::ok();
+}
+
 status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
         uint32_t sampleRate, audio_format_t format,
         audio_channel_mask_t channelMask, size_t* buffSize) {
@@ -749,6 +793,31 @@
     return NO_ERROR;
 }
 
+status_t AudioSystem::AudioFlingerClient::addSupportedLatencyModesCallback(
+        const sp<SupportedLatencyModesCallback>& callback) {
+    Mutex::Autolock _l(mLock);
+    if (std::find(mSupportedLatencyModesCallbacks.begin(),
+                  mSupportedLatencyModesCallbacks.end(),
+                  callback) != mSupportedLatencyModesCallbacks.end()) {
+        return INVALID_OPERATION;
+    }
+    mSupportedLatencyModesCallbacks.push_back(callback);
+    return NO_ERROR;
+}
+
+status_t AudioSystem::AudioFlingerClient::removeSupportedLatencyModesCallback(
+        const sp<SupportedLatencyModesCallback>& callback) {
+    Mutex::Autolock _l(mLock);
+    auto it = std::find(mSupportedLatencyModesCallbacks.begin(),
+                        mSupportedLatencyModesCallbacks.end(),
+                        callback);
+    if (it == mSupportedLatencyModesCallbacks.end()) {
+        return INVALID_OPERATION;
+    }
+    mSupportedLatencyModesCallbacks.erase(it);
+    return NO_ERROR;
+}
+
 /* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb) {
     Mutex::Autolock _l(gLockErrorCallbacks);
     gAudioErrorCallbacks.insert(cb);
@@ -966,12 +1035,13 @@
                                        audio_session_t session,
                                        audio_stream_type_t* stream,
                                        const AttributionSourceState& attributionSource,
-                                       const audio_config_t* config,
+                                       audio_config_t* config,
                                        audio_output_flags_t flags,
                                        audio_port_handle_t* selectedDeviceId,
                                        audio_port_handle_t* portId,
                                        std::vector<audio_io_handle_t>* secondaryOutputs,
-                                       bool *isSpatialized) {
+                                       bool *isSpatialized,
+                                       bool *isBitPerfect) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1008,9 +1078,18 @@
 
     media::GetOutputForAttrResponse responseAidl;
 
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+    status_t status = statusTFromBinderStatus(
             aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
-                                  selectedDeviceIdAidl, &responseAidl)));
+                                  selectedDeviceIdAidl, &responseAidl));
+    if (status != NO_ERROR) {
+        config->format = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
+        config->channel_mask = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    responseAidl.configBase.channelMask, false /*isInput*/));
+        config->sample_rate = responseAidl.configBase.sampleRate;
+        return status;
+    }
 
     *output = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_io_handle_t(responseAidl.output));
@@ -1025,6 +1104,9 @@
     *secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
             responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
     *isSpatialized = responseAidl.isSpatialized;
+    *isBitPerfect = responseAidl.isBitPerfect;
+    *attr = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(responseAidl.attr));
 
     return OK;
 }
@@ -1065,7 +1147,7 @@
                                       audio_unique_id_t riid,
                                       audio_session_t session,
                                       const AttributionSourceState &attributionSource,
-                                      const audio_config_base_t* config,
+                                      audio_config_base_t* config,
                                       audio_input_flags_t flags,
                                       audio_port_handle_t* selectedDeviceId,
                                       audio_port_handle_t* portId) {
@@ -1102,9 +1184,14 @@
 
     media::GetInputForAttrResponse response;
 
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+    status_t status = statusTFromBinderStatus(
             aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, attributionSource,
-                configAidl, flagsAidl, selectedDeviceIdAidl, &response)));
+                configAidl, flagsAidl, selectedDeviceIdAidl, &response));
+    if (status != NO_ERROR) {
+        *config = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioConfigBase_audio_config_base_t(response.config, true /*isInput*/));
+        return status;
+    }
 
     *input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
     *selectedDeviceId = VALUE_OR_RETURN_STATUS(
@@ -1273,7 +1360,7 @@
     return result.value_or(PRODUCT_STRATEGY_NONE);
 }
 
-status_t AudioSystem::getDevicesForAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getDevicesForAttributes(const audio_attributes_t& aa,
                                               AudioDeviceTypeAddrVector* devices,
                                               bool forVolume) {
     if (devices == nullptr) {
@@ -1282,8 +1369,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     std::vector<AudioDevice> retAidl;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, forVolume, &retAidl)));
@@ -1483,7 +1570,7 @@
             legacy2aidl_audio_port_type_t_AudioPortType(type));
     Int numPortsAidl;
     numPortsAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_ports));
-    std::vector<media::AudioPort> portsAidl;
+    std::vector<media::AudioPortFw> portsAidl;
     int32_t generationAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1491,7 +1578,16 @@
     *num_ports = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPortsAidl.value));
     *generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
     RETURN_STATUS_IF_ERROR(convertRange(portsAidl.begin(), portsAidl.end(), ports,
-                                        aidl2legacy_AudioPort_audio_port_v7));
+                                        aidl2legacy_AudioPortFw_audio_port_v7));
+    return OK;
+}
+
+status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
+                                              std::vector<media::AudioPortFw>* result) {
+    if (result == nullptr) return BAD_VALUE;
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
     return OK;
 }
 
@@ -1502,10 +1598,10 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPort portAidl;
+    media::AudioPortFw portAidl;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->getAudioPort(port->id, &portAidl)));
-    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(portAidl));
+    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(portAidl));
     return OK;
 }
 
@@ -1518,8 +1614,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_patch_AudioPatch(*patch));
+    media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_patch_AudioPatchFw(*patch));
     int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(aps->createAudioPatch(patchAidl, handleAidl, &handleAidl)));
@@ -1549,7 +1645,7 @@
 
     Int numPatchesAidl;
     numPatchesAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
-    std::vector<media::AudioPatch> patchesAidl;
+    std::vector<media::AudioPatchFw> patchesAidl;
     int32_t generationAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1557,7 +1653,7 @@
     *num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPatchesAidl.value));
     *generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
     RETURN_STATUS_IF_ERROR(convertRange(patchesAidl.begin(), patchesAidl.end(), patches,
-                                        aidl2legacy_AudioPatch_audio_patch));
+                                        aidl2legacy_AudioPatchFw_audio_patch));
     return OK;
 }
 
@@ -1569,8 +1665,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_port_config_AudioPortConfig(*config));
+    media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
     return statusTFromBinderStatus(aps->setAudioPortConfig(configAidl));
 }
 
@@ -1662,6 +1758,24 @@
     return afc->removeAudioDeviceCallback(callback, audioIo, portId);
 }
 
+status_t AudioSystem::addSupportedLatencyModesCallback(
+        const sp<SupportedLatencyModesCallback>& callback) {
+    const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+    if (afc == 0) {
+        return NO_INIT;
+    }
+    return afc->addSupportedLatencyModesCallback(callback);
+}
+
+status_t AudioSystem::removeSupportedLatencyModesCallback(
+        const sp<SupportedLatencyModesCallback>& callback) {
+    const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+    if (afc == 0) {
+        return NO_INIT;
+    }
+    return afc->removeSupportedLatencyModesCallback(callback);
+}
+
 audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
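For illustration (not part of the patch), a client-side sketch of the new callback registration; the virtual's exact signature is assumed to match the dispatch in onSupportedLatencyModesChanged() above:

#define LOG_TAG "LatencyModeObserverSketch"
#include <vector>
#include <media/AudioSystem.h>
#include <utils/Log.h>

using namespace android;

class LatencyModeObserver : public AudioSystem::SupportedLatencyModesCallback {
public:
    // Signature assumed from the dispatch code in AudioFlingerClient.
    void onSupportedLatencyModesChanged(
            audio_io_handle_t output,
            const std::vector<audio_latency_mode_t>& modes) override {
        ALOGV("output %d: %zu supported latency mode(s)", output, modes.size());
    }
};

void observeLatencyModes() {
    const sp<LatencyModeObserver> observer = sp<LatencyModeObserver>::make();
    AudioSystem::addSupportedLatencyModesCallback(observer);
    // ... later, when no longer interested ...
    AudioSystem::removeSupportedLatencyModesCallback(observer);
}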
@@ -1772,8 +1886,8 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPortConfig sourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_port_config_AudioPortConfig(*source));
+    media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfigFw(*source));
     media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attributes));
     int32_t portIdAidl;
@@ -1837,7 +1951,7 @@
     return result.value_or(NAN);
 }
 
-status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfo>* microphones) {
+status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getMicrophones(microphones);
@@ -2012,7 +2126,7 @@
     AudioProductStrategyVector strategies;
     listAudioProductStrategies(strategies);
     for (const auto& strategy : strategies) {
-        auto attrVect = strategy.getAudioAttributes();
+        auto attrVect = strategy.getVolumeGroupAttributes();
         auto iter = std::find_if(begin(attrVect), end(attrVect), [&stream](const auto& attributes) {
             return attributes.getStreamType() == stream;
         });
@@ -2026,7 +2140,7 @@
 
 audio_stream_type_t AudioSystem::attributesToStreamType(const audio_attributes_t& attr) {
     product_strategy_t psId;
-    status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(AudioAttributes(attr), psId);
+    status_t ret = AudioSystem::getProductStrategyFromAudioAttributes(attr, psId);
     if (ret != NO_ERROR) {
         ALOGE("no strategy found for attributes %s", toString(attr).c_str());
         return AUDIO_STREAM_MUSIC;
@@ -2035,10 +2149,9 @@
     listAudioProductStrategies(strategies);
     for (const auto& strategy : strategies) {
         if (strategy.getId() == psId) {
-            auto attrVect = strategy.getAudioAttributes();
+            auto attrVect = strategy.getVolumeGroupAttributes();
             auto iter = std::find_if(begin(attrVect), end(attrVect), [&attr](const auto& refAttr) {
-                return AudioProductStrategy::attributesMatches(
-                        refAttr.getAttributes(), attr);
+                return refAttr.matchesScore(attr) > 0;
             });
             if (iter != end(attrVect)) {
                 return iter->getStreamType();
@@ -2056,14 +2169,14 @@
     return AUDIO_STREAM_MUSIC;
 }
 
-status_t AudioSystem::getProductStrategyFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
                                                             product_strategy_t& productStrategy,
                                                             bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     int32_t productStrategyAidl;
 
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2086,14 +2199,14 @@
     return OK;
 }
 
-status_t AudioSystem::getVolumeGroupFromAudioAttributes(const AudioAttributes& aa,
+status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
                                                         volume_group_t& volumeGroup,
                                                         bool fallbackOnDefault) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
+    media::AudioAttributesInternal aaAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(aa));
     int32_t volumeGroupAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getVolumeGroupFromAudioAttributes(aaAidl, fallbackOnDefault, &volumeGroupAidl)));
@@ -2137,8 +2250,25 @@
             aps->setDevicesRoleForStrategy(strategyAidl, roleAidl, devicesAidl));
 }
 
+status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                   device_role_t role,
+                                                   const AudioDeviceTypeAddrVector& devices) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+
+    int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
+    media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
+    return statusTFromBinderStatus(
+            aps->removeDevicesRoleForStrategy(strategyAidl, roleAidl, devicesAidl));
+}
+
 status_t
-AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
+AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) {
         return PERMISSION_DENIED;
@@ -2146,7 +2276,7 @@
     int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
     return statusTFromBinderStatus(
-            aps->removeDevicesRoleForStrategy(strategyAidl, roleAidl));
+            aps->clearDevicesRoleForStrategy(strategyAidl, roleAidl));
 }
 
 status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
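A short sketch (not part of the patch) contrasting the new device-list overload of removeDevicesRoleForStrategy() with the renamed clearDevicesRoleForStrategy(); `strategy` and `device` are assumed to be valid values obtained elsewhere:

#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioSystem.h>

using namespace android;

status_t dropPreferredDevice(product_strategy_t strategy, const AudioDeviceTypeAddr& device) {
    // Remove a single device from the PREFERRED role for this strategy...
    const status_t status = AudioSystem::removeDevicesRoleForStrategy(
            strategy, DEVICE_ROLE_PREFERRED, {device});
    if (status != NO_ERROR) {
        return status;
    }
    // ...or clear the role entirely (previously named removeDevicesRoleForStrategy).
    return AudioSystem::clearDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED);
}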
@@ -2272,6 +2402,9 @@
                                     const AudioDeviceTypeAddrVector &devices,
                                     bool *canBeSpatialized) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (canBeSpatialized == nullptr) {
+        return BAD_VALUE;
+    }
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
@@ -2290,6 +2423,20 @@
     return OK;
 }
 
+status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                            sp<media::ISoundDose>* soundDose) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    if (soundDose == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(af->getSoundDoseInterface(callback, soundDose));
+    return OK;
+}
+
 status_t AudioSystem::getDirectPlaybackSupport(const audio_attributes_t *attr,
                                                const audio_config_t *config,
                                                audio_direct_mode_t* directMode) {
@@ -2317,7 +2464,7 @@
 
 status_t AudioSystem::getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                 std::vector<audio_profile>* audioProfiles) {
-    if (attr == nullptr) {
+    if (attr == nullptr || audioProfiles == nullptr) {
         return BAD_VALUE;
     }
 
@@ -2338,6 +2485,58 @@
     return NO_ERROR;
 }
 
+status_t AudioSystem::setRequestedLatencyMode(
+            audio_io_handle_t output, audio_latency_mode_t mode) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setRequestedLatencyMode(output, mode);
+}
+
+status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
+        std::vector<audio_latency_mode_t>* modes) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getSupportedLatencyModes(output, modes);
+}
+
+status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setBluetoothVariableLatencyEnabled(enabled);
+}
+
+status_t AudioSystem::isBluetoothVariableLatencyEnabled(
+        bool *enabled) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->isBluetoothVariableLatencyEnabled(enabled);
+}
+
+status_t AudioSystem::supportsBluetoothVariableLatency(
+        bool *support) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->supportsBluetoothVariableLatency(support);
+}
+
+status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAudioPolicyConfig(config);
+}
+
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
 public:
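An illustrative sketch (not part of the patch) combining the latency-mode and Bluetooth variable-latency queries added above; AUDIO_LATENCY_MODE_LOW is assumed to be one of the modes declared in system/audio.h:

#include <algorithm>
#include <vector>
#include <media/AudioSystem.h>

using namespace android;

status_t requestLowLatencyIfSupported(audio_io_handle_t output) {
    std::vector<audio_latency_mode_t> modes;
    const status_t status = AudioSystem::getSupportedLatencyModes(output, &modes);
    if (status != NO_ERROR) {
        return status;
    }
    if (std::find(modes.begin(), modes.end(), AUDIO_LATENCY_MODE_LOW) == modes.end()) {
        return INVALID_OPERATION;  // the output does not advertise a low-latency mode
    }
    return AudioSystem::setRequestedLatencyMode(output, AUDIO_LATENCY_MODE_LOW);
}

bool bluetoothVariableLatencyUsable() {
    bool supported = false, enabled = false;
    return AudioSystem::supportsBluetoothVariableLatency(&supported) == NO_ERROR && supported
            && AudioSystem::isBluetoothVariableLatencyEnabled(&enabled) == NO_ERROR && enabled;
}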
@@ -2427,6 +2626,84 @@
     return af->getAAudioHardwareBurstMinUsec();
 }
 
+status_t AudioSystem::getSupportedMixerAttributes(
+        audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    std::vector<media::AudioMixerAttributesInternal> _aidlReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getSupportedMixerAttributes(portIdAidl, &_aidlReturn)));
+    *mixerAttrs = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<audio_mixer_attributes_t>>(
+                    _aidlReturn,
+                    aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t));
+    return OK;
+}
+
+status_t AudioSystem::setPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                  audio_port_handle_t portId,
+                                                  uid_t uid,
+                                                  const audio_mixer_attributes_t *mixerAttr) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    media::AudioMixerAttributesInternal mixerAttrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(*mixerAttr));
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+
+    return statusTFromBinderStatus(
+            aps->setPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl, mixerAttrAidl));
+}
+
+status_t AudioSystem::getPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        std::optional<audio_mixer_attributes_t> *mixerAttr) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    std::optional<media::AudioMixerAttributesInternal> _aidlReturn;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getPreferredMixerAttributes(attrAidl, portIdAidl, &_aidlReturn)));
+
+    if (_aidlReturn.has_value()) {
+        *mixerAttr = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+                        _aidlReturn.value()));
+    }
+    return NO_ERROR;
+}
+
+status_t AudioSystem::clearPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                    audio_port_handle_t portId,
+                                                    uid_t uid) {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+    int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
+    return statusTFromBinderStatus(
+            aps->clearPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl));
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
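A sketch (not part of the patch) walking through the new preferred-mixer-attributes round trip; `attr`, `portId` and `uid` are assumed to be supplied by the caller:

#include <optional>
#include <vector>
#include <media/AudioSystem.h>

using namespace android;

status_t exercisePreferredMixerAttributes(const audio_attributes_t& attr,
                                          audio_port_handle_t portId, uid_t uid) {
    std::vector<audio_mixer_attributes_t> supported;
    status_t status = AudioSystem::getSupportedMixerAttributes(portId, &supported);
    if (status != OK || supported.empty()) {
        return status != OK ? status : INVALID_OPERATION;
    }
    // Register the first supported configuration as this uid's preference.
    status = AudioSystem::setPreferredMixerAttributes(&attr, portId, uid, &supported[0]);
    if (status != OK) {
        return status;
    }
    // Read it back (the optional stays empty when no preference is recorded),
    // then clear it again to restore the default.
    std::optional<audio_mixer_attributes_t> current;
    (void) AudioSystem::getPreferredMixerAttributes(&attr, portId, &current);
    return AudioSystem::clearPreferredMixerAttributes(&attr, portId, uid);
}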
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 6ab8339..f01b653 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -29,6 +29,7 @@
 #include <audio_utils/clock.h>
 #include <audio_utils/primitives.h>
 #include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
 #include <media/AudioTrack.h>
 #include <utils/Log.h>
 #include <private/media/AudioTrackShared.h>
@@ -42,7 +43,9 @@
 
 #define WAIT_PERIOD_MS                  10
 #define WAIT_STREAM_END_TIMEOUT_SEC     120
+
 static const int kMaxLoopCountNotifications = 32;
+static constexpr char kAudioServiceName[] = "audio";
 
 using ::android::aidl_utils::statusTFromBinderStatus;
 using ::android::base::StringPrintf;
@@ -325,45 +328,6 @@
         }
     };
 }
-
-AudioTrack::AudioTrack(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        size_t frameCount,
-        audio_output_flags_t flags,
-        legacy_callback_t callback,
-        void* user,
-        int32_t notificationFrames,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        const audio_offload_info_t *offloadInfo,
-        const AttributionSourceState& attributionSource,
-        const audio_attributes_t* pAttributes,
-        bool doNotReconnect,
-        float maxRequiredSpeed,
-        audio_port_handle_t selectedDeviceId)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mAudioTrackCallback(new AudioTrackCallback())
-{
-    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-    if (callback != nullptr) {
-        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
-    } else if (user) {
-        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
-    }
-    mSetParams = std::unique_ptr<SetParams>{new SetParams{
-            streamType, sampleRate, format, channelMask, frameCount, flags, mLegacyCallbackWrapper,
-            notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId,
-            transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
-            maxRequiredSpeed, selectedDeviceId}};
-}
-
 AudioTrack::AudioTrack(
         audio_stream_type_t streamType,
         uint32_t sampleRate,
@@ -397,44 +361,6 @@
                           doNotReconnect, maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
 }
 
-AudioTrack::AudioTrack(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        const sp<IMemory>& sharedBuffer,
-        audio_output_flags_t flags,
-        legacy_callback_t callback,
-        void* user,
-        int32_t notificationFrames,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        const audio_offload_info_t *offloadInfo,
-        const AttributionSourceState& attributionSource,
-        const audio_attributes_t* pAttributes,
-        bool doNotReconnect,
-        float maxRequiredSpeed)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mAudioTrackCallback(new AudioTrackCallback())
-{
-    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-    if (callback) {
-        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
-    } else if (user) {
-        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
-    }
-    mSetParams = std::unique_ptr<SetParams>{new SetParams{
-            streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
-            mLegacyCallbackWrapper, notificationFrames, sharedBuffer, false /*threadCanCallJava*/,
-            sessionId, transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
-            maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
-}
-
 void AudioTrack::onFirstRef() {
     if (mSetParams) {
         set(*mSetParams);
@@ -496,38 +422,6 @@
         mDeviceCallback.clear();
     }
 }
-
-status_t AudioTrack::set(
-        audio_stream_type_t streamType,
-        uint32_t sampleRate,
-        audio_format_t format,
-        audio_channel_mask_t channelMask,
-        size_t frameCount,
-        audio_output_flags_t flags,
-        legacy_callback_t callback,
-        void * user,
-        int32_t notificationFrames,
-        const sp<IMemory>& sharedBuffer,
-        bool threadCanCallJava,
-        audio_session_t sessionId,
-        transfer_type transferType,
-        const audio_offload_info_t *offloadInfo,
-        const AttributionSourceState& attributionSource,
-        const audio_attributes_t* pAttributes,
-        bool doNotReconnect,
-        float maxRequiredSpeed,
-        audio_port_handle_t selectedDeviceId)
-{
-    if (callback) {
-        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
-    } else if (user) {
-        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
-    }
-    return set(streamType, sampleRate,format, channelMask, frameCount, flags,
-               mLegacyCallbackWrapper, notificationFrames, sharedBuffer, threadCanCallJava,
-               sessionId, transferType, offloadInfo, attributionSource, pAttributes,
-               doNotReconnect, maxRequiredSpeed, selectedDeviceId);
-}
 status_t AudioTrack::set(
         audio_stream_type_t streamType,
         uint32_t sampleRate,
@@ -559,7 +453,7 @@
     std::string errorMessage;
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
-          "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
+          "flags %#x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
           __func__,
           streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
           sessionId, transferType, attributionSource.uid, attributionSource.pid);
@@ -1339,6 +1233,21 @@
     return mOriginalSampleRate;
 }
 
+uint32_t AudioTrack::getHalSampleRate() const
+{
+    return mAfSampleRate;
+}
+
+uint32_t AudioTrack::getHalChannelCount() const
+{
+    return mAfChannelCount;
+}
+
+audio_format_t AudioTrack::getHalFormat() const
+{
+    return mAfFormat;
+}
+
 status_t AudioTrack::setDualMonoMode(audio_dual_mono_mode_t mode)
 {
     AutoMutex lock(mLock);
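Mirroring the AudioRecord change, a brief sketch (not part of the patch) of the AudioTrack accessors added above, assuming `track` is an already-initialized AudioTrack:

#define LOG_TAG "AudioTrackHalConfigSketch"
#include <media/AudioTrack.h>
#include <utils/Log.h>

// Logs the configuration of the mixer/HAL-side stream the track is attached to.
void logTrackHalConfig(const android::sp<android::AudioTrack>& track) {
    ALOGV("HAL: %u Hz, %u channel(s), format %#x",
          track->getHalSampleRate(),
          track->getHalChannelCount(),
          track->getHalFormat());
}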
@@ -1357,7 +1266,7 @@
 status_t AudioTrack::getDualMonoMode(audio_dual_mono_mode_t* mode) const
 {
     AutoMutex lock(mLock);
-    media::AudioDualMonoMode mediaMode;
+    media::audio::common::AudioDualMonoMode mediaMode;
     const status_t status = statusTFromBinderStatus(mAudioTrack->getDualMonoMode(&mediaMode));
     if (status == NO_ERROR) {
         *mode = VALUE_OR_RETURN_STATUS(
@@ -1398,6 +1307,10 @@
                         legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
         if (status == NO_ERROR) {
             mPlaybackRate = playbackRate;
+        } else if (status == INVALID_OPERATION
+                && playbackRate.mSpeed == 1.0f && playbackRate.mPitch == 1.0f) {
+            // The HAL rejected the rate parameters, but the requested rate is the
+            // default (speed 1.0, pitch 1.0), so accept it locally.
+            mPlaybackRate = playbackRate;
+            return NO_ERROR;
         }
         return status;
     }
@@ -1468,7 +1381,7 @@
 {
     AutoMutex lock(mLock);
     if (isOffloadedOrDirect_l()) {
-        media::AudioPlaybackRate playbackRateTemp;
+        media::audio::common::AudioPlaybackRate playbackRateTemp;
         const status_t status = statusTFromBinderStatus(
                 mAudioTrack->getPlaybackRateParameters(&playbackRateTemp));
         if (status == NO_ERROR) { // update local version if changed.
@@ -1703,13 +1616,16 @@
     }
 
     AutoMutex lock(mLock);
+    // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
+    if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+        *position = 0;
+        return NO_ERROR;
+    }
     // FIXME: offloaded and direct tracks call into the HAL for render positions
     // for compressed/synced data; however, we use proxy position for pure linear pcm data
     // as we do not know the capability of the HAL for pcm position support and standby.
     // There may be some latency differences between the HAL position and the proxy position.
     if (isOffloadedOrDirect_l() && !isPurePcmData_l()) {
-        uint32_t dspFrames = 0;
-
         if (isOffloaded_l() && ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING))) {
             ALOGV("%s(%d): called in paused state, return cached position %u",
                 __func__, mPortId, mPausedPosition);
@@ -1717,13 +1633,15 @@
             return NO_ERROR;
         }
 
+        uint32_t dspFrames = 0;
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             uint32_t halFrames; // actually unused
-            (void) AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
             // FIXME: on getRenderPosition() error, we return OK with frame position 0.
+            if (AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames) != NO_ERROR) {
+                *position = 0;
+                return NO_ERROR;
+            }
         }
-        // FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)
-        // due to hardware latency. We leave this behavior for now.
         *position = dspFrames;
     } else {
         if (mCblk->mFlags & CBLK_INVALID) {
@@ -1731,11 +1649,9 @@
             // FIXME: for compatibility with the Java API we ignore the restoreTrack_l()
             // error here (e.g. DEAD_OBJECT) and return OK with the last recorded server position.
         }
-
-        // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
-        *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
-                0 : updateAndGetPosition_l().value();
+        *position = updateAndGetPosition_l().value();
     }
+
     return NO_ERROR;
 }
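With this change a stopped or flushed track reports position 0 without consulting the server or the HAL; a quick sketch (not part of the patch), with `track` assumed to be a valid AudioTrack:

#include <media/AudioTrack.h>

using namespace android;

uint32_t positionAfterStop(const sp<AudioTrack>& track) {
    track->stop();
    uint32_t position = 123;  // any stale value
    if (track->getPosition(&position) == NO_ERROR) {
        // position is now 0 for STATE_STOPPED / STATE_FLUSHED tracks, for both
        // PCM and offloaded/direct outputs.
    }
    return position;
}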
 
@@ -1789,13 +1705,21 @@
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
-            __func__, mPortId, deviceId, mSelectedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
+            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
-        if (mStatus == NO_ERROR) {
-            android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-            mProxy->interrupt();
+        if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
+            if (isPlaying_l()) {
+                android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                mProxy->interrupt();
+            } else {
+                // if the track is idle, try to restore now and
+                // defer to next start if not possible
+                if (restoreTrack_l("setOutputDevice") != OK) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                }
+            }
         }
     }
     return NO_ERROR;
@@ -1990,6 +1914,8 @@
 
     mAfFrameCount = output.afFrameCount;
     mAfSampleRate = output.afSampleRate;
+    mAfChannelCount = audio_channel_count_from_out_mask(output.afChannelMask);
+    mAfFormat = output.afFormat;
     mAfLatency = output.afLatencyMs;
 
     mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
@@ -2054,6 +1980,9 @@
     }
 
     mPortId = output.portId;
+    // notify the upper layers about the new portId
+    triggerPortIdUpdate_l();
+
     // We retain a copy of the I/O handle, but don't own the reference
     mOutput = output.outputId;
     mRefreshRemaining = true;
@@ -2267,7 +2196,6 @@
         // obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to
         // keep them from going away if another thread re-creates the track during obtainBuffer()
         sp<AudioTrackClientProxy> proxy;
-        sp<IMemory> iMem;
 
         {   // start of lock scope
             AutoMutex lock(mLock);
@@ -2293,8 +2221,9 @@
             }
 
             // Keep the extra references
+            mProxyObtainBufferRef = mProxy;
             proxy = mProxy;
-            iMem = mCblkMemory;
+            mCblkMemoryObtainBufferRef = mCblkMemory;
 
             if (mState == STATE_STOPPING) {
                 status = -EINTR;
@@ -2342,6 +2271,8 @@
     buffer.mFrameCount = stepCount;
     buffer.mRaw = audioBuffer->raw;
 
+    sp<IMemory> tempMemory;
+    sp<AudioTrackClientProxy> tempProxy;
     AutoMutex lock(mLock);
     if (audioBuffer->sequence != mSequence) {
         // This Buffer came from a different IAudioTrack instance, so ignore the releaseBuffer
@@ -2351,7 +2282,12 @@
     }
     mReleased += stepCount;
     mInUnderrun = false;
-    mProxy->releaseBuffer(&buffer);
+    mProxyObtainBufferRef->releaseBuffer(&buffer);
+    // The extra references to the shared memory and the proxy taken in `obtainBuffer` are no
+    // longer needed after calling `releaseBuffer`. Move them into the temporary strong
+    // pointers declared above the lock so that they are cleared outside `releaseBuffer`.
+    tempMemory = std::move(mCblkMemoryObtainBufferRef);
+    tempProxy = std::move(mProxyObtainBufferRef);
 
     // restart track if it was disabled by audioflinger due to previous underrun
     restartIfDisabled();
@@ -2589,11 +2525,22 @@
         timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;
         timeout.tv_nsec = 0;
 
+        // Use timestamp progress to safeguard we don't falsely time out.
+        AudioTimestamp timestamp{};
+        const bool isTimestampValid = getTimestamp(timestamp) == OK;
+        const auto frameCount = isTimestampValid ? timestamp.mPosition : 0;
+
         status_t status = proxy->waitStreamEndDone(&timeout);
         switch (status) {
+        case TIMED_OUT:
+            if (isTimestampValid
+                    && getTimestamp(timestamp) == OK && frameCount != timestamp.mPosition) {
+                ALOGD("%s: waitStreamEndDone retrying", __func__);
+                break;  // we retry again (and recheck possible state change).
+            }
+            [[fallthrough]];
         case NO_ERROR:
         case DEAD_OBJECT:
-        case TIMED_OUT:
             if (status != DEAD_OBJECT) {
                 // for DEAD_OBJECT, we do not send a EVENT_STREAM_END after stop();
                 // instead, the application should handle the EVENT_NEW_IAUDIOTRACK.
@@ -2611,6 +2558,7 @@
                 }
             }
             if (waitStreamEnd && status != DEAD_OBJECT) {
+               ALOGV("%s: waitStreamEndDone complete", __func__);
                return NS_INACTIVE;
             }
             break;
@@ -3616,12 +3564,34 @@
     if (mPlayerIId == playerIId) return;
 
     mPlayerIId = playerIId;
+    triggerPortIdUpdate_l();
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID)
         .set(AMEDIAMETRICS_PROP_PLAYERIID, playerIId)
         .record();
 }
 
+void AudioTrack::triggerPortIdUpdate_l() {
+    if (mAudioManager == nullptr) {
+        // use checkService() to avoid blocking if audio service is not up yet
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16(kAudioServiceName));
+        if (binder == nullptr) {
+            ALOGE("%s(%d): binding to audio service failed.",
+                  __func__,
+                  mPlayerIId);
+            return;
+        }
+
+        mAudioManager = interface_cast<IAudioManager>(binder);
+    }
+
+    // The first time the track is created we do not yet have a valid piid.
+    if (mPlayerIId != PLAYER_PIID_INVALID) {
+        mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, mPortId);
+    }
+}
+
 status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
 {
 
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index ab95246..978599e 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -23,7 +23,6 @@
 
 #include <media/AidlConversion.h>
 #include <media/AudioVolumeGroup.h>
-#include <media/AudioAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 292d92f..7b33a00 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -22,10 +22,10 @@
 
 #include <stdint.h>
 #include <sys/types.h>
-
+#include "IAudioFlinger.h"
 #include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
-#include "IAudioFlinger.h"
+#include <system/thread_defs.h>
 
 namespace android {
 
@@ -112,6 +112,10 @@
     aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
     aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
     aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
+    aidl.afChannelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(afChannelMask, false /*isInput*/));
+    aidl.afFormat = VALUE_OR_RETURN(
+            legacy2aidl_audio_format_t_AudioFormatDescription(afFormat));
     aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
     aidl.audioTrack = audioTrack;
@@ -135,6 +139,11 @@
     legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
     legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
     legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
+    legacy.afChannelMask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.afChannelMask,
+                                                                false /*isInput*/));
+    legacy.afFormat = VALUE_OR_RETURN(
+            aidl2legacy_AudioFormatDescription_audio_format_t(aidl.afFormat));
     legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
     legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.audioTrack = aidl.audioTrack;
@@ -199,6 +208,8 @@
     aidl.audioRecord = audioRecord;
     aidl.serverConfig = VALUE_OR_RETURN(
             legacy2aidl_audio_config_base_t_AudioConfigBase(serverConfig, true /*isInput*/));
+    aidl.halConfig = VALUE_OR_RETURN(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(halConfig, true /*isInput*/));
     return aidl;
 }
 
@@ -221,6 +232,8 @@
     legacy.audioRecord = aidl.audioRecord;
     legacy.serverConfig = VALUE_OR_RETURN(
             aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.serverConfig, true /*isInput*/));
+    legacy.halConfig = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.halConfig, true /*isInput*/));
     return legacy;
 }
 
@@ -480,12 +493,6 @@
     return statusTFromBinderStatus(mDelegate->closeInput(inputAidl));
 }
 
-status_t AudioFlingerClientAdapter::invalidateStream(audio_stream_type_t stream) {
-    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-    return statusTFromBinderStatus(mDelegate->invalidateStream(streamAidl));
-}
-
 status_t AudioFlingerClientAdapter::setVoiceVolume(float volume) {
     return statusTFromBinderStatus(mDelegate->setVoiceVolume(volume));
 }
@@ -666,17 +673,19 @@
 }
 
 status_t AudioFlingerClientAdapter::getAudioPort(struct audio_port_v7* port) {
-    media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
-    media::AudioPort aidlRet;
+    media::AudioPortFw portAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*port));
+    media::AudioPortFw aidlRet;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mDelegate->getAudioPort(portAidl, &aidlRet)));
-    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(aidlRet));
+    *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(aidlRet));
     return OK;
 }
 
 status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
                                                      audio_patch_handle_t* handle) {
-    media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
+    media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_patch_AudioPatchFw(*patch));
     int32_t aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(
                     AUDIO_PATCH_HANDLE_NONE));
     if (handle != nullptr) {
@@ -697,18 +706,18 @@
 
 status_t AudioFlingerClientAdapter::listAudioPatches(unsigned int* num_patches,
                                                      struct audio_patch* patches) {
-    std::vector<media::AudioPatch> aidlRet;
+    std::vector<media::AudioPatchFw> aidlRet;
     int32_t maxPatches = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             mDelegate->listAudioPatches(maxPatches, &aidlRet)));
     *num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(aidlRet.size()));
     return convertRange(aidlRet.begin(), aidlRet.end(), patches,
-                        aidl2legacy_AudioPatch_audio_patch);
+                        aidl2legacy_AudioPatchFw_audio_patch);
 }
 
 status_t AudioFlingerClientAdapter::setAudioPortConfig(const struct audio_port_config* config) {
-    media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_port_config_AudioPortConfig(*config));
+    media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
     return statusTFromBinderStatus(mDelegate->setAudioPortConfig(configAidl));
 }
 
@@ -744,14 +753,11 @@
 }
 
 status_t
-AudioFlingerClientAdapter::getMicrophones(std::vector<media::MicrophoneInfo>* microphones) {
-    std::vector<media::MicrophoneInfoData> aidlRet;
-    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            mDelegate->getMicrophones(&aidlRet)));
+AudioFlingerClientAdapter::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
+    std::vector<media::MicrophoneInfoFw> aidlRet;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->getMicrophones(&aidlRet)));
     if (microphones != nullptr) {
-        *microphones = VALUE_OR_RETURN_STATUS(
-                convertContainer<std::vector<media::MicrophoneInfo>>(aidlRet,
-                         media::aidl2legacy_MicrophoneInfo));
+        *microphones = std::move(aidlRet);
     }
     return OK;
 }
@@ -804,16 +810,99 @@
 }
 
 status_t AudioFlingerClientAdapter::setDeviceConnectedState(
-        const struct audio_port_v7 *port, bool connected) {
-    media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_port_v7_AudioPort(*port));
-    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
+        const struct audio_port_v7 *port, media::DeviceConnectedState state) {
+    media::AudioPortFw aidlPort = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPortFw(*port));
+    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, state));
+}
+
+status_t AudioFlingerClientAdapter::setSimulateDeviceConnections(bool enabled) {
+    return statusTFromBinderStatus(mDelegate->setSimulateDeviceConnections(enabled));
+}
+
+status_t AudioFlingerClientAdapter::setRequestedLatencyMode(
+        audio_io_handle_t output, audio_latency_mode_t mode) {
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    media::audio::common::AudioLatencyMode modeAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
+    return statusTFromBinderStatus(mDelegate->setRequestedLatencyMode(outputAidl, modeAidl));
+}
+
+status_t AudioFlingerClientAdapter::getSupportedLatencyModes(
+        audio_io_handle_t output, std::vector<audio_latency_mode_t>* modes) {
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    std::vector<media::audio::common::AudioLatencyMode> modesAidl;
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->getSupportedLatencyModes(outputAidl, &modesAidl)));
+
+    *modes = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<audio_latency_mode_t>>(modesAidl,
+                     aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
+
+    return NO_ERROR;
+}
+
+status_t AudioFlingerClientAdapter::setBluetoothVariableLatencyEnabled(bool enabled) {
+    return statusTFromBinderStatus(mDelegate->setBluetoothVariableLatencyEnabled(enabled));
+}
+
+status_t AudioFlingerClientAdapter::isBluetoothVariableLatencyEnabled(bool* enabled) {
+    if (enabled == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->isBluetoothVariableLatencyEnabled(enabled)));
+
+    return NO_ERROR;
+}
+
+status_t AudioFlingerClientAdapter::supportsBluetoothVariableLatency(bool* support) {
+    if (support == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mDelegate->supportsBluetoothVariableLatency(support)));
+
+    return NO_ERROR;
+}
+
+status_t AudioFlingerClientAdapter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback> &callback,
+        sp<media::ISoundDose>* soundDose) {
+    return statusTFromBinderStatus(mDelegate->getSoundDoseInterface(callback, soundDose));
+}
+
+status_t AudioFlingerClientAdapter::invalidateTracks(
+        const std::vector<audio_port_handle_t>& portIds) {
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    return statusTFromBinderStatus(mDelegate->invalidateTracks(portIdsAidl));
+}
+
+status_t AudioFlingerClientAdapter::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->getAudioPolicyConfig(config)));
+
+    return NO_ERROR;
 }
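For reference, a sketch (not part of the patch) of invalidating client tracks by port id through IAudioFlinger, the replacement for the removed per-stream invalidateStream() entry point; `portId` is a hypothetical track port handle:

#include <vector>
#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>

using namespace android;

status_t invalidateOneTrack(audio_port_handle_t portId) {
    const sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
    if (af == nullptr) {
        return NO_INIT;
    }
    return af->invalidateTracks({portId});
}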
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
-        const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {}
+        const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+}
 
 status_t AudioFlingerServerAdapter::onTransact(uint32_t code,
                                                const Parcel& data,
@@ -1040,12 +1129,6 @@
     return Status::fromStatusT(mDelegate->closeInput(inputLegacy));
 }
 
-Status AudioFlingerServerAdapter::invalidateStream(AudioStreamType stream) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    return Status::fromStatusT(mDelegate->invalidateStream(streamLegacy));
-}
-
 Status AudioFlingerServerAdapter::setVoiceVolume(float volume) {
     return Status::fromStatusT(mDelegate->setVoiceVolume(volume));
 }
@@ -1182,17 +1265,17 @@
     return Status::fromStatusT(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
 }
 
-Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPort& port,
-                                               media::AudioPort* _aidl_return) {
-    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPortFw& port,
+                                               media::AudioPortFw* _aidl_return) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPortFw_audio_port_v7(port));
     RETURN_BINDER_IF_ERROR(mDelegate->getAudioPort(&portLegacy));
-    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPort(portLegacy));
+    *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPortFw(portLegacy));
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
+Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatchFw& patch,
                                                    int32_t* _aidl_return) {
-    audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
+    audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatchFw_audio_patch(patch));
     audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_int32_t_audio_patch_handle_t(*_aidl_return));
     RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
@@ -1207,7 +1290,7 @@
 }
 
 Status AudioFlingerServerAdapter::listAudioPatches(int32_t maxCount,
-                            std::vector<media::AudioPatch>* _aidl_return) {
+                            std::vector<media::AudioPatchFw>* _aidl_return) {
     unsigned int count = VALUE_OR_RETURN_BINDER(convertIntegral<unsigned int>(maxCount));
     count = std::min(count, static_cast<unsigned int>(MAX_ITEMS_PER_LIST));
     std::unique_ptr<audio_patch[]> patchesLegacy(new audio_patch[count]);
@@ -1215,13 +1298,13 @@
     RETURN_BINDER_IF_ERROR(convertRange(&patchesLegacy[0],
                            &patchesLegacy[count],
                            std::back_inserter(*_aidl_return),
-                           legacy2aidl_audio_patch_AudioPatch));
+                           legacy2aidl_audio_patch_AudioPatchFw));
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfig& config) {
+Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfigFw& config) {
     audio_port_config configLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioPortConfig_audio_port_config(config));
+            aidl2legacy_AudioPortConfigFw_audio_port_config(config));
     return Status::fromStatusT(mDelegate->setAudioPortConfig(&configLegacy));
 }
 
@@ -1252,11 +1335,8 @@
 }
 
 Status AudioFlingerServerAdapter::getMicrophones(
-        std::vector<media::MicrophoneInfoData>* _aidl_return) {
-    std::vector<media::MicrophoneInfo> resultLegacy;
-    RETURN_BINDER_IF_ERROR(mDelegate->getMicrophones(&resultLegacy));
-    *_aidl_return = VALUE_OR_RETURN_BINDER(convertContainer<std::vector<media::MicrophoneInfoData>>(
-            resultLegacy, media::legacy2aidl_MicrophoneInfo));
+        std::vector<media::MicrophoneInfoFw>* _aidl_return) {
+    RETURN_BINDER_IF_ERROR(mDelegate->getMicrophones(_aidl_return));
     return Status::ok();
 }
 
@@ -1299,9 +1379,68 @@
 }
 
 Status AudioFlingerServerAdapter::setDeviceConnectedState(
-        const media::AudioPort& port, bool connected) {
-    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
-    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
+        const media::AudioPortFw& port, media::DeviceConnectedState state) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPortFw_audio_port_v7(port));
+    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, state));
+}
+
+Status AudioFlingerServerAdapter::setSimulateDeviceConnections(bool enabled) {
+    return Status::fromStatusT(mDelegate->setSimulateDeviceConnections(enabled));
+}
+
+Status AudioFlingerServerAdapter::setRequestedLatencyMode(
+        int32_t output, media::audio::common::AudioLatencyMode modeAidl) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    audio_latency_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_AudioLatencyMode_audio_latency_mode_t(modeAidl));
+    return Status::fromStatusT(mDelegate->setRequestedLatencyMode(
+            outputLegacy, modeLegacy));
+}
+
+Status AudioFlingerServerAdapter::getSupportedLatencyModes(
+        int output, std::vector<media::audio::common::AudioLatencyMode>* _aidl_return) {
+    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
+            aidl2legacy_int32_t_audio_io_handle_t(output));
+    std::vector<audio_latency_mode_t> modesLegacy;
+
+    RETURN_BINDER_IF_ERROR(mDelegate->getSupportedLatencyModes(outputLegacy, &modesLegacy));
+
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<media::audio::common::AudioLatencyMode>>(
+                    modesLegacy, legacy2aidl_audio_latency_mode_t_AudioLatencyMode));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setBluetoothVariableLatencyEnabled(bool enabled) {
+    return Status::fromStatusT(mDelegate->setBluetoothVariableLatencyEnabled(enabled));
+}
+
+Status AudioFlingerServerAdapter::isBluetoothVariableLatencyEnabled(bool *enabled) {
+    return Status::fromStatusT(mDelegate->isBluetoothVariableLatencyEnabled(enabled));
+}
+
+Status AudioFlingerServerAdapter::supportsBluetoothVariableLatency(bool *support) {
+    return Status::fromStatusT(mDelegate->supportsBluetoothVariableLatency(support));
+}
+
+Status AudioFlingerServerAdapter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback,
+        sp<media::ISoundDose>* soundDose)
+{
+    return Status::fromStatusT(mDelegate->getSoundDoseInterface(callback, soundDose));
+}
+
+Status AudioFlingerServerAdapter::invalidateTracks(const std::vector<int32_t>& portIds) {
+    std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<audio_port_handle_t>>(
+                    portIds, aidl2legacy_int32_t_audio_port_handle_t));
+    RETURN_BINDER_IF_ERROR(mDelegate->invalidateTracks(portIdsLegacy));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) {
+    return Status::fromStatusT(mDelegate->getAudioPolicyConfig(_aidl_return));
 }
 
 } // namespace android
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index 446a58c..651255a 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -58,6 +58,20 @@
     }
 }
 
+void PlayerBase::triggerPortIdUpdate(audio_port_handle_t portId) const {
+    if (mAudioManager == nullptr) {
+        ALOGE("%s: no audio service, player %d will not update portId %d",
+              __func__,
+              mPIId,
+              portId);
+        return;
+    }
+
+    if (mPIId != PLAYER_PIID_INVALID && portId != AUDIO_PORT_HANDLE_NONE) {
+        mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, portId);
+    }
+}
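A minimal sketch of how this helper is intended to be used, assuming a TrackPlayerBase-style subclass and an already created AudioTrack (the subclass and method names below are illustrative only, not part of the change):

    // Hypothetical call site in a PlayerBase subclass: report the port id to
    // AudioService once the underlying track exists.
    void MyTrackPlayer::onTrackCreated(const sp<AudioTrack>& track) {
        if (track != nullptr) {
            triggerPortIdUpdate(track->getPortId());
        }
    }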
+
 void PlayerBase::baseDestroy() {
     serviceReleasePlayer();
     if (mAudioManager != 0) {
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 520f09c..60b08fa 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -158,6 +158,11 @@
                     convertIntegral<int>(UNION_GET(aidl, userId).value()));
             *rule |= RULE_MATCH_USERID;
             return legacy;
+        case media::AudioMixMatchCriterionValue::audioSessionId:
+            legacy.mAudioSessionId = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_session_t(UNION_GET(aidl, audioSessionId).value()));
+            *rule |= RULE_MATCH_AUDIO_SESSION_ID;
+            return legacy;
     }
     return unexpected(BAD_VALUE);
 }
@@ -185,7 +190,10 @@
         case RULE_MATCH_USERID:
             UNION_SET(aidl, userId, VALUE_OR_RETURN(convertReinterpret<uint32_t>(legacy.mUserId)));
             break;
-
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            UNION_SET(aidl, audioSessionId,
+                VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.mAudioSessionId)));
+            break;
         default:
             return unexpected(BAD_VALUE);
     }
@@ -464,4 +472,51 @@
     return unexpected(BAD_VALUE);
 }
 
+ConversionResult<audio_mixer_behavior_t>
+aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(media::AudioMixerBehavior aidl) {
+    switch (aidl) {
+        case media::AudioMixerBehavior::DEFAULT:
+            return AUDIO_MIXER_BEHAVIOR_DEFAULT;
+        case media::AudioMixerBehavior::BIT_PERFECT:
+            return AUDIO_MIXER_BEHAVIOR_BIT_PERFECT;
+        case media::AudioMixerBehavior::INVALID:
+            return AUDIO_MIXER_BEHAVIOR_INVALID;
+    }
+    return unexpected(BAD_VALUE);
+}
+ConversionResult<media::AudioMixerBehavior>
+legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(audio_mixer_behavior_t legacy) {
+    switch (legacy) {
+        case AUDIO_MIXER_BEHAVIOR_DEFAULT:
+            return media::AudioMixerBehavior::DEFAULT;
+        case AUDIO_MIXER_BEHAVIOR_BIT_PERFECT:
+            return media::AudioMixerBehavior::BIT_PERFECT;
+        case AUDIO_MIXER_BEHAVIOR_INVALID:
+            return media::AudioMixerBehavior::INVALID;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_mixer_attributes_t>
+aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+        const media::AudioMixerAttributesInternal& aidl) {
+    audio_mixer_attributes_t legacy = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+    legacy.config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config, false /*isInput*/));
+    legacy.mixer_behavior = VALUE_OR_RETURN(
+            aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(aidl.mixerBehavior));
+    return legacy;
+}
+ConversionResult<media::AudioMixerAttributesInternal>
+legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(
+        const audio_mixer_attributes& legacy) {
+    media::AudioMixerAttributesInternal aidl;
+    aidl.config = VALUE_OR_RETURN(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(legacy.config, false /*isInput*/));
+    aidl.mixerBehavior = VALUE_OR_RETURN(
+            legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(legacy.mixer_behavior));
+    return aidl;
+}
+
+
 }  // namespace android
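Since the new mixer-behavior conversions are symmetric, a round-trip helper in the same style as the functions above (using only the VALUE_OR_RETURN macro already employed in this file) can serve as a sanity check; this sketch is illustrative and not part of the change:

    // Hypothetical round-trip check: legacy -> AIDL -> legacy should be the identity.
    ConversionResult<audio_mixer_behavior_t> roundTripMixerBehavior(audio_mixer_behavior_t legacy) {
        const media::AudioMixerBehavior aidl = VALUE_OR_RETURN(
                legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(legacy));
        return aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(aidl);
    }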
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 3751f80..60bb4f0 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -4,12 +4,47 @@
       "name": "audio_aidl_conversion_tests"
     },
     {
+      "name": "audio_aidl_status_tests"
+    },
+    {
+      "name": "audiorecord_tests"
+    },
+    {
+      "name": "audioeffect_tests"
+    },
+    {
+      "name": "audiorouting_tests"
+    },
+    {
+      "name": "audioclient_serialization_tests"
+    },
+    {
+      "name": "trackplayerbase_tests"
+    },
+    {
+      "name": "audiosystem_tests"
+    },
+    {
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
+  ],
+  "postsubmit": [
+    {
+       "name": "audioeffect_analysis"
+    }
   ]
 }
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index f968a4b..9c4ccb8 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1027,7 +1027,7 @@
     if (property_get("gsm.operator.iso-country", value, "") == 0) {
         property_get("gsm.sim.operator.iso-country", value, "");
     }
-    // If dual sim device has two SIM cards inserted and is not registerd to any network,
+    // If dual sim device has two SIM cards inserted and is not registered to any network,
     // "," is set to "gsm.operator.iso-country" prop.
     // In this case, "gsm.sim.operator.iso-country" prop should be used.
     if (strlen(value) == 1 && strstr(value, ",") != NULL) {
@@ -1306,6 +1306,7 @@
         streamType = AUDIO_STREAM_DTMF;
     }
     attr = AudioSystem::streamTypeToAttributes(streamType);
+    attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_LOW_LATENCY);
 
     const size_t frameCount = mProcessSize;
     status_t status = mpAudioTrack->set(
@@ -1314,7 +1315,7 @@
             AUDIO_FORMAT_PCM_16_BIT,
             AUDIO_CHANNEL_OUT_MONO,
             frameCount,
-            AUDIO_OUTPUT_FLAG_FAST,
+            AUDIO_OUTPUT_FLAG_NONE,
             wp<AudioTrack::IAudioTrackCallback>::fromExisting(this),
             0,    // notificationFrames
             0,    // sharedBuffer
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/VolumeGroupAttributes.cpp
similarity index 68%
rename from media/libaudioclient/AudioAttributes.cpp
rename to media/libaudioclient/VolumeGroupAttributes.cpp
index 260c06c..530e73f 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/VolumeGroupAttributes.cpp
@@ -14,33 +14,38 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioAttributes"
+#define LOG_TAG "VolumeGroupAttributes"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
 #include <binder/Parcel.h>
 
 #include <media/AidlConversion.h>
-#include <media/AudioAttributes.h>
+#include <media/AudioProductStrategy.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
 namespace android {
 
-status_t AudioAttributes::readFromParcel(const Parcel* parcel) {
+int VolumeGroupAttributes::matchesScore(const audio_attributes_t &attributes) const {
+    return AudioProductStrategy::attributesMatchesScore(mAttributes, attributes);
+}
+
+status_t VolumeGroupAttributes::readFromParcel(const Parcel* parcel) {
     media::AudioAttributesEx aidl;
     RETURN_STATUS_IF_ERROR(aidl.readFromParcel(parcel));
-    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_AudioAttributes(aidl));
+    *this = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(aidl));
     return OK;
 }
 
-status_t AudioAttributes::writeToParcel(Parcel* parcel) const {
+status_t VolumeGroupAttributes::writeToParcel(Parcel* parcel) const {
     media::AudioAttributesEx aidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioAttributes_AudioAttributesEx(*this));
+            legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(*this));
     return aidl.writeToParcel(parcel);
 }
 
 ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy) {
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy) {
     media::AudioAttributesEx aidl;
     aidl.attributes = VALUE_OR_RETURN(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(legacy.getAttributes()));
@@ -50,9 +55,9 @@
     return aidl;
 }
 
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl) {
-    return AudioAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl) {
+    return VolumeGroupAttributes(VALUE_OR_RETURN(aidl2legacy_int32_t_volume_group_t(aidl.groupId)),
                            VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
                                    aidl.streamType)),
                            VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(
diff --git a/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl b/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl
deleted file mode 100644
index f6220c2..0000000
--- a/media/libaudioclient/aidl/android/media/AudioDualMonoMode.aidl
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-// TODO(b/175167149): Reconcile AudioDualMonoMode with framework-media-sources
-
-@Backing(type="int")
-enum AudioDualMonoMode {
-    OFF = 0,
-    LR = 1,
-    LL = 2,
-    RR = 3,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioHalVersion.aidl b/media/libaudioclient/aidl/android/media/AudioHalVersion.aidl
new file mode 100644
index 0000000..49048040
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioHalVersion.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The audio HAL version definition.
+ *
+ * {@hide}
+ */
+parcelable AudioHalVersion {
+
+    @Backing(type="int")
+    enum Type {
+        /**
+         * Indicate the audio HAL is implemented with HIDL (HAL interface definition language).
+         * @see <a href="https://source.android.com/docs/core/architecture/hidl/">HIDL</a>
+         */
+        HIDL = 0,
+
+        /**
+         * Indicate the audio HAL is implemented with AIDL (Android Interface Definition Language).
+         * @see <a href="https://source.android.com/docs/core/architecture/aidl/">AIDL</a>
+         */
+        AIDL
+    }
+
+    Type type = Type.HIDL;
+
+    /**
+     * Major version number.
+     */
+    int major;
+
+    /**
+     * Minor version number.
+     */
+    int minor;
+}
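For clients that need to gate behavior on the HAL version, a comparison can treat (type, major, minor) lexicographically within a given type; the helper below is an illustrative sketch only (the function name is an assumption, and the CPP-backend field names mirror the parcelable above):

    // Hypothetical comparison helper for the AudioHalVersion parcelable.
    bool isHalAtLeast(const media::AudioHalVersion& v,
                      media::AudioHalVersion::Type type, int32_t major, int32_t minor) {
        return v.type == type &&
                (v.major > major || (v.major == major && v.minor >= minor));
    }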
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioHwModule.aidl
similarity index 69%
copy from media/libaudioclient/aidl/android/media/AudioPort.aidl
copy to media/libaudioclient/aidl/android/media/AudioHwModule.aidl
index ff177c0..9251400 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioHwModule.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,13 +16,16 @@
 
 package android.media;
 
-import android.media.AudioPortSys;
 import android.media.audio.common.AudioPort;
+import android.media.AudioRoute;
 
-/**
+/*
+ * A representation of a HAL module configuration.
  * {@hide}
  */
-parcelable AudioPort {
-    AudioPort hal;
-    AudioPortSys sys;
+parcelable AudioHwModule {
+    int /* audio_module_handle_t */ handle;
+    @utf8InCpp String name;
+    AudioPort[] ports;
+    AudioRoute[] routes;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
index b01f902..5dd898c 100644
--- a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
 
@@ -26,7 +26,7 @@
 parcelable AudioIoDescriptor {
     /** Interpreted as audio_io_handle_t. */
     int ioHandle;
-    AudioPatch patch;
+    AudioPatchFw patch;
     boolean isInput;
     int samplingRate;
     AudioFormatDescription format;
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
index 921a93a..0f373a2 100644
--- a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
@@ -28,4 +28,6 @@
     /** Interpreted as uid_t. */
     int uid;
     int userId;
+    /** Interpreted as audio_session_t. */
+    int audioSessionId;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl
new file mode 100644
index 0000000..ed25060
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixerAttributesInternal.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioMixerBehavior;
+import android.media.audio.common.AudioConfigBase;
+
+/**
+ * This class is used to contain information about an audio mixer.
+ * The "Internal" suffix of this type name is to disambiguate it from the
+ * android.media.AudioMixerAttributes SDK type.
+ *
+ * {@hide}
+ */
+parcelable AudioMixerAttributesInternal {
+    AudioConfigBase config;
+    AudioMixerBehavior mixerBehavior;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl b/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl
new file mode 100644
index 0000000..38f50d6
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioMixerBehavior.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Defines the mixer behavior that can be used when setting mixer attributes.
+ */
+@Backing(type="int")
+enum AudioMixerBehavior {
+    /**
+     * The mixer behavior is invalid.
+     */
+    INVALID = -1,
+    /**
+     * The mixer behavior that follows platform default behavior, which is mixing audio from
+     * different sources.
+     */
+    DEFAULT = 0,
+    /**
+     * The audio data in the mixer will be bit-perfect as long as possible.
+     */
+    BIT_PERFECT = 1,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
similarity index 74%
rename from media/libaudioclient/aidl/android/media/AudioPatch.aidl
rename to media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
index 8519faf..9ec3fa9 100644
--- a/media/libaudioclient/aidl/android/media/AudioPatch.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
@@ -16,17 +16,19 @@
 
 package android.media;
 
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
 
 /**
  * {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
+ * This is the framework version of AudioPatch; its sources and sinks use AudioPortConfigFw.
  */
-parcelable AudioPatch {
+parcelable AudioPatchFw {
     /**
      * Patch unique ID.
      * Interpreted as audio_patch_handle_t.
      */
     int id;
-    AudioPortConfig[] sources;
-    AudioPortConfig[] sinks;
+    AudioPortConfigFw[] sources;
+    AudioPortConfigFw[] sinks;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl b/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl
deleted file mode 100644
index e29d398..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPlaybackRate.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * The AudioPlaybackRate.
- *
- * See https://developer.android.com/reference/android/media/PlaybackParams.
- * TODO(b/175166815): Reconcile with framework-media-sources PlaybackParams.aidl.
- *       As this is used for native wire serialization, no need to define
- *       audio_timestretch_stretch_mode_t and audio_timestretch_fallback_mode_t enums
- *       until we attempt to unify with PlaybackParams.
- *
- * {@hide}
- */
-parcelable AudioPlaybackRate {
-    /** Speed of audio playback, >= 0.f, 1.f nominal (system limits are further restrictive) */
-    float speed;
-    /** Pitch of audio, >= 0.f, 1.f nominal (system limits are further restrictive) */
-    float pitch;
-    /** Interpreted as audio_timestretch_stretch_mode_t */
-    int stretchMode;
-    /** Interpreted as audio_timestretch_fallback_mode_t */
-    int fallbackMode;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl
new file mode 100644
index 0000000..87767c2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPolicyConfig.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioHwModule;
+import android.media.SurroundSoundConfig;
+import android.media.audio.common.AudioHalEngineConfig;
+import android.media.audio.common.AudioMode;
+
+/*
+ * Audio policy configuration. Functionally replaces the APM XML file.
+ * {@hide}
+ */
+parcelable AudioPolicyConfig {
+    AudioHwModule[] modules;
+    AudioMode[] supportedModes;
+    SurroundSoundConfig surroundSoundConfig;
+    AudioHalEngineConfig engineConfig;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
similarity index 89%
rename from media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
index 3a4ca31..e7565d7 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
@@ -21,8 +21,9 @@
 
 /**
  * {@hide}
+ * Suffixed with Fw to avoid name conflict with SDK class.
  */
-parcelable AudioPortConfig {
+parcelable AudioPortConfigFw {
     AudioPortConfig hal;
     AudioPortConfigSys sys;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
index 0f5a9b6..24ec230 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
@@ -22,8 +22,4 @@
 parcelable AudioPortDeviceExtSys {
     /** Module the device is attached to. Interpreted as audio_module_handle_t. */
     int hwModule;
-    /** Bitmask, indexed by AudioEncapsulationMode. */
-    int encapsulationModes;
-    /** Bitmask, indexed by AudioEncapsulationMetadataType. */
-    int encapsulationMetadataTypes;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
index 2cdf4f6..d9c6df4 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
@@ -31,4 +31,6 @@
     AudioPortDeviceExtSys device;
     /** System-only parameters when the port is an audio mix. */
     AudioPortMixExtSys mix;
+    /** Framework audio session identifier. */
+    int session;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
similarity index 88%
rename from media/libaudioclient/aidl/android/media/AudioPort.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortFw.aidl
index ff177c0..5580e35 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
@@ -21,8 +21,9 @@
 
 /**
  * {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
  */
-parcelable AudioPort {
+parcelable AudioPortFw {
     AudioPort hal;
     AudioPortSys sys;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
index f3b5c19..756c469 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
@@ -17,7 +17,7 @@
 package android.media;
 
 import android.media.AudioGainSys;
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
 import android.media.AudioPortExtSys;
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
@@ -36,7 +36,7 @@
     /** System-only parameters for each AudioGain from 'port.gains'. */
     AudioGainSys[] gains;
     /** Current audio port configuration. */
-    AudioPortConfig activeConfig;
+    AudioPortConfigFw activeConfig;
     /** System-only extra parameters for 'port.ext'. */
     AudioPortExtSys ext;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioRoute.aidl b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
new file mode 100644
index 0000000..5ee2161
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioRoute.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.AudioRoute. Interfaces from the Core API do not
+ * support the CPP backend. This copy will be removed either by moving the
+ * AudioRoute from core to a.m.a.common or by switching the framework internal
+ * interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable AudioRoute {
+    /**
+     * The list of IDs of source audio ports ('AudioPort.id').
+     * There must be at least one source in a valid route and all IDs must be
+     * unique.
+     */
+    int[] sourcePortIds;
+    /** The ID of the sink audio port ('AudioPort.id'). */
+    int sinkPortId;
+    /** If set, only one source can be active, mixing is not supported. */
+    boolean isExclusive;
+}
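The constraints documented above (at least one source, all source port IDs unique) translate directly into a small validity check; the following is a sketch assuming the CPP backend representation of the parcelable, with an illustrative function name:

    #include <set>

    // Hypothetical validity check for an AudioRoute, mirroring the documented constraints.
    bool isValidRoute(const media::AudioRoute& route) {
        if (route.sourcePortIds.empty()) return false;          // need at least one source
        const std::set<int32_t> unique(route.sourcePortIds.begin(),
                                       route.sourcePortIds.end());
        return unique.size() == route.sourcePortIds.size();     // source IDs must be unique
    }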
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
index 7d159d0..5f1e288 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -45,4 +45,5 @@
     /** The newly created record. */
     @nullable IAudioRecord audioRecord;
     AudioConfigBase serverConfig;
+    AudioConfigBase halConfig;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index da6f454..42e0bb4 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.audio.common.AudioChannelLayout;
+import android.media.audio.common.AudioFormatDescription;
 import android.media.audio.common.AudioStreamType;
 import android.media.IAudioTrack;
 
@@ -38,6 +40,8 @@
     AudioStreamType streamType;
     long afFrameCount;
     int afSampleRate;
+    AudioChannelLayout afChannelMask;
+    AudioFormatDescription afFormat;
     int afLatencyMs;
     /** Interpreted as audio_io_handle_t. */
     int outputId;
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
similarity index 75%
copy from media/libaudioclient/aidl/android/media/AudioPort.aidl
copy to media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
index ff177c0..e401384 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/DeviceConnectedState.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,16 +13,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package android.media;
 
-import android.media.AudioPortSys;
-import android.media.audio.common.AudioPort;
-
 /**
  * {@hide}
  */
-parcelable AudioPort {
-    AudioPort hal;
-    AudioPortSys sys;
+@Backing(type="int")
+enum DeviceConnectedState {
+    CONNECTED = 0,
+    DISCONNECTED = 1,
+    PREPARE_TO_DISCONNECT = 2,
 }
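Since setDeviceConnectedState() previously took a boolean, existing call sites can map onto the new tri-state as sketched below; PREPARE_TO_DISCONNECT has no boolean equivalent and is new behavior. The helper name is illustrative:

    // Hypothetical mapping from the old 'connected' boolean to the new enum.
    media::DeviceConnectedState toDeviceConnectedState(bool connected) {
        return connected ? media::DeviceConnectedState::CONNECTED
                         : media::DeviceConnectedState::DISCONNECTED;
    }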
diff --git a/media/libaudioclient/aidl/android/media/EffectConfig.aidl b/media/libaudioclient/aidl/android/media/EffectConfig.aidl
new file mode 100644
index 0000000..5f62b73
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/EffectConfig.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioConfigBase;
+
+/**
+ * Describes configuration of an audio effect. Input and output
+ * audio configurations are described separately because the effect
+ * can perform transformations on channel layouts, for example.
+ *
+ * {@hide}
+ */
+parcelable EffectConfig {
+    /** Configuration of the audio input of the effect. */
+    AudioConfigBase inputCfg;
+    /** Configuration of the audio output of the effect. */
+    AudioConfigBase outputCfg;
+    /**
+     * Specifies whether the effect is instantiated on an input stream,
+     * e.g. on the input from a microphone.
+     */
+    boolean isOnInputStream;
+}
diff --git a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
index 9696124..347bf79 100644
--- a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.audio.common.AudioConfigBase;
+
 /**
  * {@hide}
  */
@@ -26,4 +28,6 @@
     int selectedDeviceId;
     /** Interpreted as audio_port_handle_t. */
     int portId;
+    /** The suggested config if the attempt to get an input fails. **/
+    AudioConfigBase config;
 }
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index f1848b6..9d44bb0 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -16,8 +16,9 @@
 
 package android.media;
 
+import android.media.audio.common.AudioConfigBase;
 import android.media.audio.common.AudioStreamType;
-
+import android.media.AudioAttributesInternal;
 /**
  * {@hide}
  */
@@ -33,4 +34,9 @@
     int[] secondaryOutputs;
     /** True if the track is connected to a spatializer mixer and actually spatialized */
     boolean isSpatialized;
+    /** The suggested audio config if the attempt to get an output fails. **/
+    AudioConfigBase configBase;
+    boolean isBitPerfect;
+    /** The corrected audio attributes. **/
+    AudioAttributesInternal attr;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
index 421c31c..f055cc4 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
@@ -18,6 +18,7 @@
 
 import android.media.AudioIoConfigEvent;
 import android.media.AudioIoDescriptor;
+import android.media.audio.common.AudioLatencyMode;
 
 /**
  * A callback interface for AudioFlinger.
@@ -27,4 +28,11 @@
 interface IAudioFlingerClient {
     oneway void ioConfigChanged(AudioIoConfigEvent event,
                                 in AudioIoDescriptor ioDesc);
+    /**
+     * Called when the latency modes supported on a given output stream change.
+     * output is the I/O handle of the output stream for which the change is signalled.
+     * latencyModes is the new list of supported latency modes (See AudioLatencyMode.aidl).
+     */
+    oneway void onSupportedLatencyModesChanged(
+            int output, in AudioLatencyMode[] latencyModes);
 }
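A client implementing the new callback only needs to override the generated method; the sketch below assumes a BnAudioFlingerClient subclass (the class name and logging are illustrative, not part of the change):

    // Hypothetical handler for the new supported-latency-modes callback.
    binder::Status MyAudioFlingerClient::onSupportedLatencyModesChanged(
            int32_t output, const std::vector<media::audio::common::AudioLatencyMode>& modes) {
        ALOGV("output %d now advertises %zu latency modes", output, modes.size());
        return binder::Status::ok();
    }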
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 10da028..6412810 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -16,9 +16,10 @@
 
 package android.media;
 
-import android.media.AudioPatch;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPatchFw;
+import android.media.AudioPolicyConfig;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
 import android.media.AudioUniqueIdUse;
 import android.media.AudioVibratorInfo;
 import android.media.CreateEffectRequest;
@@ -27,6 +28,7 @@
 import android.media.CreateRecordResponse;
 import android.media.CreateTrackRequest;
 import android.media.CreateTrackResponse;
+import android.media.DeviceConnectedState;
 import android.media.OpenInputRequest;
 import android.media.OpenInputResponse;
 import android.media.OpenOutputRequest;
@@ -35,11 +37,14 @@
 import android.media.IAudioFlingerClient;
 import android.media.IAudioRecord;
 import android.media.IAudioTrack;
-import android.media.MicrophoneInfoData;
+import android.media.ISoundDose;
+import android.media.ISoundDoseCallback;
+import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
+import android.media.audio.common.AudioLatencyMode;
 import android.media.audio.common.AudioMMapPolicyInfo;
 import android.media.audio.common.AudioMMapPolicyType;
 import android.media.audio.common.AudioMode;
@@ -131,8 +136,6 @@
     OpenInputResponse openInput(in OpenInputRequest request);
     void closeInput(int /* audio_io_handle_t */ input);
 
-    void invalidateStream(AudioStreamType stream);
-
     void setVoiceVolume(float volume);
 
     RenderPosition getRenderPosition(int /* audio_io_handle_t */ output);
@@ -181,18 +184,18 @@
     void setLowRamDevice(boolean isLowRamDevice, long totalMemory);
 
     /* Get attributes for a given audio port */
-    AudioPort getAudioPort(in AudioPort port);
+    AudioPortFw getAudioPort(in AudioPortFw port);
 
     /* Create an audio patch between several source and sink ports */
-    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch);
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch);
 
     /* Release an audio patch */
     void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
 
     /* List existing audio patches */
-    AudioPatch[] listAudioPatches(int maxCount);
+    AudioPatchFw[] listAudioPatches(int maxCount);
     /* Set audio port configuration */
-    void setAudioPortConfig(in AudioPortConfig config);
+    void setAudioPortConfig(in AudioPortConfigFw config);
 
     /* Get the HW synchronization source used for an audio session */
     int /* audio_hw_sync_t */ getAudioHwSyncForSession(int /* audio_session_t */ sessionId);
@@ -207,7 +210,7 @@
     long frameCountHAL(int /* audio_io_handle_t */ ioHandle);
 
     /* List available microphones and their characteristics */
-    MicrophoneInfoData[] getMicrophones();
+    MicrophoneInfoFw[] getMicrophones();
 
     void setAudioHalPids(in int[] /* pid_t[] */ pids);
 
@@ -226,7 +229,64 @@
 
     int getAAudioHardwareBurstMinUsec();
 
-    void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+    void setDeviceConnectedState(in AudioPortFw devicePort, DeviceConnectedState state);
+
+    // Used for tests only. Requires AIDL HAL to work.
+    void setSimulateDeviceConnections(boolean enabled);
+
+    /**
+     * Requests a given latency mode (See AudioLatencyMode.aidl) on an output stream.
+     * This can be used when some use case on a given mixer/stream can only be enabled
+     * if a specific latency mode is selected on the audio path below the HAL.
+     * For instance, spatial audio with head tracking.
+     * output is the I/O handle of the output stream for which the request is made.
+     * latencyMode is the requested latency mode.
+     */
+     void setRequestedLatencyMode(int output, AudioLatencyMode latencyMode);
+
+    /**
+     * Queries the list of latency modes (See AudioLatencyMode.aidl) supported by an output stream.
+     * output is the I/O handle of the output stream to which the query applies.
+     * returns the list of supported latency modes.
+     */
+    AudioLatencyMode[] getSupportedLatencyModes(int output);
+
+    /**
+     * Queries whether the implementation supports controlling the latency modes
+     * over the Bluetooth A2DP or LE Audio links. If it does,
+     * setRequestedLatencyMode() and getSupportedLatencyModes() APIs can also be used
+     * for streams routed to Bluetooth and not just for the spatializer output.
+     */
+     boolean supportsBluetoothVariableLatency();
+
+    /**
+     * Enables or disables the variable Bluetooth latency control mechanism in the
+     * audio framework and the audio HAL. This does not apply to the latency mode control
+     * on the spatializer output as this is a built-in feature.
+     */
+    void setBluetoothVariableLatencyEnabled(boolean enabled);
+
+    /**
+     * Indicates if the variable Bluetooth latency control mechanism is enabled or disabled.
+     */
+    boolean isBluetoothVariableLatencyEnabled();
+
+    /**
+     * Registers the sound dose callback and returns the interface for executing
+     * sound dose methods on the audio server.
+     */
+    ISoundDose getSoundDoseInterface(in ISoundDoseCallback callback);
+
+    /**
+     * Invalidate all tracks with given port ids.
+     */
+    void invalidateTracks(in int[] /* audio_port_handle_t[] */ portIds);
+
+    /**
+     * Only implemented for AIDL. Provides the APM configuration which
+     * used to be in the XML file.
+     */
+    AudioPolicyConfig getAudioPolicyConfig();
 
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
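Of the methods added above, getSoundDoseInterface() is the only one that hands back another binder interface. A client-side sketch of registering for sound dose updates through the client adapter might look like this ('af' is an IAudioFlinger handle and MySoundDoseCallback an assumed media::BnSoundDoseCallback implementation; further ISoundDose calls are omitted):

    // Hypothetical sound dose registration through the client adapter 'af'.
    sp<media::ISoundDoseCallback> callback = sp<MySoundDoseCallback>::make();  // assumed impl
    sp<media::ISoundDose> soundDose;
    if (af->getSoundDoseInterface(callback, &soundDose) == NO_ERROR && soundDose != nullptr) {
        // soundDose can now be used to drive sound dose computation on audioserver.
    }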
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 8ac89a8..90ede8b 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -22,13 +22,14 @@
 import android.media.AudioAttributesInternal;
 import android.media.AudioDirectMode;
 import android.media.AudioMix;
+import android.media.AudioMixerAttributesInternal;
 import android.media.AudioOffloadMode;
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
 import android.media.AudioPolicyDeviceState;
 import android.media.AudioPolicyForcedConfig;
 import android.media.AudioPolicyForceUse;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
 import android.media.AudioProductStrategy;
@@ -137,7 +138,7 @@
 
     int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
 
-    AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr, boolean forVolume);
+    AudioDevice[] getDevicesForAttributes(in AudioAttributesInternal attr, boolean forVolume);
 
     int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
 
@@ -202,7 +203,9 @@
                                     in AudioAttributesInternal attributes);
 
     /**
-     * List available audio ports and their attributes. Returns the generation.
+     * List currently attached audio ports and their attributes. Returns the generation.
+     * The generation is incremented each time anything changes in the ports
+     * configuration.
      *
      * On input, count represents the maximum length of the returned array.
      * On output, count is the total number of elements, which may be larger than the array size.
@@ -212,16 +215,23 @@
     int listAudioPorts(AudioPortRole role,
                        AudioPortType type,
                        inout Int count,
-                       out AudioPort[] ports);
+                       out AudioPortFw[] ports);
+
+    /**
+     * List all device ports declared in the configuration (including currently detached ones)
+     * 'role' can be 'NONE' to get both input and output devices,
+     * 'SINK' for output devices, and 'SOURCE' for input devices.
+     */
+    AudioPortFw[] listDeclaredDevicePorts(AudioPortRole role);
 
     /** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
-    AudioPort getAudioPort(int /* audio_port_handle_t */ portId);
+    AudioPortFw getAudioPort(int /* audio_port_handle_t */ portId);
 
     /**
      * Create an audio patch between several source and sink ports.
      * The handle argument is used when updating an existing patch.
      */
-    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch, int handle);
+    int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch, int handle);
 
     /** Release an audio patch. */
     void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
@@ -234,10 +244,10 @@
      * Passing '0' on input and inspecting the value on output is a common way of determining the
      * number of elements without actually retrieving them.
      */
-    int listAudioPatches(inout Int count, out AudioPatch[] patches);
+    int listAudioPatches(inout Int count, out AudioPatchFw[] patches);
 
     /** Set audio port configuration. */
-    void setAudioPortConfig(in AudioPortConfig config);
+    void setAudioPortConfig(in AudioPortConfigFw config);
 
     void registerClient(IAudioPolicyServiceClient client);
 
@@ -261,7 +271,7 @@
 
     void removeUserIdDeviceAffinities(int userId);
 
-    int /* audio_port_handle_t */ startAudioSource(in AudioPortConfig source,
+    int /* audio_port_handle_t */ startAudioSource(in AudioPortConfigFw source,
                                                    in AudioAttributesInternal attributes);
 
     void stopAudioSource(int /* audio_port_handle_t */ portId);
@@ -312,12 +322,21 @@
 
     boolean isUltrasoundSupported();
 
+    /**
+     * Queries if there is hardware support for requesting audio capture content from
+     * the DSP hotword pipeline.
+     *
+     * @param lookbackAudio true if additionally querying for the ability to capture audio
+     *                      from the pipeline prior to capture stream open.
+     */
+    boolean isHotwordStreamSupported(boolean lookbackAudio);
+
     AudioProductStrategy[] listAudioProductStrategies();
-    int /* product_strategy_t */ getProductStrategyFromAudioAttributes(in AudioAttributesEx aa,
-                                                                       boolean fallbackOnDefault);
+    int /* product_strategy_t */ getProductStrategyFromAudioAttributes(
+            in AudioAttributesInternal aa, boolean fallbackOnDefault);
 
     AudioVolumeGroup[] listAudioVolumeGroups();
-    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesEx aa,
+    int /* volume_group_t */ getVolumeGroupFromAudioAttributes(in AudioAttributesInternal aa,
                                                                boolean fallbackOnDefault);
 
     void setRttEnabled(boolean enabled);
@@ -329,7 +348,10 @@
                                    in AudioDevice[] devices);
 
     void removeDevicesRoleForStrategy(int /* product_strategy_t */ strategy,
-                                       DeviceRole role);
+                                      DeviceRole role,
+                                      in AudioDevice[] devices);
+
+    void clearDevicesRoleForStrategy(int /* product_strategy_t */ strategy, DeviceRole role);
 
     AudioDevice[] getDevicesForRoleAndStrategy(int /* product_strategy_t */ strategy,
                                                DeviceRole role);
@@ -391,6 +413,60 @@
      */
     AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
 
+    /**
+     * Return a list of AudioMixerAttributes that can be used to set preferred mixer attributes
+     * for the given device.
+     */
+    AudioMixerAttributesInternal[] getSupportedMixerAttributes(
+            int /* audio_port_handle_t */ portId);
+
+    /**
+     * Set preferred mixer attributes for a given device on a given audio attributes.
+     * When conflicting requests are received, the last request will be honored.
+     * The preferred mixer attributes can only be set when 1) the usage is media, 2) the
+     * given device is currently available, 3) the given device is usb device, 4) the given mixer
+     * attributes is supported by the given device.
+     *
+     * @param attr the audio attributes whose mixer attributes should be set.
+     * @param portId the port id of the device to be routed.
+     * @param uid the uid of the request client. The uid will be used to recognize the ownership for
+     *            the preferred mixer attributes. All the playback with same audio attributes from
+     *            the same uid will be attached to the mixer with the preferred attributes if the
+     *            playback is routed to the given device.
+     * @param mixerAttr the preferred mixer attributes.
+     */
+    void setPreferredMixerAttributes(in AudioAttributesInternal attr,
+                                     int /* audio_port_handle_t */ portId,
+                                     int /* uid_t */ uid,
+                                     in AudioMixerAttributesInternal mixerAttr);
+
+    /**
+     * Get preferred mixer attributes for a given device on a given audio attributes.
+     * Null will be returned if no preferred mixer attributes have been set or if they
+     * have been cleared.
+     *
+     * @param attr the audio attributes whose preferred mixer attributes should be retrieved.
+     * @param portId the port id of the device to be routed.
+     */
+    @nullable AudioMixerAttributesInternal getPreferredMixerAttributes(
+            in AudioAttributesInternal attr,
+            int /* audio_port_handle_t */ portId);
+
+    /**
+     * Clear preferred mixer attributes for a given device on a given audio attributes that
+     * is previously set via setPreferredMixerAttributes.
+     *
+     * @param attr the audio attributes whose preferred mixer attributes should be cleared.
+     * @param portId the port id of the device to be routed.
+     * @param uid the uid of the request client. The uid is used to identify the ownership for the
+     *            preferred mixer attributes. The preferred mixer attributes will only be cleared
+     *            if the uid is the same as the owner of current preferred mixer attributes.
+     */
+    void clearPreferredMixerAttributes(in AudioAttributesInternal attr,
+                                       int /* audio_port_handle_t */ portId,
+                                       int /* uid_t */ uid);
+
+
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
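Taken together, the three preferred-mixer-attributes methods form a set/use/clear lifecycle. The sketch below shows the intended ordering through an IAudioPolicyService handle; 'aps', 'attr', 'portId' and 'uid' are assumptions for illustration, and the CPP-backend signatures are inferred from the AIDL declarations above:

    // Hypothetical lifecycle of the preferred mixer attributes APIs.
    std::vector<media::AudioMixerAttributesInternal> supported;
    aps->getSupportedMixerAttributes(portId, &supported);
    if (!supported.empty()) {
        aps->setPreferredMixerAttributes(attr, portId, uid, supported[0]);
        // ... playback with 'attr' routed to the device uses the preferred mixer ...
        aps->clearPreferredMixerAttributes(attr, portId, uid);
    }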
diff --git a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
index 44ef80b..1ea4156 100644
--- a/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioRecord.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.MicrophoneInfoData;
+import android.media.MicrophoneInfoFw;
 
 /**
  * Native code must specify namespace media (media::IAudioRecord) when referring to this class.
@@ -39,7 +39,7 @@
 
   /* Get a list of current active microphones.
    */
-  void getActiveMicrophones(out MicrophoneInfoData[] activeMicrophones);
+  void getActiveMicrophones(out MicrophoneInfoFw[] activeMicrophones);
 
   /* Set the microphone direction (for processing).
    */
diff --git a/media/libaudioclient/aidl/android/media/IAudioTrack.aidl b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
index ac58925..c3a2dbe 100644
--- a/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioTrack.aidl
@@ -16,13 +16,13 @@
 
 package android.media;
 
-import android.media.AudioDualMonoMode;
-import android.media.AudioPlaybackRate;
 import android.media.AudioTimestampInternal;
 import android.media.SharedFileRegion;
 import android.media.VolumeShaperConfiguration;
 import android.media.VolumeShaperOperation;
 import android.media.VolumeShaperState;
+import android.media.audio.common.AudioDualMonoMode;
+import android.media.audio.common.AudioPlaybackRate;
 
 /**
  * Unless otherwise noted, methods returning int expect it to be interpreted as a status_t.
diff --git a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
index 3b2206a..3ec6e7a 100644
--- a/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
+++ b/media/libaudioclient/aidl/android/media/ICaptureStateListener.aidl
@@ -19,6 +19,6 @@
 /**
  * {@hide}
  */
-interface ICaptureStateListener {
+oneway interface ICaptureStateListener {
     void setCaptureState(boolean active);
 }
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
index 6ec0405..b7f70a6 100644
--- a/media/libaudioclient/aidl/android/media/IEffect.aidl
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -16,6 +16,7 @@
 
 package android.media;
 
+import android.media.EffectConfig;
 import android.media.SharedFileRegion;
 
 /**
@@ -63,6 +64,14 @@
      */
     SharedFileRegion getCblk();
 
+    /**
+     * Provides the audio configurations for the effect's input and output;
+     * see the EffectConfig parcelable for more details.
+     *
+     * @return a status_t code.
+     */
+    int getConfig(out EffectConfig config);
+
     // When adding a new method, please review and update
     // Effects.cpp AudioFlinger::EffectHandle::onTransact()
     // Effects.cpp IEFFECT_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
new file mode 100644
index 0000000..6cb22ef
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SoundDoseRecord;
+
+/**
+ * Interface used to push the sound dose related information from the
+ * AudioService#SoundDoseHelper to the audio server
+ */
+interface ISoundDose {
+    /** Set a new RS2 upper bound used for momentary exposure warnings. */
+    oneway void setOutputRs2UpperBound(float rs2Value);
+
+    /**
+     * Resets the native CSD values. This can happen after a crash in the
+     * audio server or after booting when restoring the previous state.
+     * 'currentCsd' represents the restored CSD value and 'records' contains the
+     * dosage values and MELs together with their timestamps that led to this
+     * CSD.
+     */
+    oneway void resetCsd(float currentCsd, in SoundDoseRecord[] records);
+
+    /**
+     * Updates the attenuation used for the MEL calculation when the volume is
+     * not applied by the audio framework. This can be the case when for example
+     * the absolute volume is used for a particular device.
+     *
+     * @param attenuationDB the attenuation as a negative value in dB that will
+     *                      be applied to the internal MEL when computing CSD.
+     *                      A value of 0 represents no attenuation for the MELs.
+     * @param device        the audio_devices_t type for which we will apply the
+     *                      attenuation
+     */
+    oneway void updateAttenuation(float attenuationDB, int device);
+
+    /**
+     * Enables/disables the calculation of sound dose. When disabled, no MEL
+     * values are computed on the framework side, and the MEL values returned
+     * from the IHalSoundDoseCallback will be ignored.
+     */
+    oneway void setCsdEnabled(boolean enabled);
+
+    /* -------------------------- Test API methods -------------------------- */
+    /** Get the currently used RS2 upper bound. */
+    float getOutputRs2UpperBound();
+    /** Get the current CSD from audioserver. */
+    float getCsd();
+    /**
+     * Returns true if the HAL supports the ISoundDose interface, either as
+     * part of IModule or as a standalone sound dose HAL.
+     */
+    boolean isSoundDoseHalSupported();
+    /** Enables/Disables MEL computations from the framework. */
+    oneway void forceUseFrameworkMel(boolean useFrameworkMel);
+    /** Enables/Disables the computation of CSD on all devices. */
+    oneway void forceComputeCsdOnAllDevices(boolean computeCsdOnAllDevices);
+}
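A hedged sketch of how a caller holding this interface could restore state after an audioserver restart, following the resetCsd() contract above; the helper name and the persisted values are placeholders, and 100 dBA is used only as an illustrative RS2 upper bound.

    #include <android/media/ISoundDose.h>
    #include <android/media/SoundDoseRecord.h>
    #include <utils/StrongPointer.h>
    #include <vector>

    // Push persisted sound-dose state back to audioserver. The ISoundDose
    // handle is obtained by the caller.
    void restoreSoundDoseState(const android::sp<android::media::ISoundDose>& soundDose,
                               float storedCsd,
                               const std::vector<android::media::SoundDoseRecord>& storedRecords) {
        soundDose->setOutputRs2UpperBound(100.f);       // illustrative RS2 value in dBA
        soundDose->resetCsd(storedCsd, storedRecords);  // restore CSD and its contributing records
        soundDose->setCsdEnabled(true);
    }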
diff --git a/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl b/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl
new file mode 100644
index 0000000..7e59409
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/ISoundDoseCallback.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.SoundDoseRecord;
+
+/**
+ * Interface used to push the sound dose related information from the audio
+ * server to the AudioService#SoundDoseHelper.
+ */
+interface ISoundDoseCallback {
+    /** Called whenever the momentary exposure exceeds the RS2 value. */
+    oneway void onMomentaryExposure(float currentMel, int deviceId);
+
+    /**
+     * Notifies that the CSD value has changed. The currentCsd is normalized
+     * with value 1 representing 100% of sound dose. SoundDoseRecord represents
+     * the newest record that led to the new currentCsd.
+     */
+    oneway void onNewCsdValue(float currentCsd, in SoundDoseRecord[] records);
+}
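For the receiving side of this callback, a minimal native listener sketch; the real consumer is AudioService#SoundDoseHelper on the Java side, so this C++ form is purely illustrative and the class name is made up.

    #define LOG_TAG "SoundDoseListener"
    #include <android/media/BnSoundDoseCallback.h>
    #include <utils/Log.h>

    // Illustrative listener: logs momentary exposures and the accumulated dose.
    class SoundDoseListener : public android::media::BnSoundDoseCallback {
      public:
        android::binder::Status onMomentaryExposure(float currentMel, int32_t deviceId) override {
            ALOGW("momentary exposure of %.1f MEL on device %d exceeded RS2", currentMel, deviceId);
            return android::binder::Status::ok();
        }
        android::binder::Status onNewCsdValue(
                float currentCsd,
                const std::vector<android::media::SoundDoseRecord>& records) override {
            ALOGI("CSD is now %.0f%% (%zu new records)", currentCsd * 100.f, records.size());
            return android::binder::Status::ok();
        }
    };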
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index a61ad58..250c450 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -96,17 +96,33 @@
 
     /**
      * Sets the display orientation.
+     *
+     * This is the rotation of the displayed content relative to its natural orientation.
+     *
      * Orientation is expressed in the angle of rotation from the physical "up" side of the screen
      * to the logical "up" side of the content displayed on the screen. Counterclockwise angles, as
      * viewed while facing the screen, are positive.
+     *
+     * Note: DisplayManager currently only returns this in increments of 90 degrees,
+     * so the values will be 0, PI/2, PI, 3PI/2.
      */
     void setDisplayOrientation(float physicalToLogicalAngle);
 
     /**
      * Sets the hinge angle for foldable devices.
+     *
+     * Per the hinge angle sensor, the value ranges from 0 to 2PI.
+     * The value of 0 is considered closed, and PI is considered flat open.
      */
     void setHingeAngle(float hingeAngle);
 
+    /**
+     * Sets whether a foldable is considered "folded" or not.
+     *
+     * The fold state may affect which physical screen is active for display.
+     */
+    void setFoldState(boolean folded);
+
     /** Reports the list of supported spatialization modes (see SpatializationMode.aidl).
      * The list should never be empty if an ISpatializer interface was successfully
      * retrieved with IAudioPolicyService.getSpatializer().
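A short sketch of feeding these inputs, assuming the caller already holds the ISpatializer handle and receives the display rotation as a quadrant index (0..3, i.e. Surface.ROTATION_*); the helper name is made up.

    #include <android/media/ISpatializer.h>
    #include <utils/StrongPointer.h>
    #include <cmath>

    // Convert a display rotation quadrant to the radian angle expected by
    // setDisplayOrientation(), then forward the hinge angle and fold state.
    void updateSpatializerPose(const android::sp<android::media::ISpatializer>& spatializer,
                               int rotationQuadrant, float hingeAngleRad, bool folded) {
        const float angle = rotationQuadrant * static_cast<float>(M_PI_2);  // 0, PI/2, PI, 3PI/2
        spatializer->setDisplayOrientation(angle);
        spatializer->setHingeAngle(hingeAngleRad);  // 0 = closed, PI = flat open
        spatializer->setFoldState(folded);
    }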
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index 90e7ea6..ddda8bb 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioPort;
+import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
 
@@ -29,7 +29,7 @@
     AudioConfig halConfig;
     AudioConfigBase mixerConfig;
     /** Type must be DEVICE. */
-    AudioPort device;
+    AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
 }
diff --git a/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl b/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl
new file mode 100644
index 0000000..1397e23
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SoundDoseRecord.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/** Record containing information about the computed sound dose. */
+@JavaDerive(toString = true)
+parcelable SoundDoseRecord {
+    /**
+     * The time in seconds from which the CSD value is calculated.
+     * Values should be consistent and referenced from the same clock (e.g. monotonic).
+     */
+    long timestamp;
+    /** Corresponds to the duration that leads to the CSD value. */
+    int duration;
+    /** The actual contribution to the CSD computation, normalized: 1.f is 100% CSD. */
+    float value;
+    /** The average MEL value in this time frame that led to this CSD value. */
+    float averageMel;
+}
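Because each record carries a normalized contribution, a dose total can be recomputed by summing the value fields; a small sketch under that assumption (the helper is not part of the change).

    #include <android/media/SoundDoseRecord.h>
    #include <vector>

    // Recompute the total CSD from its contributing records (1.0 == 100% dose).
    float totalCsd(const std::vector<android::media::SoundDoseRecord>& records) {
        float csd = 0.f;
        for (const auto& record : records) {
            csd += record.value;  // normalized contribution of this time frame
        }
        return csd;
    }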
diff --git a/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
new file mode 100644
index 0000000..f83fdef
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/SurroundSoundConfig.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.audio.common.AudioFormatDescription;
+
+/**
+ * TODO(b/280077672): This is a temporary copy of the stable
+ * android.hardware.audio.core.SurroundSoundConfig parcelable.
+ * Interfaces from the Core API do not support the CPP backend. This copy will
+ * be removed either by moving the AudioRoute from core to a.m.a.common or by
+ * switching the framework internal interfaces to the NDK backend.
+ * {@hide}
+ */
+parcelable SurroundSoundConfig {
+    parcelable SurroundFormatFamily {
+        /**
+         * A primaryFormat shall get an entry in the Surround Settings dialog on TV
+         * devices. There must be a corresponding Java ENCODING_... constant
+         * defined in AudioFormat.java, and a display name defined in
+         * AudioFormat.toDisplayName.
+         */
+        AudioFormatDescription primaryFormat;
+        /**
+         * List of formats that shall be equivalent to the primaryFormat from the
+         * users' point of view and don't need a dedicated Surround Settings
+         * dialog entry.
+         */
+        AudioFormatDescription[] subFormats;
+    }
+    SurroundFormatFamily[] formatFamilies;
+}
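To make the nesting concrete, a tiny read-side sketch that walks the parcelable; the counting helper is illustrative only.

    #include <android/media/SurroundSoundConfig.h>
    #include <cstddef>

    // Count every format described by a SurroundSoundConfig: one primary format
    // per family plus its user-equivalent sub-formats, which share the primary's
    // Surround Settings entry.
    size_t countSurroundFormats(const android::media::SurroundSoundConfig& config) {
        size_t count = 0;
        for (const auto& family : config.formatFamilies) {
            count += 1 + family.subFormats.size();
        }
        return count;
    }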
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index 969e3e6..6080314 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -25,6 +25,9 @@
 
 cc_fuzz {
     name: "audioflinger_fuzzer",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     srcs: [
         "audioflinger_fuzzer.cpp",
     ],
@@ -46,7 +49,6 @@
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -54,6 +56,7 @@
         "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libaudio_aidl_conversion_common_cpp",
         "libaudioflinger",
         "libaudiofoundation",
         "libaudiomanager",
@@ -77,5 +80,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 036e72e..dfdb4cf 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -84,13 +84,15 @@
 };
 
 template <typename T, typename X, typename FUNC>
-std::vector<T> getFlags(const xsdc_enum_range<X> &range, const FUNC &func,
-                        const std::string &findString = {}) {
+std::vector<T> getFlags(const xsdc_enum_range<X>& range, const FUNC& func,
+                        const std::string& findString = {},
+                        const std::set<X>& excludedValues = {}) {
     std::vector<T> vec;
     for (const auto &xsdEnumVal : range) {
         T enumVal;
         std::string enumString = toString(xsdEnumVal);
         if (enumString.find(findString) != std::string::npos &&
+            (excludedValues.find(xsdEnumVal) == excludedValues.end()) &&
             func(enumString.c_str(), &enumVal)) {
             vec.push_back(enumVal);
         }
@@ -102,13 +104,29 @@
     getFlags<audio_stream_type_t, xsd::AudioStreamType, decltype(audio_stream_type_from_string)>(
         xsdc_enum_range<xsd::AudioStreamType>{}, audio_stream_type_from_string);
 
+/**
+ * AudioFormat - AUDIO_FORMAT_HE_AAC_V1 and AUDIO_FORMAT_HE_AAC_V2
+ * are excluded from kFormats[] in order to avoid the abort triggered
+ * for these two types of AudioFormat in
+ * AidlConversion::legacy2aidl_audio_format_t_AudioFormatDescription()
+ */
 static const std::vector<audio_format_t> kFormats =
-    getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
-        xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string);
+        getFlags<audio_format_t, xsd::AudioFormat, decltype(audio_format_from_string)>(
+                xsdc_enum_range<xsd::AudioFormat>{}, audio_format_from_string, {},
+                {xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V1,
+                 xsd::AudioFormat::AUDIO_FORMAT_HE_AAC_V2});
 
+/**
+ * AudioChannelMask - AUDIO_CHANNEL_IN_6
+ * is excluded from kChannelMasks[] in order to avoid the abort triggered
+ * for this type of AudioChannelMask in
+ * AidlConversion::legacy2aidl_audio_channel_mask_t_AudioChannelLayout()
+ */
 static const std::vector<audio_channel_mask_t> kChannelMasks =
-    getFlags<audio_channel_mask_t, xsd::AudioChannelMask, decltype(audio_channel_mask_from_string)>(
-        xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string);
+        getFlags<audio_channel_mask_t, xsd::AudioChannelMask,
+                 decltype(audio_channel_mask_from_string)>(
+                xsdc_enum_range<xsd::AudioChannelMask>{}, audio_channel_mask_from_string, {},
+                {xsd::AudioChannelMask::AUDIO_CHANNEL_IN_6});
 
 static const std::vector<audio_usage_t> kUsages =
     getFlags<audio_usage_t, xsd::AudioUsage, decltype(audio_usage_from_string)>(
@@ -126,9 +144,17 @@
     getFlags<audio_gain_mode_t, xsd::AudioGainMode, decltype(audio_gain_mode_from_string)>(
         xsdc_enum_range<xsd::AudioGainMode>{}, audio_gain_mode_from_string);
 
+/**
+ * AudioDevice - AUDIO_DEVICE_IN_AMBIENT and AUDIO_DEVICE_IN_COMMUNICATION
+ * are excluded from kDevices[] in order to avoid the abort triggered
+ * for these two types of AudioDevice in
+ * AidlConversion::aidl2legacy_AudioDeviceDescription_audio_devices_t()
+ */
 static const std::vector<audio_devices_t> kDevices =
-    getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
-        xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string);
+        getFlags<audio_devices_t, xsd::AudioDevice, decltype(audio_device_from_string)>(
+                xsdc_enum_range<xsd::AudioDevice>{}, audio_device_from_string, {},
+                {xsd::AudioDevice::AUDIO_DEVICE_IN_AMBIENT,
+                 xsd::AudioDevice::AUDIO_DEVICE_IN_COMMUNICATION});
 
 static const std::vector<audio_input_flags_t> kInputFlags =
     getFlags<audio_input_flags_t, xsd::AudioInOutFlag, decltype(audio_input_flag_from_string)>(
@@ -359,7 +385,7 @@
     record->getInputFramesLost();
     record->getFlags();
 
-    std::vector<media::MicrophoneInfo> activeMicrophones;
+    std::vector<media::MicrophoneInfoFw> activeMicrophones;
     record->getActiveMicrophones(&activeMicrophones);
     record->releaseBuffer(&audioBuffer);
 
@@ -541,7 +567,7 @@
     AudioSystem::getPrimaryOutputFrameCount();
     AudioSystem::setLowRamDevice(mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int64_t>());
 
-    std::vector<media::MicrophoneInfo> microphones;
+    std::vector<media::MicrophoneInfoFw> microphones;
     AudioSystem::getMicrophones(&microphones);
 
     std::vector<pid_t> pids;
@@ -558,7 +584,12 @@
 
     float balance = mFdp.ConsumeFloatingPoint<float>();
     af->getMasterBalance(&balance);
-    af->invalidateStream(static_cast<audio_stream_type_t>(mFdp.ConsumeIntegral<uint32_t>()));
+
+    std::vector<audio_port_handle_t> tracks;
+    for (int i = 0; i < mFdp.ConsumeIntegralInRange<int32_t>(0, MAX_ARRAY_LENGTH); ++i) {
+        tracks.push_back(static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>()));
+    }
+    af->invalidateTracks(tracks);
 }
 
 status_t AudioFlingerFuzzer::invokeAudioInputDevice() {
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index e769303..b0d48b7 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -24,45 +24,21 @@
 #include <android/media/AudioAttributesInternal.h>
 #include <android/media/AudioClient.h>
 #include <android/media/AudioDirectMode.h>
-#include <android/media/AudioDualMonoMode.h>
 #include <android/media/AudioFlag.h>
 #include <android/media/AudioIoConfigEvent.h>
 #include <android/media/AudioIoDescriptor.h>
-#include <android/media/AudioPlaybackRate.h>
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
 #include <android/media/AudioPortDeviceExtSys.h>
 #include <android/media/AudioTimestampInternal.h>
 #include <android/media/AudioUniqueIdUse.h>
 #include <android/media/EffectDescriptor.h>
+#include <android/media/MicrophoneInfoFw.h>
 #include <android/media/TrackSecondaryOutputInfo.h>
-#include <android/media/audio/common/AudioChannelLayout.h>
-#include <android/media/audio/common/AudioConfig.h>
-#include <android/media/audio/common/AudioConfigBase.h>
-#include <android/media/audio/common/AudioContentType.h>
-#include <android/media/audio/common/AudioDeviceDescription.h>
-#include <android/media/audio/common/AudioEncapsulationMetadataType.h>
-#include <android/media/audio/common/AudioEncapsulationMode.h>
-#include <android/media/audio/common/AudioEncapsulationType.h>
-#include <android/media/audio/common/AudioFormatDescription.h>
-#include <android/media/audio/common/AudioGain.h>
-#include <android/media/audio/common/AudioGainConfig.h>
-#include <android/media/audio/common/AudioGainMode.h>
-#include <android/media/audio/common/AudioInputFlags.h>
-#include <android/media/audio/common/AudioMode.h>
-#include <android/media/audio/common/AudioOffloadInfo.h>
-#include <android/media/audio/common/AudioOutputFlags.h>
-#include <android/media/audio/common/AudioPortExt.h>
-#include <android/media/audio/common/AudioPortMixExt.h>
-#include <android/media/audio/common/AudioProfile.h>
-#include <android/media/audio/common/AudioSource.h>
-#include <android/media/audio/common/AudioStandard.h>
-#include <android/media/audio/common/AudioUsage.h>
-#include <android/media/audio/common/AudioUuid.h>
-#include <android/media/audio/common/ExtraAudioDescriptor.h>
 
 #include <android/media/SharedFileRegion.h>
 #include <binder/IMemory.h>
+#include <media/AidlConversionCppNdk.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioClient.h>
 #include <media/AudioCommonTypes.h>
@@ -72,49 +48,6 @@
 
 namespace android {
 
-// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
-// the string.
-status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
-ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
-
-ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
-
-ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
-
-ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
-
-ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
-
-ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
-
-ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
-
-ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
-
-ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
-
-ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
-
-ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy);
-
-ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
-
-ConversionResult<std::optional<String16>>
-aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
-ConversionResult<std::optional<std::string_view>>
-legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
-
 ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
         media::AudioIoConfigEvent aidl);
 ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
@@ -130,75 +63,6 @@
 ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
         audio_port_type_t legacy);
 
-ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
-        const media::audio::common::AudioChannelLayout& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioChannelLayout>
-legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
-
-ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
-        const media::audio::common::AudioDeviceDescription& aidl);
-ConversionResult<media::audio::common::AudioDeviceDescription>
-legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
-
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl,
-        audio_devices_t* legacyType, char* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl,
-        audio_devices_t* legacyType, String8* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
-        const media::audio::common::AudioDevice& aidl,
-        audio_devices_t* legacyType, std::string* legacyAddress);
-ConversionResult<media::audio::common::AudioDevice>
-legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const char* legacyAddress);
-ConversionResult<media::audio::common::AudioDevice>
-legacy2aidl_audio_device_AudioDevice(
-        audio_devices_t legacyType, const String8& legacyAddress);
-
-ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
-        const media::audio::common::AudioFormatDescription& aidl);
-ConversionResult<media::audio::common::AudioFormatDescription>
-legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
-
-ConversionResult<audio_gain_mode_t>
-aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
-ConversionResult<media::audio::common::AudioGainMode>
-legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
-        const media::audio::common::AudioGainConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGainConfig>
-legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
-
-ConversionResult<audio_input_flags_t>
-aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
-ConversionResult<media::audio::common::AudioInputFlags>
-legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t>
-aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
-ConversionResult<media::audio::common::AudioOutputFlags>
-legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
-
-ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
-        int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
-        audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
-        int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
-        audio_output_flags_t legacy);
-
-ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
-        const media::audio::common::AudioIoFlags& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
-        const audio_io_flags& legacy, bool isInput);
-
 ConversionResult<audio_port_config_device_ext>
 aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
         const media::audio::common::AudioPortDeviceExt& aidl,
@@ -208,20 +72,6 @@
         media::audio::common::AudioPortDeviceExt* aidl,
         media::AudioPortDeviceExtSys* aidlDeviceExt);
 
-ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
-        media::audio::common::AudioStreamType aidl);
-ConversionResult<media::audio::common::AudioStreamType>
-legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
-
-ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
-        media::audio::common::AudioSource aidl);
-ConversionResult<media::audio::common::AudioSource>
-        legacy2aidl_audio_source_t_AudioSource(
-        audio_source_t legacy);
-
-ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
-
 ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
         const media::audio::common::AudioPortMixExt& aidl, media::AudioPortRole role,
         const media::AudioPortMixExtSys& aidlMixExt);
@@ -234,14 +84,15 @@
 ConversionResult<int32_t> legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
         const audio_port_config_session_ext& legacy);
 
-ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
-        const media::AudioPortConfig& aidl);
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
-        const audio_port_config& legacy);
+// portId needs to be set when dealing with the HAL.
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfigFw_audio_port_config(
+        const media::AudioPortConfigFw& aidl, int32_t* aidlPortId = nullptr);
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfigFw(
+        const audio_port_config& legacy, int32_t portId = 0);
 
-ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
-        const media::AudioPatch& aidl);
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatchFw_audio_patch(
+        const media::AudioPatchFw& aidl);
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatchFw(
         const struct audio_patch& legacy);
 
 ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
@@ -254,17 +105,6 @@
 ConversionResult<media::AudioClient> legacy2aidl_AudioClient_AudioClient(
         const AudioClient& legacy);
 
-ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(
-        media::audio::common::AudioContentType aidl);
-ConversionResult<media::audio::common::AudioContentType>
-legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
-
-ConversionResult<audio_usage_t>
-aidl2legacy_AudioUsage_audio_usage_t(media::audio::common::AudioUsage aidl);
-ConversionResult<media::audio::common::AudioUsage>
-legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy);
-
 ConversionResult<audio_flags_mask_t>
 aidl2legacy_AudioFlag_audio_flags_mask_t(media::AudioFlag aidl);
 ConversionResult<media::AudioFlag>
@@ -280,36 +120,13 @@
 ConversionResult<media::AudioAttributesInternal>
 legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
 
-ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
-        media::audio::common::AudioEncapsulationMode aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMode>
-legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
-
-ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
-        const media::audio::common::AudioOffloadInfo& aidl);
-ConversionResult<media::audio::common::AudioOffloadInfo>
-legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
-
-ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
-
-ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(
-        const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
-
 ConversionResult<sp<IMemory>>
 aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
 ConversionResult<media::SharedFileRegion>
 legacy2aidl_IMemory_SharedFileRegion(const sp<IMemory>& legacy);
 
-ConversionResult<sp<IMemory>>
-aidl2legacy_NullableSharedFileRegion_IMemory(const std::optional<media::SharedFileRegion>& aidl);
+ConversionResult<sp<::android::IMemory>> aidl2legacy_NullableSharedFileRegion_IMemory(
+        const std::optional<media::SharedFileRegion>& aidl);
 ConversionResult<std::optional<media::SharedFileRegion>>
 legacy2aidl_NullableIMemory_SharedFileRegion(const sp<IMemory>& legacy);
 
@@ -318,32 +135,10 @@
 ConversionResult<media::AudioTimestampInternal>
 legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy);
 
-ConversionResult<audio_uuid_t>
-aidl2legacy_AudioUuid_audio_uuid_t(const media::audio::common::AudioUuid& aidl);
-ConversionResult<media::audio::common::AudioUuid>
-legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy);
-
 ConversionResult<effect_descriptor_t>
 aidl2legacy_EffectDescriptor_effect_descriptor_t(const media::EffectDescriptor& aidl);
-ConversionResult<media::EffectDescriptor>
-legacy2aidl_effect_descriptor_t_EffectDescriptor(const effect_descriptor_t& legacy);
-
-ConversionResult<audio_encapsulation_metadata_type_t>
-aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
-        media::audio::common::AudioEncapsulationMetadataType aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
-legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
-        audio_encapsulation_metadata_type_t legacy);
-
-ConversionResult<uint32_t>
-aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
-
-ConversionResult<uint32_t>
-aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+ConversionResult<media::EffectDescriptor> legacy2aidl_effect_descriptor_t_EffectDescriptor(
+        const effect_descriptor_t& legacy);
 
 ConversionResult<audio_port_device_ext>
 aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
@@ -368,26 +163,10 @@
 ConversionResult<int32_t>
 legacy2aidl_audio_port_session_ext_int32_t(const audio_port_session_ext& legacy);
 
-ConversionResult<audio_profile>
-aidl2legacy_AudioProfile_audio_profile(
-        const media::audio::common::AudioProfile& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioProfile>
-legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy, bool isInput);
-
-ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
-
 ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
-
-ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
-ConversionResult<media::audio::common::AudioMode>
-legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+aidl2legacy_AudioPortFw_audio_port_v7(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
+legacy2aidl_audio_port_v7_AudioPortFw(const audio_port_v7& legacy);
 
 ConversionResult<audio_unique_id_use_t>
 aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl);
@@ -399,45 +178,6 @@
 ConversionResult<int32_t>
 legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
 
-ConversionResult<audio_dual_mono_mode_t>
-aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::AudioDualMonoMode aidl);
-ConversionResult<media::AudioDualMonoMode>
-legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
-
-ConversionResult<audio_timestretch_fallback_mode_t>
-aidl2legacy_int32_t_audio_timestretch_fallback_mode_t(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_audio_timestretch_fallback_mode_t_int32_t(audio_timestretch_fallback_mode_t legacy);
-
-ConversionResult<audio_timestretch_stretch_mode_t>
-aidl2legacy_int32_t_audio_timestretch_stretch_mode_t(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_audio_timestretch_stretch_mode_t_int32_t(audio_timestretch_stretch_mode_t legacy);
-
-ConversionResult<audio_playback_rate_t>
-aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(const media::AudioPlaybackRate& aidl);
-ConversionResult<media::AudioPlaybackRate>
-legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
-
-ConversionResult<audio_standard_t>
-aidl2legacy_AudioStandard_audio_standard_t(media::audio::common::AudioStandard aidl);
-ConversionResult<media::audio::common::AudioStandard>
-legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy);
-
-ConversionResult<audio_extra_audio_descriptor>
-aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
-        const media::audio::common::ExtraAudioDescriptor& aidl);
-ConversionResult<media::audio::common::ExtraAudioDescriptor>
-legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
-        const audio_extra_audio_descriptor& legacy);
-
-ConversionResult<audio_encapsulation_type_t>
-aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-        const media::audio::common::AudioEncapsulationType& aidl);
-ConversionResult<media::audio::common::AudioEncapsulationType>
-legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
-        const audio_encapsulation_type_t & legacy);
-
 using TrackSecondaryOutputInfoPair = std::pair<audio_port_handle_t, std::vector<audio_io_handle_t>>;
 ConversionResult<TrackSecondaryOutputInfoPair>
 aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair(
@@ -454,5 +194,11 @@
 ConversionResult<audio_direct_mode_t> aidl2legacy_int32_t_audio_direct_mode_t_mask(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_audio_direct_mode_t_int32_t_mask(audio_direct_mode_t legacy);
 
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfoFw_audio_microphone_characteristic_t(
+        const media::MicrophoneInfoFw& aidl);
+ConversionResult<media::MicrophoneInfoFw>
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
+        const audio_microphone_characteristic_t& legacy);
 
 }  // namespace android
diff --git a/media/libaudioclient/include/media/AudioCommonTypes.h b/media/libaudioclient/include/media/AudioCommonTypes.h
index 862a0f9..2567542 100644
--- a/media/libaudioclient/include/media/AudioCommonTypes.h
+++ b/media/libaudioclient/include/media/AudioCommonTypes.h
@@ -94,6 +94,7 @@
 
 using AttributesVector = std::vector<audio_attributes_t>;
 using StreamTypeVector = std::vector<audio_stream_type_t>;
+using PortHandleVector = std::vector<audio_port_handle_t>;
 
 using TrackSecondaryOutputsMap = std::map<audio_port_handle_t, std::vector<audio_io_handle_t>>;
 
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index 56884a3..291312e 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -136,7 +136,7 @@
      *                      indicated by count.
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *      NO_INIT         effect library failed to initialize
-     *      BAD_VALUE       invalid audio session or descriptor pointers
+     *      BAD_VALUE       invalid audio session, or invalid descriptor or count pointers
      *
      * Returned value
      *   *descriptor updated with descriptors of pre processings enabled by default
@@ -160,6 +160,7 @@
      *      NO_ERROR        successful operation.
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
+     *      BAD_VALUE       invalid pointer to id
      * Returned value
      *   *id:  The new unique system-wide effect id.
      */
@@ -194,7 +195,7 @@
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
      *      NO_INIT         effect library failed to initialize.
-     *      BAD_VALUE       invalid source, type uuid or implementation uuid.
+     *      BAD_VALUE       invalid source, type uuid or implementation uuid, or id pointer
      *      NAME_NOT_FOUND  no effect with this uuid or type found.
      *
      * Returned value
@@ -233,7 +234,7 @@
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
      *      NO_INIT         effect library failed to initialize.
-     *      BAD_VALUE       invalid type uuid or implementation uuid.
+     *      BAD_VALUE       invalid type uuid or implementation uuid, or id pointer
      *      NAME_NOT_FOUND  no effect with this uuid or type found.
      *
      * Returned value
@@ -517,7 +518,7 @@
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful operation.
      *  - INVALID_OPERATION: the application does not have control of the effect engine.
-     *  - BAD_VALUE: invalid parameter identifier or value.
+     *  - BAD_VALUE: invalid parameter structure pointer, or invalid identifier or value.
      *  - DEAD_OBJECT: the effect engine has been deleted.
      */
      virtual status_t   setParameter(effect_param_t *param);
@@ -562,7 +563,7 @@
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful operation.
      *  - INVALID_OPERATION: the AudioEffect was not successfully initialized.
-     *  - BAD_VALUE: invalid parameter identifier.
+     *  - BAD_VALUE: invalid parameter structure pointer, or invalid parameter identifier.
      *  - DEAD_OBJECT: the effect engine has been deleted.
      */
      virtual status_t   getParameter(effect_param_t *param);
@@ -577,6 +578,25 @@
                               uint32_t *replySize,
                               void *replyData);
 
+    /* Retrieves the configuration of the effect.
+     *
+     * Parameters:
+     *      inputCfg:  pointer to audio_config_base_t structure receiving input
+     *          configuration of the effect
+     *      outputCfg: pointer to audio_config_base_t structure receiving output
+     *          configuration of the effect
+     *
+     * Channel masks of the returned configs are "input" or "output" depending
+     * on the direction of the stream that the effect is attached to.
+     *
+     * Returned status (from utils/Errors.h) can be:
+     *  - NO_ERROR: successful operation.
+     *  - INVALID_OPERATION: the AudioEffect was not successfully initialized.
+     *  - BAD_VALUE: null config pointers
+     *  - DEAD_OBJECT: the effect engine has been deleted.
+     */
+     virtual status_t   getConfigs(audio_config_base_t *inputCfg,
+                                   audio_config_base_t *outputCfg);
 
      /*
       * Utility functions.
@@ -628,7 +648,7 @@
     {
     public:
 
-        EffectClient(AudioEffect *effect) : mEffect(effect){}
+        explicit EffectClient(const sp<AudioEffect>& effect) : mEffect(effect){}
 
         // IEffectClient
         binder::Status controlStatusChanged(bool controlGranted) override {
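A usage sketch for the getConfigs() accessor documented in this header; the effect instance and the helper around it are placeholders.

    #include <media/AudioEffect.h>
    #include <system/audio.h>
    #include <utils/Errors.h>

    // Read back the input/output configuration of an initialized AudioEffect
    // and check whether both sides run at the expected sample rate
    // (hypothetical helper, error handling trimmed for brevity).
    bool effectRunsAt(const android::sp<android::AudioEffect>& effect, uint32_t rateHz) {
        audio_config_base_t inputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
        audio_config_base_t outputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
        if (effect->getConfigs(&inputCfg, &outputCfg) != android::NO_ERROR) {
            return false;
        }
        return inputCfg.sample_rate == rateHz && outputCfg.sample_rate == rateHz;
    }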
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 08b3da1..ec35e93 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -34,11 +34,13 @@
 #define RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET (0x1 << 1)
 #define RULE_MATCH_UID                      (0x1 << 2)
 #define RULE_MATCH_USERID                   (0x1 << 3)
+#define RULE_MATCH_AUDIO_SESSION_ID         (0x1 << 4)
 #define RULE_EXCLUDE_ATTRIBUTE_USAGE  (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_USAGE)
 #define RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET \
                                       (RULE_EXCLUSION_MASK|RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET)
 #define RULE_EXCLUDE_UID              (RULE_EXCLUSION_MASK|RULE_MATCH_UID)
 #define RULE_EXCLUDE_USERID           (RULE_EXCLUSION_MASK|RULE_MATCH_USERID)
+#define RULE_EXCLUDE_AUDIO_SESSION_ID       (RULE_EXCLUSION_MASK|RULE_MATCH_AUDIO_SESSION_ID)
 
 #define MIX_TYPE_INVALID (-1)
 #define MIX_TYPE_PLAYERS 0
@@ -59,9 +61,12 @@
 #define MIX_ROUTE_FLAG_LOOP_BACK (0x1 << 1)
 /** Loop back some audio while it is rendered */
 #define MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER (MIX_ROUTE_FLAG_RENDER | MIX_ROUTE_FLAG_LOOP_BACK)
-#define MIX_ROUTE_FLAG_ALL (MIX_ROUTE_FLAG_RENDER | MIX_ROUTE_FLAG_LOOP_BACK)
+/** Controls whether audio routing disallows preferred device routing */
+#define MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE (0x1 << 2)
+#define MIX_ROUTE_FLAG_ALL (MIX_ROUTE_FLAG_RENDER | MIX_ROUTE_FLAG_LOOP_BACK | \
+    MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE)
 
-#define MAX_MIXES_PER_POLICY 10
+#define MAX_MIXES_PER_POLICY 50
 #define MAX_CRITERIA_PER_MIX 20
 
 class AudioMixMatchCriterion {
@@ -72,11 +77,13 @@
     status_t readFromParcel(Parcel *parcel);
     status_t writeToParcel(Parcel *parcel) const;
 
+    bool isExcludeCriterion() const;
     union {
         audio_usage_t   mUsage;
         audio_source_t  mSource;
         uid_t           mUid;
         int        mUserId;
+        audio_session_t  mAudioSessionId;
     } mValue;
     uint32_t        mRule;
 };
@@ -88,31 +95,32 @@
     static const uint32_t kCbFlagNotifyActivity = 0x1;
 
     AudioMix() {}
-    AudioMix(Vector<AudioMixMatchCriterion> criteria, uint32_t mixType, audio_config_t format,
-             uint32_t routeFlags, String8 registrationId, uint32_t flags) :
+    AudioMix(const std::vector<AudioMixMatchCriterion> &criteria, uint32_t mixType,
+             audio_config_t format, uint32_t routeFlags, const String8 &registrationId,
+             uint32_t flags) :
         mCriteria(criteria), mMixType(mixType), mFormat(format),
         mRouteFlags(routeFlags), mDeviceAddress(registrationId), mCbFlags(flags){}
 
     status_t readFromParcel(Parcel *parcel);
     status_t writeToParcel(Parcel *parcel) const;
 
-    void setExcludeUid(uid_t uid) const;
-    void setMatchUid(uid_t uid) const;
+    void setExcludeUid(uid_t uid);
+    void setMatchUid(uid_t uid);
     /** returns true if this mix has a rule to match or exclude the given uid */
     bool hasUidRule(bool match, uid_t uid) const;
     /** returns true if this mix has a rule for uid match (any uid) */
     bool hasMatchUidRule() const;
 
-    void setExcludeUserId(int userId) const;
-    void setMatchUserId(int userId) const;
+    void setExcludeUserId(int userId);
+    void setMatchUserId(int userId);
     /** returns true if this mix has a rule to match or exclude the given userId */
     bool hasUserIdRule(bool match, int userId) const;
-    /** returns true if this mix has a rule for userId match (any userId) */
-    bool hasMatchUserIdRule() const;
-    /** returns true if this mix can be used for uid-device affinity routing */
+    /** returns true if this mix has a rule to match or exclude (any userId) */
+    bool hasUserIdRule(bool match) const;
+    /** returns true if this mix can be used for uid-device affinity routing */
     bool isDeviceAffinityCompatible() const;
 
-    mutable Vector<AudioMixMatchCriterion> mCriteria;
+    std::vector<AudioMixMatchCriterion> mCriteria;
     uint32_t        mMixType;
     audio_config_t  mFormat;
     uint32_t        mRouteFlags;
@@ -137,6 +145,16 @@
            == MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER;
 }
 
+static inline bool is_mix_loopback(uint32_t routeFlags) {
+    return (routeFlags & MIX_ROUTE_FLAG_LOOP_BACK)
+           == MIX_ROUTE_FLAG_LOOP_BACK;
+}
+
+static inline bool is_mix_disallows_preferred_device(uint32_t routeFlags) {
+    return (routeFlags & MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE)
+           == MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
+}
+
 }; // namespace android
 
 #endif  // ANDROID_AUDIO_POLICY_H
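A brief sketch exercising the new route flag and helpers; the criteria, format and address string are assumed to come from the caller.

    #include <media/AudioPolicy.h>
    #include <system/audio.h>
    #include <utils/String8.h>
    #include <vector>

    // Build a loopback mix that also opts out of preferred-device routing.
    android::AudioMix makeIsolatedLoopbackMix(
            const std::vector<android::AudioMixMatchCriterion>& criteria,
            const audio_config_t& format) {
        const uint32_t routeFlags =
                MIX_ROUTE_FLAG_LOOP_BACK | MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
        return android::AudioMix(criteria, MIX_TYPE_PLAYERS, format, routeFlags,
                                 android::String8("loopback-mix"), 0 /* cbFlags */);
    }

Policy code can then branch on android::is_mix_disallows_preferred_device(mix.mRouteFlags) to skip preferred-device overrides for such a mix.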
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index b55b506..fcbb019 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -20,7 +20,7 @@
 #include <android/media/AudioProductStrategy.h>
 #include <media/AidlConversionUtil.h>
 #include <media/AudioCommonTypes.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <binder/Parcelable.h>
@@ -31,34 +31,53 @@
 {
 public:
     AudioProductStrategy() {}
-    AudioProductStrategy(const std::string &name, const std::vector<AudioAttributes> &attributes,
+    AudioProductStrategy(const std::string &name,
+                         const std::vector<VolumeGroupAttributes> &attributes,
                          product_strategy_t id) :
-        mName(name), mAudioAttributes(attributes), mId(id) {}
+        mName(name), mVolumeGroupAttributes(attributes), mId(id) {}
 
     const std::string &getName() const { return mName; }
-    std::vector<AudioAttributes> getAudioAttributes() const { return mAudioAttributes; }
+    std::vector<VolumeGroupAttributes> getVolumeGroupAttributes() const {
+        return mVolumeGroupAttributes;
+    }
     product_strategy_t getId() const { return mId; }
 
     status_t readFromParcel(const Parcel *parcel) override;
     status_t writeToParcel(Parcel *parcel) const override;
 
     /**
-     * @brief attributesMatches: checks if client attributes matches with a reference attributes
-     * "matching" means the usage shall match if reference attributes has a defined usage, AND
-     * content type shall match if reference attributes has a defined content type AND
+     * @brief attributesMatchesScore: checks if client attributes match a reference set of
+     * attributes. "Matching" means the usage shall match if reference attributes has a defined
+     * usage, AND content type shall match if reference attributes has a defined content type AND
      * flags shall match if reference attributes has defined flags AND
      * tags shall match if reference attributes has defined tags.
-     * Reference attributes "default" shall not be considered as a "true" case. This convention
+     * Reference attributes "default" shall be considered as a weak match case. This convention
      * is used to identify the default strategy.
      * @param refAttributes to be considered
      * @param clientAttritubes to be considered
-     * @return true if matching, false otherwise
+     * @return {@code NO_MATCH} if not matching, {@code MATCH_ON_DEFAULT_SCORE} if matching
+     * the default strategy, or a non-zero positive score if matching a strategy.
      */
+    static int attributesMatchesScore(const audio_attributes_t refAttributes,
+                                      const audio_attributes_t clientAttritubes);
+
     static bool attributesMatches(const audio_attributes_t refAttributes,
-                                  const audio_attributes_t clientAttritubes);
+                                      const audio_attributes_t clientAttritubes) {
+        return attributesMatchesScore(refAttributes, clientAttritubes) > 0;
+    }
+
+    static const int MATCH_ON_TAGS_SCORE = 1 << 3;
+    static const int MATCH_ON_FLAGS_SCORE = 1 << 2;
+    static const int MATCH_ON_USAGE_SCORE = 1 << 1;
+    static const int MATCH_ON_CONTENT_TYPE_SCORE = 1 << 0;
+    static const int MATCH_ON_DEFAULT_SCORE = 0;
+    static const int MATCH_EQUALS = MATCH_ON_TAGS_SCORE | MATCH_ON_FLAGS_SCORE
+            | MATCH_ON_USAGE_SCORE | MATCH_ON_CONTENT_TYPE_SCORE;
+    static const int NO_MATCH = -1;
+
 private:
     std::string mName;
-    std::vector<AudioAttributes> mAudioAttributes;
+    std::vector<VolumeGroupAttributes> mVolumeGroupAttributes;
     product_strategy_t mId;
 };
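The scoring constants above suggest a best-match selection over all strategies; a sketch of that idea, assuming VolumeGroupAttributes exposes its audio_attributes_t via getAttributes() and that PRODUCT_STRATEGY_NONE marks "no match" (the helper name is illustrative).

    #include <media/AudioProductStrategy.h>
    #include <vector>

    using android::AudioProductStrategy;

    // Pick the strategy whose attributes best match the client attributes,
    // preferring an exact match and otherwise keeping the highest score seen.
    android::product_strategy_t pickStrategy(
            const std::vector<AudioProductStrategy>& strategies,
            const audio_attributes_t& clientAttr) {
        int bestScore = AudioProductStrategy::NO_MATCH;
        android::product_strategy_t best = android::PRODUCT_STRATEGY_NONE;
        for (const auto& strategy : strategies) {
            for (const auto& groupAttr : strategy.getVolumeGroupAttributes()) {
                const int score = AudioProductStrategy::attributesMatchesScore(
                        groupAttr.getAttributes(), clientAttr);
                if (score == AudioProductStrategy::MATCH_EQUALS) {
                    return strategy.getId();  // exact match wins immediately
                }
                if (score > bestScore) {
                    bestScore = score;
                    best = strategy.getId();
                }
            }
        }
        return best;
    }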
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index cb05dd9..00f2c7a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -26,7 +26,6 @@
 #include <media/AudioTimestamp.h>
 #include <media/MediaMetricsItem.h>
 #include <media/Modulo.h>
-#include <media/MicrophoneInfo.h>
 #include <media/RecordingActivityTracker.h>
 #include <utils/RefBase.h>
 #include <utils/threads.h>
@@ -46,27 +45,6 @@
 {
 public:
 
-    /* Events used by AudioRecord callback function (legacy_callback_t).
-     * Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*.
-     */
-    enum event_type {
-        EVENT_MORE_DATA = 0,        // Request to read available data from buffer.
-                                    // If this event is delivered but the callback handler
-                                    // does not want to read the available data, the handler must
-                                    // explicitly ignore the event by setting frameCount to zero.
-        EVENT_OVERRUN = 1,          // Buffer overrun occurred.
-        EVENT_MARKER = 2,           // Record head is at the specified marker position
-                                    // (See setMarkerPosition()).
-        EVENT_NEW_POS = 3,          // Record head is at a new position
-                                    // (See setPositionUpdatePeriod()).
-        EVENT_NEW_IAUDIORECORD = 4, // IAudioRecord was re-created, either due to re-routing and
-                                    // voluntary invalidation by mediaserver, or mediaserver crash.
-    };
-
-    /* Client should declare a Buffer and pass address to obtainBuffer()
-     * and releaseBuffer().  See also legacy_callback_t for EVENT_MORE_DATA.
-     */
-
     class Buffer
     {
       friend AudioRecord;
@@ -122,7 +100,6 @@
      *          - EVENT_NEW_IAUDIORECORD: unused.
      */
 
-    typedef void (*legacy_callback_t)(int event, void* user, void *info);
 
     class IAudioRecordCallback : public virtual RefBase {
         friend AudioRecord;
@@ -226,24 +203,6 @@
                                     float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT);
 
 
-                        AudioRecord(audio_source_t inputSource,
-                                    uint32_t sampleRate,
-                                    audio_format_t format,
-                                    audio_channel_mask_t channelMask,
-                                    const android::content::AttributionSourceState& client,
-                                    size_t frameCount,
-                                    legacy_callback_t callback,
-                                    void* user,
-                                    uint32_t notificationFrames = 0,
-                                    audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
-                                    transfer_type transferType = TRANSFER_DEFAULT,
-                                    audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                                    const audio_attributes_t* pAttributes = nullptr,
-                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
-                                    audio_microphone_direction_t
-                                        selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-                                    float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT);
-
     /* Terminates the AudioRecord and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioRecord.
      */
@@ -286,27 +245,6 @@
                             float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT,
                             int32_t maxSharedAudioHistoryMs = 0);
 
-           status_t    set(audio_source_t inputSource,
-                            uint32_t sampleRate,
-                            audio_format_t format,
-                            audio_channel_mask_t channelMask,
-                            size_t frameCount,
-                            legacy_callback_t callback,
-                            void* user,
-                            uint32_t notificationFrames = 0,
-                            bool threadCanCallJava = false,
-                            audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
-                            transfer_type transferType = TRANSFER_DEFAULT,
-                            audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                            uid_t uid = AUDIO_UID_INVALID,
-                            pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = nullptr,
-                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
-                            audio_microphone_direction_t
-                                selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
-                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT,
-                            int32_t maxSharedAudioHistoryMs = 0);
-
     /* Result of constructing the AudioRecord. This must be checked for successful initialization
      * before using any AudioRecord API (except for set()), because using
      * an uninitialized AudioRecord produces undefined results.
@@ -387,6 +325,15 @@
      */
             uint32_t    getSampleRate() const   { return mSampleRate; }
 
+    /* Return the sample rate from the AudioFlinger input thread. */
+            uint32_t    getHalSampleRate() const;
+
+    /* Return the channel count from the AudioFlinger input thread. */
+            uint32_t    getHalChannelCount() const;
+
+    /* Return the HAL format from the AudioFlinger input thread. */
+            audio_format_t    getHalFormat() const;
+
     /* Sets marker position. When record reaches the number of frames specified,
      * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
      * with marker == 0 cancels marker notification callback.
@@ -636,10 +583,11 @@
     /* Get the flags */
             audio_input_flags_t getFlags() const { AutoMutex _l(mLock); return mFlags; }
 
-    /* Get active microphones. A empty vector of MicrophoneInfo will be passed as a parameter,
+    /* Get active microphones. An empty vector of MicrophoneInfoFw will be passed as a parameter,
      * the data will be filled in when querying the HAL.
      */
-            status_t    getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+            status_t    getActiveMicrophones(
+                    std::vector<media::MicrophoneInfoFw>* activeMicrophones);
 
     /* Set the Microphone direction (for processing purposes).
      */
@@ -831,6 +779,9 @@
     size_t                  mServerSampleSize;
     std::unique_ptr<uint8_t[]> mFormatConversionBufRaw;
     Buffer                  mFormatConversionBuffer;
+    uint32_t                mHalSampleRate;          // AudioFlinger thread sample rate
+    uint32_t                mHalChannelCount;        // AudioFlinger thread channel count
+    audio_format_t          mHalFormat;              // AudioFlinger thread format
 
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
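
The new getHal*() accessors expose the AudioFlinger input thread configuration alongside the client-side values, and getActiveMicrophones() now fills the AIDL-generated MicrophoneInfoFw type. A rough usage sketch, assuming an already initialized AudioRecord (the log tag is hypothetical):

    #define LOG_TAG "AudioRecordHalDump"  // hypothetical tag for this sketch
    #include <vector>
    #include <android/media/MicrophoneInfoFw.h>
    #include <media/AudioRecord.h>
    #include <utils/Log.h>

    void dumpHalConfig(const android::sp<android::AudioRecord>& record) {
        // The getHal*() accessors report the AudioFlinger input thread (HAL-side) configuration,
        // which may differ from the client-requested values.
        ALOGI("client rate %u, HAL rate %u, HAL channels %u, HAL format %#x",
              record->getSampleRate(), record->getHalSampleRate(),
              record->getHalChannelCount(), static_cast<unsigned>(record->getHalFormat()));

        // getActiveMicrophones() now fills a vector of the AIDL-generated MicrophoneInfoFw type.
        std::vector<android::media::MicrophoneInfoFw> activeMics;
        if (record->getActiveMicrophones(&activeMics) == android::NO_ERROR) {
            ALOGI("active microphones: %zu", activeMics.size());
        }
    }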
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 360b83d..8f8c9dd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -23,11 +23,19 @@
 #include <vector>
 
 #include <android/content/AttributionSourceState.h>
+#include <android/media/AudioPolicyConfig.h>
+#include <android/media/AudioPortFw.h>
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
+#include <android/media/EffectDescriptor.h>
 #include <android/media/INativeSpatializerCallback.h>
+#include <android/media/ISoundDose.h>
+#include <android/media/ISoundDoseCallback.h>
 #include <android/media/ISpatializer.h>
+#include <android/media/MicrophoneInfoFw.h>
+#include <android/media/RecordClientInfo.h>
+#include <android/media/audio/common/AudioConfigBase.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <android/media/audio/common/AudioPort.h>
@@ -38,7 +46,6 @@
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
 #include <media/AudioIoDescriptor.h>
-#include <media/MicrophoneInfo.h>
 #include <system/audio.h>
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
@@ -121,6 +128,9 @@
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
 
+    // test API: switch HALs into a mode that simulates external device connections
+    static status_t setSimulateDeviceConnections(bool enabled);
+
     // returns true in *state if tracks are active on the specified stream or have been active
     // in the past inPastMs milliseconds
     static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs);
@@ -162,6 +172,10 @@
     // HotwordDetectionService.
     static void setAudioFlingerBinder(const sp<IBinder>& audioFlinger);
 
+    // Sets a local AudioFlinger interface to be used by AudioSystem.
+    // This is used by audioserver main() to avoid binder AIDL translation.
+    static status_t setLocalAudioFlinger(const sp<IAudioFlinger>& af);
+
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
 
@@ -279,29 +293,67 @@
     static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config);
     static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage);
 
+    /**
+     * Get an output stream for the given parameters.
+     *
+     * @param[in] attr the requested audio attributes
+     * @param[in|out] output the io handle of the output used for the playback; it is only
+     *                       pre-specified by the caller when starting an mmap thread
+     * @param[in] session the session id of the client
+     * @param[in|out] stream the stream type used for the playback
+     * @param[in] attributionSource the attribution source of the client, used for checks on
+     *                              permission protected data
+     * @param[in|out] config the configuration requested by the client; a suggested configuration
+     *                       is returned if no suitable output is found for the requested one
+     * @param[in] flags the output flags requested by the client
+     * @param[in|out] selectedDeviceId the device id requested for the playback; the device id
+     *                                 actually selected for the playback is returned
+     * @param[out] portId the generated port id identifying the client
+     * @param[out] secondaryOutputs collection of io handles for secondary outputs
+     * @param[out] isSpatialized true if the playback will be spatialized
+     * @param[out] isBitPerfect true if the playback will be bit-perfect
+     * @return NO_ERROR if the call is successful, an error code otherwise
+     */
     static status_t getOutputForAttr(audio_attributes_t *attr,
                                      audio_io_handle_t *output,
                                      audio_session_t session,
                                      audio_stream_type_t *stream,
                                      const AttributionSourceState& attributionSource,
-                                     const audio_config_t *config,
+                                     audio_config_t *config,
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      audio_port_handle_t *portId,
                                      std::vector<audio_io_handle_t> *secondaryOutputs,
-                                     bool *isSpatialized);
+                                     bool *isSpatialized,
+                                     bool *isBitPerfect);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
 
-    // Client must successfully hand off the handle reference to AudioFlinger via createRecord(),
-    // or release it with releaseInput().
+    /**
+     * Get an input stream for the given parameters.
+     * The client must successfully hand off the handle reference to AudioFlinger via
+     * createRecord(), or release it with releaseInput().
+     *
+     * @param[in] attr the requested audio attributes
+     * @param[in|out] input the io handle of the input used for the capture; it is only
+     *                      pre-specified by the caller when starting an mmap thread
+     * @param[in] riid a unique id identifying the record client
+     * @param[in] session the session id of the client
+     * @param[in] attributionSource the attribution source of the client, used for checks on
+     *                              permission protected data
+     * @param[in|out] config the configuration requested by the client; a suggested configuration
+     *                       is returned if no suitable input is found for the requested one
+     * @param[in] flags the input flags requested by the client
+     * @param[in|out] selectedDeviceId the device id requested for the capture; the device id
+     *                                 actually selected for the capture is returned
+     * @param[out] portId the generated port id identifying the client
+     * @return NO_ERROR if the call is successful, an error code otherwise
+     */
     static status_t getInputForAttr(const audio_attributes_t *attr,
                                     audio_io_handle_t *input,
                                     audio_unique_id_t riid,
                                     audio_session_t session,
-                                     const AttributionSourceState& attributionSource,
-                                    const audio_config_base_t *config,
+                                    const AttributionSourceState& attributionSource,
+                                    audio_config_base_t *config,
                                     audio_input_flags_t flags,
                                     audio_port_handle_t *selectedDeviceId,
                                     audio_port_handle_t *portId);
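
To make the updated getOutputForAttr() contract concrete (config is now in/out and isBitPerfect is a new out-flag), here is an illustrative sketch; the session and attributionSource values are assumed to be provided by the caller, and error handling is elided:

    #include <vector>
    #include <media/AudioSystem.h>

    android::status_t openMediaOutput(
            audio_session_t session,
            const android::content::AttributionSourceState& attributionSource) {
        using namespace android;

        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
        attr.usage = AUDIO_USAGE_MEDIA;

        audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
        audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
        audio_config_t config = AUDIO_CONFIG_INITIALIZER;  // in/out: a suggestion may be returned
        config.sample_rate = 48000;
        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
        config.format = AUDIO_FORMAT_PCM_16_BIT;
        audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
        audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
        std::vector<audio_io_handle_t> secondaryOutputs;
        bool isSpatialized = false;
        bool isBitPerfect = false;

        status_t status = AudioSystem::getOutputForAttr(
                &attr, &output, session, &stream, attributionSource, &config,
                AUDIO_OUTPUT_FLAG_NONE, &selectedDeviceId, &portId, &secondaryOutputs,
                &isSpatialized, &isBitPerfect);
        if (status == NO_ERROR) {
            // Hand portId to AudioSystem::startOutput() when playback actually starts,
            // and to releaseOutput() when the client goes away.
        }
        return status;
    }

The getInputForAttr() variant documented above follows the same pattern on the capture side, with riid identifying the record client and the handle released via createRecord() or releaseInput().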
@@ -331,7 +383,7 @@
     static status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
     static product_strategy_t getStrategyForStream(audio_stream_type_t stream);
-    static status_t getDevicesForAttributes(const AudioAttributes &aa,
+    static status_t getDevicesForAttributes(const audio_attributes_t &aa,
                                             AudioDeviceTypeAddrVector *devices,
                                             bool forVolume);
 
@@ -378,6 +430,9 @@
                                    struct audio_port_v7 *ports,
                                    unsigned int *generation);
 
+    static status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                            std::vector<media::AudioPortFw>* result);
+
     /* Get attributes for a given audio port. On input, the port
      * only needs the 'id' field to be filled in. */
     static status_t getAudioPort(struct audio_port_v7 *port);
@@ -428,7 +483,7 @@
     static float    getStreamVolumeDB(
             audio_stream_type_t stream, int index, audio_devices_t device);
 
-    static status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+    static status_t getMicrophones(std::vector<media::MicrophoneInfoFw> *microphones);
 
     static status_t getHwOffloadFormatsSupportedForBluetoothMedia(
                                     audio_devices_t device, std::vector<audio_format_t> *formats);
@@ -455,7 +510,7 @@
 
     static status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
     static status_t getProductStrategyFromAudioAttributes(
-            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            const audio_attributes_t &aa, product_strategy_t &productStrategy,
             bool fallbackOnDefault = true);
 
     static audio_attributes_t streamTypeToAttributes(audio_stream_type_t stream);
@@ -464,7 +519,8 @@
     static status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups);
 
     static status_t getVolumeGroupFromAudioAttributes(
-            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault = true);
+            const audio_attributes_t &aa, volume_group_t &volumeGroup,
+            bool fallbackOnDefault = true);
 
     static status_t setRttEnabled(bool enabled);
 
@@ -479,7 +535,11 @@
     static status_t setDevicesRoleForStrategy(product_strategy_t strategy,
             device_role_t role, const AudioDeviceTypeAddrVector &devices);
 
-    static status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role);
+    static status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role, const AudioDeviceTypeAddrVector &devices);
+
+    static status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role);
 
     static status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
             device_role_t role, AudioDeviceTypeAddrVector &devices);
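
removeDevicesRoleForStrategy() now takes the specific devices to remove, while the new clearDevicesRoleForStrategy() drops every device registered for the role. A sketch of the distinction, assuming the caller already holds a valid product_strategy_t (for example from AudioSystem::listAudioProductStrategies()):

    #include <media/AudioDeviceTypeAddr.h>
    #include <media/AudioSystem.h>

    void preferSpeakerThenReset(product_strategy_t strategy) {
        using namespace android;
        const AudioDeviceTypeAddrVector devices = {
                AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, /* address */ "")};

        AudioSystem::setDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED, devices);
        // Remove just the devices passed in...
        AudioSystem::removeDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED, devices);
        // ...or drop every device registered for this role on the strategy.
        AudioSystem::clearDevicesRoleForStrategy(strategy, DEVICE_ROLE_PREFERRED);
    }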
@@ -546,6 +606,16 @@
                                      bool *canBeSpatialized);
 
     /**
+     * Registers the sound dose callback with the audio server and returns the ISoundDose
+     * interface.
+     *
+     * \param callback to send messages to the audio server
+     * \param soundDose binder to send messages to the AudioService
+     **/
+    static status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                          sp<media::ISoundDose>* soundDose);
+
+    /**
      * Query how the direct playback is currently supported on the device.
      * @param attr audio attributes describing the playback use case
      * @param config audio configuration for the playback
@@ -569,6 +639,33 @@
     static status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                             std::vector<audio_profile>* audioProfiles);
 
+    static status_t setRequestedLatencyMode(
+            audio_io_handle_t output, audio_latency_mode_t mode);
+
+    static status_t getSupportedLatencyModes(audio_io_handle_t output,
+            std::vector<audio_latency_mode_t>* modes);
+
+    static status_t setBluetoothVariableLatencyEnabled(bool enabled);
+
+    static status_t isBluetoothVariableLatencyEnabled(bool *enabled);
+
+    static status_t supportsBluetoothVariableLatency(bool *support);
+
+    static status_t getSupportedMixerAttributes(audio_port_handle_t portId,
+                                                std::vector<audio_mixer_attributes_t> *mixerAttrs);
+    static status_t setPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                audio_port_handle_t portId,
+                                                uid_t uid,
+                                                const audio_mixer_attributes_t *mixerAttr);
+    static status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                audio_port_handle_t portId,
+                                                std::optional<audio_mixer_attributes_t>* mixerAttr);
+    static status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                  audio_port_handle_t portId,
+                                                  uid_t uid);
+
+    static status_t getAudioPolicyConfig(media::AudioPolicyConfig *config);
+
     // A listener for capture state changes.
     class CaptureStateListener : public virtual RefBase {
     public:
@@ -644,6 +741,22 @@
                                               audio_io_handle_t audioIo,
                                               audio_port_handle_t portId);
 
+    class SupportedLatencyModesCallback : public virtual RefBase
+    {
+    public:
+
+                SupportedLatencyModesCallback() = default;
+        virtual ~SupportedLatencyModesCallback() = default;
+
+        virtual void onSupportedLatencyModesChanged(
+                audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) = 0;
+    };
+
+    static status_t addSupportedLatencyModesCallback(
+            const sp<SupportedLatencyModesCallback>& callback);
+    static status_t removeSupportedLatencyModesCallback(
+            const sp<SupportedLatencyModesCallback>& callback);
+
     static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
     static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
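
A client that wants to react to latency-mode changes on an output subclasses the new SupportedLatencyModesCallback and registers it through AudioSystem. A minimal sketch; the class name and log tag are hypothetical:

    #define LOG_TAG "LatencyModesListener"
    #include <vector>
    #include <media/AudioSystem.h>
    #include <utils/Log.h>

    namespace {

    class MyLatencyModesListener : public android::AudioSystem::SupportedLatencyModesCallback {
      public:
        void onSupportedLatencyModesChanged(
                audio_io_handle_t output,
                const std::vector<audio_latency_mode_t>& modes) override {
            ALOGI("output %d now reports %zu supported latency modes", output, modes.size());
        }
    };

    }  // namespace

    void registerLatencyModesListener() {
        // The callback list is held as weak references, so the caller must keep a strong
        // reference alive for as long as it wants notifications.
        static android::sp<MyLatencyModesListener> sListener =
                android::sp<MyLatencyModesListener>::make();
        android::AudioSystem::addSupportedLatencyModesCallback(sListener);
        // ... later: AudioSystem::removeSupportedLatencyModesCallback(sListener);
    }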
@@ -682,6 +795,10 @@
                 media::AudioIoConfigEvent event,
                 const media::AudioIoDescriptor& ioDesc) override;
 
+        binder::Status onSupportedLatencyModesChanged(
+                int output,
+                const std::vector<media::audio::common::AudioLatencyMode>& latencyModes) override;
+
         status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
                                                audio_io_handle_t audioIo,
                                                audio_port_handle_t portId);
@@ -689,6 +806,11 @@
                                            audio_io_handle_t audioIo,
                                            audio_port_handle_t portId);
 
+        status_t addSupportedLatencyModesCallback(
+                        const sp<SupportedLatencyModesCallback>& callback);
+        status_t removeSupportedLatencyModesCallback(
+                        const sp<SupportedLatencyModesCallback>& callback);
+
         audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
 
     private:
@@ -697,6 +819,10 @@
 
         std::map<audio_io_handle_t, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>>
                 mAudioDeviceCallbacks;
+
+        std::vector<wp<SupportedLatencyModesCallback>>
+                mSupportedLatencyModesCallbacks GUARDED_BY(mLock);
+
         // cached values for recording getInputBufferSize() queries
         size_t                              mInBuffSize;    // zero indicates cache is invalid
         uint32_t                            mInSamplingRate;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 9f540e6..8f712db 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIOTRACK_H
 #define ANDROID_AUDIOTRACK_H
 
+#include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
 #include <cutils/sched_policy.h>
 #include <media/AudioSystem.h>
@@ -148,7 +149,6 @@
      *          - EVENT_NEW_TIMESTAMP: pointer to const AudioTimestamp.
      */
 
-    typedef void (*legacy_callback_t)(int event, void* user, void* info);
     class IAudioTrackCallback : public virtual RefBase {
       friend AudioTrack;
       protected:
@@ -343,26 +343,6 @@
                                     float maxRequiredSpeed = 1.0f,
                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
 
-
-                        AudioTrack( audio_stream_type_t streamType,
-                                    uint32_t sampleRate,
-                                    audio_format_t format,
-                                    audio_channel_mask_t channelMask,
-                                    size_t frameCount,
-                                    audio_output_flags_t flags,
-                                    legacy_callback_t cbf,
-                                    void* user = nullptr,
-                                    int32_t notificationFrames = 0,
-                                    audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
-                                    transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = nullptr,
-                                    const AttributionSourceState& attributionSource =
-                                        AttributionSourceState(),
-                                    const audio_attributes_t* pAttributes = nullptr,
-                                    bool doNotReconnect = false,
-                                    float maxRequiredSpeed = 1.0f,
-                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
-
     /* Creates an audio track and registers it with AudioFlinger.
      * With this constructor, the track is configured for static buffer mode.
      * Data to be rendered is passed in a shared memory buffer
@@ -391,25 +371,6 @@
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f);
 
-
-                        AudioTrack( audio_stream_type_t streamType,
-                                    uint32_t sampleRate,
-                                    audio_format_t format,
-                                    audio_channel_mask_t channelMask,
-                                    const sp<IMemory>& sharedBuffer,
-                                    audio_output_flags_t flags,
-                                    legacy_callback_t cbf,
-                                    void* user          = nullptr,
-                                    int32_t notificationFrames = 0,
-                                    audio_session_t sessionId   = AUDIO_SESSION_ALLOCATE,
-                                    transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = nullptr,
-                                    const AttributionSourceState& attributionSource =
-                                        AttributionSourceState(),
-                                    const audio_attributes_t* pAttributes = nullptr,
-                                    bool doNotReconnect = false,
-                                    float maxRequiredSpeed = 1.0f);
-
     /* Terminates the AudioTrack and unregisters it from AudioFlinger.
      * Also destroys all resources associated with the AudioTrack.
      */
@@ -490,28 +451,8 @@
                         }
             void       onFirstRef() override;
         public:
-            status_t    set(audio_stream_type_t streamType,
-                            uint32_t sampleRate,
-                            audio_format_t format,
-                            audio_channel_mask_t channelMask,
-                            size_t frameCount,
-                            audio_output_flags_t flags,
-                            legacy_callback_t callback,
-                            void * user = nullptr,
-                            int32_t notificationFrames = 0,
-                            const sp<IMemory>& sharedBuffer = 0,
-                            bool threadCanCallJava = false,
-                            audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
-                            transfer_type transferType = TRANSFER_DEFAULT,
-                            const audio_offload_info_t *offloadInfo = nullptr,
-                            const AttributionSourceState& attributionSource =
-                                AttributionSourceState(),
-                            const audio_attributes_t* pAttributes = nullptr,
-                            bool doNotReconnect = false,
-                            float maxRequiredSpeed = 1.0f,
-                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
-
-    // FIXME(b/169889714): Vendor code depends on the old method signature at link time
+            typedef void (*legacy_callback_t)(int event, void* user, void* info);
+            // FIXME(b/169889714): Vendor code depends on the old method signature at link time
             status_t    set(audio_stream_type_t streamType,
                             uint32_t sampleRate,
                             audio_format_t format,
@@ -714,6 +655,15 @@
      */
             uint32_t    getOriginalSampleRate() const;
 
+    /* Return the sample rate from the AudioFlinger output thread. */
+            uint32_t    getHalSampleRate() const;
+
+    /* Return the channel count from the AudioFlinger output thread. */
+            uint32_t    getHalChannelCount() const;
+
+    /* Return the HAL format from the AudioFlinger output thread. */
+            audio_format_t    getHalFormat() const;
+
     /* Sets the Dual Mono mode presentation on the output device. */
             status_t    setDualMonoMode(audio_dual_mono_mode_t mode);
 
@@ -1176,6 +1126,9 @@
 
             bool isPlaying() {
                 AutoMutex lock(mLock);
+                return isPlaying_l();
+            }
+            bool isPlaying_l() {
                 return mState == STATE_ACTIVE || mState == STATE_STOPPING;
             }
 
@@ -1205,6 +1158,8 @@
             void setAudioTrackCallback(const sp<media::IAudioTrackCallback>& callback) {
                 mAudioTrackCallback->setAudioTrackCallback(callback);
             }
+ private:
+            void triggerPortIdUpdate_l();
 
  protected:
     /* copying audio tracks is not allowed */
@@ -1310,6 +1265,11 @@
     audio_track_cblk_t*     mCblk;                  // re-load after mLock.unlock()
     audio_io_handle_t       mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
 
+    // A copy of the shared memory and proxy references, kept between obtainBuffer() and
+    // releaseBuffer() so that the shared memory stays valid while the data is being processed.
+    sp<IMemory>               mCblkMemoryObtainBufferRef GUARDED_BY(mLock);
+    sp<AudioTrackClientProxy> mProxyObtainBufferRef GUARDED_BY(mLock);
+
     sp<AudioTrackThread>    mAudioTrackThread;
     bool                    mThreadCanCallJava;
 
@@ -1327,12 +1287,14 @@
     size_t                  mReqFrameCount;         // frame count to request the first or next time
                                                     // a new IAudioTrack is needed, non-decreasing
 
-    // The following AudioFlinger server-side values are cached in createAudioTrack_l().
+    // The following AudioFlinger server-side values are cached in createTrack_l().
     // These values can be used for informational purposes until the track is invalidated,
     // whereupon restoreTrack_l() calls createTrack_l() to update the values.
     uint32_t                mAfLatency;             // AudioFlinger latency in ms
     size_t                  mAfFrameCount;          // AudioFlinger frame count
     uint32_t                mAfSampleRate;          // AudioFlinger sample rate
+    uint32_t                mAfChannelCount;        // AudioFlinger channel count
+    audio_format_t          mAfFormat;              // AudioFlinger format
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
@@ -1471,7 +1433,7 @@
 
     audio_session_t         mSessionId;
     int                     mAuxEffectId;
-    audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+    audio_port_handle_t     mPortId = AUDIO_PORT_HANDLE_NONE; // Id from Audio Policy Manager
 
     /**
      * mPlayerIId is the player id of the AudioTrack used by AudioManager.
@@ -1479,6 +1441,9 @@
      */
     int                     mPlayerIId = -1;  // AudioManager.h PLAYER_PIID_INVALID
 
+    /** Interface for interacting with the AudioService. */
+    sp<IAudioManager>       mAudioManager;
+
     /**
      * mLogSessionId is a string identifying this AudioTrack for the metrics service.
      * It may be unique or shared with other objects.  An empty string means the
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 3c3715d..2e2ef65 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -32,7 +32,6 @@
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
 #include <utils/String8.h>
-#include <media/MicrophoneInfo.h>
 #include <map>
 #include <string>
 #include <vector>
@@ -55,6 +54,8 @@
 #include "android/media/IAudioTrackCallback.h"
 #include "android/media/IEffect.h"
 #include "android/media/IEffectClient.h"
+#include "android/media/ISoundDose.h"
+#include "android/media/ISoundDoseCallback.h"
 #include "android/media/OpenInputRequest.h"
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
@@ -116,6 +117,8 @@
         size_t   afFrameCount;
         uint32_t afSampleRate;
         uint32_t afLatencyMs;
+        audio_channel_mask_t afChannelMask;
+        audio_format_t afFormat;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
         sp<media::IAudioTrack> audioTrack;
@@ -169,6 +172,7 @@
         audio_port_handle_t portId;
         sp<media::IAudioRecord> audioRecord;
         audio_config_base_t serverConfig;
+        audio_config_base_t halConfig;
 
         ConversionResult<media::CreateRecordResponse> toAidl() const;
         static ConversionResult<CreateRecordOutput>
@@ -263,8 +267,6 @@
 
     virtual status_t closeInput(audio_io_handle_t input) = 0;
 
-    virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
-
     virtual status_t setVoiceVolume(float volume) = 0;
 
     virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
@@ -339,7 +341,7 @@
     virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const = 0;
 
     /* List available microphones and their characteristics */
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
+    virtual status_t getMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) = 0;
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids) = 0;
 
@@ -359,7 +361,29 @@
 
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state) = 0;
+
+    virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
+
+    virtual status_t setRequestedLatencyMode(
+            audio_io_handle_t output, audio_latency_mode_t mode) = 0;
+
+    virtual status_t getSupportedLatencyModes(audio_io_handle_t output,
+            std::vector<audio_latency_mode_t>* modes) = 0;
+
+    virtual status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                           sp<media::ISoundDose>* soundDose) = 0;
+
+    virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
+
+    virtual status_t setBluetoothVariableLatencyEnabled(bool enabled) = 0;
+
+    virtual status_t isBluetoothVariableLatencyEnabled(bool* enabled) = 0;
+
+    virtual status_t supportsBluetoothVariableLatency(bool* support) = 0;
+
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) = 0;
 };
 
 /**
@@ -414,7 +438,6 @@
     status_t openInput(const media::OpenInputRequest& request,
                        media::OpenInputResponse* response) override;
     status_t closeInput(audio_io_handle_t input) override;
-    status_t invalidateStream(audio_stream_type_t stream) override;
     status_t setVoiceVolume(float volume) override;
     status_t getRenderPosition(uint32_t* halFrames, uint32_t* dspFrames,
                                audio_io_handle_t output) const override;
@@ -451,7 +474,7 @@
     status_t audioPolicyReady() override;
 
     size_t frameCountHAL(audio_io_handle_t ioHandle) const override;
-    status_t getMicrophones(std::vector<media::MicrophoneInfo>* microphones) override;
+    status_t getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) override;
     status_t setAudioHalPids(const std::vector<pid_t>& pids) override;
     status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     status_t updateSecondaryOutputs(
@@ -461,7 +484,20 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
     int32_t getAAudioMixerBurstCount() override;
     int32_t getAAudioHardwareBurstMinUsec() override;
-    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                     media::DeviceConnectedState state) override;
+    status_t setSimulateDeviceConnections(bool enabled) override;
+    status_t setRequestedLatencyMode(audio_io_handle_t output,
+            audio_latency_mode_t mode) override;
+    status_t getSupportedLatencyModes(
+            audio_io_handle_t output, std::vector<audio_latency_mode_t>* modes) override;
+    status_t setBluetoothVariableLatencyEnabled(bool enabled) override;
+    status_t isBluetoothVariableLatencyEnabled(bool* enabled) override;
+    status_t supportsBluetoothVariableLatency(bool* support) override;
+    status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                   sp<media::ISoundDose>* soundDose) override;
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+    status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -515,7 +551,6 @@
             RESTORE_OUTPUT = media::BnAudioFlingerService::TRANSACTION_restoreOutput,
             OPEN_INPUT = media::BnAudioFlingerService::TRANSACTION_openInput,
             CLOSE_INPUT = media::BnAudioFlingerService::TRANSACTION_closeInput,
-            INVALIDATE_STREAM = media::BnAudioFlingerService::TRANSACTION_invalidateStream,
             SET_VOICE_VOLUME = media::BnAudioFlingerService::TRANSACTION_setVoiceVolume,
             GET_RENDER_POSITION = media::BnAudioFlingerService::TRANSACTION_getRenderPosition,
             GET_INPUT_FRAMES_LOST = media::BnAudioFlingerService::TRANSACTION_getInputFramesLost,
@@ -551,6 +586,19 @@
             GET_AAUDIO_MIXER_BURST_COUNT = media::BnAudioFlingerService::TRANSACTION_getAAudioMixerBurstCount,
             GET_AAUDIO_HARDWARE_BURST_MIN_USEC = media::BnAudioFlingerService::TRANSACTION_getAAudioHardwareBurstMinUsec,
             SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
+            SET_SIMULATE_DEVICE_CONNECTIONS = media::BnAudioFlingerService::TRANSACTION_setSimulateDeviceConnections,
+            SET_REQUESTED_LATENCY_MODE = media::BnAudioFlingerService::TRANSACTION_setRequestedLatencyMode,
+            GET_SUPPORTED_LATENCY_MODES = media::BnAudioFlingerService::TRANSACTION_getSupportedLatencyModes,
+            SET_BLUETOOTH_VARIABLE_LATENCY_ENABLED =
+                    media::BnAudioFlingerService::TRANSACTION_setBluetoothVariableLatencyEnabled,
+            IS_BLUETOOTH_VARIABLE_LATENCY_ENABLED =
+                    media::BnAudioFlingerService::TRANSACTION_isBluetoothVariableLatencyEnabled,
+            SUPPORTS_BLUETOOTH_VARIABLE_LATENCY =
+                    media::BnAudioFlingerService::TRANSACTION_supportsBluetoothVariableLatency,
+            GET_SOUND_DOSE_INTERFACE = media::BnAudioFlingerService::TRANSACTION_getSoundDoseInterface,
+            INVALIDATE_TRACKS = media::BnAudioFlingerService::TRANSACTION_invalidateTracks,
+            GET_AUDIO_POLICY_CONFIG =
+                    media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
         };
 
     protected:
@@ -630,7 +678,6 @@
     Status openInput(const media::OpenInputRequest& request,
                      media::OpenInputResponse* _aidl_return) override;
     Status closeInput(int32_t input) override;
-    Status invalidateStream(media::audio::common::AudioStreamType stream) override;
     Status setVoiceVolume(float volume) override;
     Status getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) override;
     Status getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) override;
@@ -651,17 +698,17 @@
     Status getPrimaryOutputSamplingRate(int32_t* _aidl_return) override;
     Status getPrimaryOutputFrameCount(int64_t* _aidl_return) override;
     Status setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
-    Status getAudioPort(const media::AudioPort& port, media::AudioPort* _aidl_return) override;
-    Status createAudioPatch(const media::AudioPatch& patch, int32_t* _aidl_return) override;
+    Status getAudioPort(const media::AudioPortFw& port, media::AudioPortFw* _aidl_return) override;
+    Status createAudioPatch(const media::AudioPatchFw& patch, int32_t* _aidl_return) override;
     Status releaseAudioPatch(int32_t handle) override;
     Status listAudioPatches(int32_t maxCount,
-                            std::vector<media::AudioPatch>* _aidl_return) override;
-    Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+                            std::vector<media::AudioPatchFw>* _aidl_return) override;
+    Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
     Status getAudioHwSyncForSession(int32_t sessionId, int32_t* _aidl_return) override;
     Status systemReady() override;
     Status audioPolicyReady() override;
     Status frameCountHAL(int32_t ioHandle, int64_t* _aidl_return) override;
-    Status getMicrophones(std::vector<media::MicrophoneInfoData>* _aidl_return) override;
+    Status getMicrophones(std::vector<media::MicrophoneInfoFw>* _aidl_return) override;
     Status setAudioHalPids(const std::vector<int32_t>& pids) override;
     Status setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     Status updateSecondaryOutputs(
@@ -671,8 +718,20 @@
             std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
     Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
     Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
-    Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
-
+    Status setDeviceConnectedState(const media::AudioPortFw& port,
+                                   media::DeviceConnectedState state) override;
+    Status setSimulateDeviceConnections(bool enabled) override;
+    Status setRequestedLatencyMode(
+            int output, media::audio::common::AudioLatencyMode mode) override;
+    Status getSupportedLatencyModes(int output,
+            std::vector<media::audio::common::AudioLatencyMode>* _aidl_return) override;
+    Status setBluetoothVariableLatencyEnabled(bool enabled) override;
+    Status isBluetoothVariableLatencyEnabled(bool* enabled) override;
+    Status supportsBluetoothVariableLatency(bool* support) override;
+    Status getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                 sp<media::ISoundDose>* _aidl_return) override;
+    Status invalidateTracks(const std::vector<int32_t>& portIds) override;
+    Status getAudioPolicyConfig(media::AudioPolicyConfig* _aidl_return) override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index 23b6bfd..5475f76 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -53,6 +53,10 @@
 
             void baseUpdateDeviceId(audio_port_handle_t deviceId);
 
+            /**
+             * Updates the mapping in the AudioService between portId and piid
+             */
+            void triggerPortIdUpdate(audio_port_handle_t portId) const;
 protected:
 
             void init(player_type_t playerType, audio_usage_t usage, audio_session_t sessionId);
@@ -74,7 +78,6 @@
     // player interface ID, uniquely identifies the player in the system
     // effectively const after PlayerBase::init().
     audio_unique_id_t mPIId;
-
 private:
             // report events to AudioService
             void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 54e778e..ed9ddd6 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -22,6 +22,8 @@
 #include <system/audio.h>
 
 #include <android/media/AudioMix.h>
+#include <android/media/AudioMixerAttributesInternal.h>
+#include <android/media/AudioMixerBehavior.h>
 #include <android/media/AudioMixCallbackFlag.h>
 #include <android/media/AudioMixRouteFlag.h>
 #include <android/media/AudioMixType.h>
@@ -102,4 +104,16 @@
 ConversionResult<media::AudioOffloadMode>
 legacy2aidl_audio_offload_mode_t_AudioOffloadMode(audio_offload_mode_t legacy);
 
+ConversionResult<audio_mixer_behavior_t>
+aidl2legacy_AudioMixerBehavior_audio_mixer_behavior_t(media::AudioMixerBehavior aidl);
+ConversionResult<media::AudioMixerBehavior>
+legacy2aidl_audio_mixer_behavior_t_AudioMixerBehavior(audio_mixer_behavior_t legacy);
+
+ConversionResult<audio_mixer_attributes_t>
+aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
+        const media::AudioMixerAttributesInternal& aidl);
+ConversionResult<media::AudioMixerAttributesInternal>
+legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(
+        const audio_mixer_attributes_t& legacy);
+
 }  // namespace android
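
A round-trip sketch for the new mixer-attributes conversions, assuming ConversionResult exposes the usual ok()/value() accessors of android::base::expected:

    #include <media/PolicyAidlConversion.h>

    bool roundTripMixerAttributes(const audio_mixer_attributes_t& legacy) {
        auto aidl =
                android::legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(legacy);
        if (!aidl.ok()) return false;
        auto back = android::aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(
                aidl.value());
        return back.ok();
    }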
diff --git a/media/libaudioclient/include/media/AudioAttributes.h b/media/libaudioclient/include/media/VolumeGroupAttributes.h
similarity index 72%
rename from media/libaudioclient/include/media/AudioAttributes.h
rename to media/libaudioclient/include/media/VolumeGroupAttributes.h
index 24bd179..46b3612 100644
--- a/media/libaudioclient/include/media/AudioAttributes.h
+++ b/media/libaudioclient/include/media/VolumeGroupAttributes.h
@@ -26,15 +26,22 @@
 
 namespace android {
 
-class AudioAttributes : public Parcelable
+class VolumeGroupAttributes : public Parcelable
 {
 public:
-    AudioAttributes() = default;
-    AudioAttributes(const audio_attributes_t &attributes) : mAttributes(attributes) {} // NOLINT
-    AudioAttributes(volume_group_t groupId,
+    VolumeGroupAttributes() = default;
+    VolumeGroupAttributes(const audio_attributes_t &attributes)
+        : mAttributes(attributes) {} // NOLINT
+    VolumeGroupAttributes(volume_group_t groupId,
                     audio_stream_type_t stream,
                     const audio_attributes_t &attributes) :
-         mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {}
+         mAttributes(attributes), mStreamType(stream), mGroupId(groupId) {
+        // TODO: align the native & Java source initializers.
+        // Since this class holds attributes for a volume group, it applies only to playback,
+        // so the capture source is reset here.
+        mAttributes.source = AUDIO_SOURCE_INVALID;
+    }
+
+    int matchesScore(const audio_attributes_t &attributes) const;
 
     audio_attributes_t getAttributes() const { return mAttributes; }
 
@@ -61,8 +68,8 @@
 
 // AIDL conversion routines.
 ConversionResult<media::AudioAttributesEx>
-legacy2aidl_AudioAttributes_AudioAttributesEx(const AudioAttributes& legacy);
-ConversionResult<AudioAttributes>
-aidl2legacy_AudioAttributesEx_AudioAttributes(const media::AudioAttributesEx& aidl);
+legacy2aidl_VolumeGroupAttributes_AudioAttributesEx(const VolumeGroupAttributes& legacy);
+ConversionResult<VolumeGroupAttributes>
+aidl2legacy_AudioAttributesEx_VolumeGroupAttributes(const media::AudioAttributesEx& aidl);
 
 } // namespace android
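
matchesScore() mirrors the strategy-level scoring and lets callers rank volume groups by how specifically their attributes match a client's. An illustrative sketch; getGroupId() is assumed to be the accessor for mGroupId (it is not shown in this hunk):

    #include <vector>
    #include <media/VolumeGroupAttributes.h>

    volume_group_t pickVolumeGroup(const std::vector<android::VolumeGroupAttributes>& groups,
                                   const audio_attributes_t& clientAttr) {
        volume_group_t best = static_cast<volume_group_t>(-1);  // sentinel: no group matched
        int bestScore = -1;
        for (const auto& group : groups) {
            const int score = group.matchesScore(clientAttr);
            if (score > bestScore) {
                bestScore = score;
                best = group.getGroupId();
            }
        }
        return best;
    }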
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 891293e..1e8dcca 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -14,6 +14,12 @@
         "-Wall",
         "-Werror",
     ],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -22,33 +28,35 @@
     },
 }
 
-cc_test {
-    name: "audio_aidl_conversion_tests",
-    defaults: ["libaudioclient_tests_defaults"],
-    srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
-    shared_libs: [
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
+cc_defaults {
+    name: "audio_aidl_conversion_test_defaults",
+    defaults: [
+        "libaudioclient_tests_defaults",
+        "latest_android_media_audio_common_types_cpp_static",
     ],
     static_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "libaudio_aidl_conversion_common_cpp",
         "libaudioclient_aidl_conversion",
         "libstagefright_foundation",
     ],
 }
 
 cc_test {
+    name: "audio_aidl_conversion_tests",
+    defaults: [
+        "audio_aidl_conversion_test_defaults",
+    ],
+    srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
+}
+
+cc_test {
     name: "audio_aidl_status_tests",
     defaults: ["libaudioclient_tests_defaults"],
     srcs: ["audio_aidl_status_tests.cpp"],
     shared_libs: [
         "libaudioclient_aidl_conversion",
-        "libbinder",
-        "libcutils",
-        "libutils",
     ],
 }
 
@@ -66,9 +74,6 @@
     shared_libs: [
         "framework-permission-aidl-cpp",
         "libaudioclient",
-        "libbinder",
-        "libcutils",
-        "libutils",
     ],
     data: ["track_test_input_*.txt"],
 }
@@ -85,11 +90,125 @@
         "libmediametrics_headers",
     ],
     shared_libs: [
-        "libaudioclient",
-        "libbinder",
-        "libcutils",
-        "libutils",
         "framework-permission-aidl-cpp",
+        "libaudioclient",
     ],
     data: ["record_test_input_*.txt"],
 }
+
+cc_defaults {
+    name: "libaudioclient_gtests_defaults",
+    defaults: [
+        "audio_aidl_conversion_test_defaults",
+    ],
+    shared_libs: [
+        "capture_state_listener-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libbase",
+        "libcgrouprc",
+        "libdl",
+        "libmedia",
+        "libmediametrics",
+        "libmediautils",
+        "libmedia_helper",
+        "libnblog",
+        "libprocessgroup",
+        "libshmemcompat",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
+        "shared-file-region-aidl-cpp",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "effect-aidl-cpp",
+        "libaudioclient",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+    ],
+    data: ["bbb*.raw"],
+    test_config_template: "audio_test_template.xml",
+}
+
+cc_test {
+    name: "audiorecord_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audiorecord_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
+
+cc_test {
+    name: "audiotrack_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audiotrack_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
+
+cc_test {
+    name: "audioeffect_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audioeffect_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
+
+cc_test {
+    name: "audioeffect_analysis",
+    defaults: ["libaudioclient_gtests_defaults"],
+    // flag needed for pfft/pffft.hpp
+    cflags: [
+        "-Wno-error=unused-parameter",
+    ],
+    srcs: [
+        "audioeffect_analyser.cpp",
+        "audio_test_utils.cpp",
+    ],
+    static_libs: [
+        "libpffft",
+    ],
+}
+
+cc_test {
+    name: "audiorouting_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audiorouting_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
+
+cc_test {
+    name: "audioclient_serialization_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audioclient_serialization_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
+
+cc_test {
+    name: "trackplayerbase_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: ["trackplayerbase_tests.cpp"],
+}
+
+cc_test {
+    name: "audiosystem_tests",
+    defaults: ["libaudioclient_gtests_defaults"],
+    srcs: [
+        "audiosystem_tests.cpp",
+        "audio_test_utils.cpp",
+    ],
+}
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 997f62a..0d12f9d 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -14,24 +14,70 @@
  * limitations under the License.
  */
 
+#include <iostream>
+#include <string>
+
 #include <gtest/gtest.h>
 
-#include <media/AudioCommonTypes.h>
 #include <media/AidlConversion.h>
+#include <media/AudioCommonTypes.h>
 
 using namespace android;
 using namespace android::aidl_utils;
 
+using media::AudioDirectMode;
+using media::AudioPortConfigFw;
+using media::AudioPortDeviceExtSys;
+using media::AudioPortFw;
+using media::AudioPortRole;
+using media::AudioPortType;
 using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
 using media::audio::common::AudioDeviceDescription;
 using media::audio::common::AudioDeviceType;
+using media::audio::common::AudioEncapsulationMetadataType;
+using media::audio::common::AudioEncapsulationType;
 using media::audio::common::AudioFormatDescription;
 using media::audio::common::AudioFormatType;
+using media::audio::common::AudioGain;
+using media::audio::common::AudioGainConfig;
+using media::audio::common::AudioGainMode;
+using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioPortDeviceExt;
+using media::audio::common::AudioProfile;
+using media::audio::common::AudioStandard;
+using media::audio::common::ExtraAudioDescriptor;
+using media::audio::common::Int;
+using media::audio::common::MicrophoneDynamicInfo;
+using media::audio::common::MicrophoneInfo;
 using media::audio::common::PcmType;
 
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+namespace android::media {
+#define DEFINE_PRINTING_TEMPLATES()                                                               \
+    template <typename P>                                                                         \
+    std::enable_if_t<std::is_base_of_v<::android::Parcelable, P>, std::ostream&> operator<<(      \
+            std::ostream& os, const P& p) {                                                       \
+        return os << p.toString();                                                                \
+    }                                                                                             \
+    template <typename E>                                                                         \
+    std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) { \
+        return os << toString(e);                                                                 \
+    }
+DEFINE_PRINTING_TEMPLATES();
+
+namespace audio::common {
+DEFINE_PRINTING_TEMPLATES();
+}  // namespace audio::common
+#undef DEFINE_PRINTING_TEMPLATES
+}  // namespace android::media
+
 namespace {
 
-template<typename T> size_t hash(const T& t) {
+template <typename T>
+size_t hash(const T& t) {
     return std::hash<T>{}(t);
 }
 
@@ -52,10 +98,8 @@
     return AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
             // Use channels that exist both for input and output,
             // but doesn't form a known layout mask.
-            AudioChannelLayout::CHANNEL_FRONT_LEFT |
-            AudioChannelLayout::CHANNEL_FRONT_RIGHT |
-            AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT |
-            AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT);
+            AudioChannelLayout::CHANNEL_FRONT_LEFT | AudioChannelLayout::CHANNEL_FRONT_RIGHT |
+            AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT | AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT);
 }
 
 AudioChannelLayout make_ACL_ChannelIndex2() {
@@ -74,7 +118,7 @@
 }
 
 AudioDeviceDescription make_AudioDeviceDescription(AudioDeviceType type,
-        const std::string& connection = "") {
+                                                   const std::string& connection = "") {
     AudioDeviceDescription result;
     result.type = type;
     result.connection = connection;
@@ -89,18 +133,59 @@
     return make_AudioDeviceDescription(AudioDeviceType::IN_DEFAULT);
 }
 
+AudioDeviceDescription make_ADD_MicIn() {
+    return make_AudioDeviceDescription(AudioDeviceType::IN_MICROPHONE);
+}
+
+AudioDeviceDescription make_ADD_RSubmixIn() {
+    return make_AudioDeviceDescription(AudioDeviceType::IN_SUBMIX);
+}
+
 AudioDeviceDescription make_ADD_DefaultOut() {
     return make_AudioDeviceDescription(AudioDeviceType::OUT_DEFAULT);
 }
 
 AudioDeviceDescription make_ADD_WiredHeadset() {
     return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
-            AudioDeviceDescription::CONNECTION_ANALOG());
+                                       AudioDeviceDescription::CONNECTION_ANALOG());
 }
 
 AudioDeviceDescription make_ADD_BtScoHeadset() {
     return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
-            AudioDeviceDescription::CONNECTION_BT_SCO());
+                                       AudioDeviceDescription::CONNECTION_BT_SCO());
+}
+
+AudioDeviceDescription make_ADD_BtA2dpHeadphone() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADPHONE,
+                                       AudioDeviceDescription::CONNECTION_BT_A2DP());
+}
+
+AudioDeviceDescription make_ADD_BtLeHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+                                       AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_BtLeBroadcast() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_BROADCAST,
+                                       AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_IpV4Device() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
+                                       AudioDeviceDescription::CONNECTION_IP_V4());
+}
+
+AudioDeviceDescription make_ADD_UsbHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+                                       AudioDeviceDescription::CONNECTION_USB());
+}
+
+AudioDevice make_AudioDevice(const AudioDeviceDescription& type,
+                             const AudioDeviceAddress& address) {
+    AudioDevice result;
+    result.type = type;
+    result.address = address;
+    return result;
 }
 
 AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
@@ -121,8 +206,7 @@
     return result;
 }
 
-AudioFormatDescription make_AudioFormatDescription(PcmType transport,
-        const std::string& encoding) {
+AudioFormatDescription make_AudioFormatDescription(PcmType transport, const std::string& encoding) {
     auto result = make_AudioFormatDescription(encoding);
     result.pcm = transport;
     return result;
@@ -154,6 +238,22 @@
     return afd;
 }
 
+android::media::TrackSecondaryOutputInfo make_TrackSecondaryOutputInfo() {
+    android::media::TrackSecondaryOutputInfo result;
+    result.portId = 1;
+    result.secondaryOutputIds = {0, 5, 7};
+    return result;
+}
+
+ExtraAudioDescriptor make_ExtraAudioDescriptor(AudioStandard audioStandard,
+                                               AudioEncapsulationType audioEncapsulationType) {
+    ExtraAudioDescriptor result;
+    result.standard = audioStandard;
+    result.audioDescriptor = {0xb4, 0xaf, 0x98, 0x1a};
+    result.encapsulationType = audioEncapsulationType;
+    return result;
+}
+
 }  // namespace
 
 // Verify that two independently constructed ADDs/AFDs have the same hash.
@@ -163,7 +263,8 @@
 // is identical to the same format description constructed by the framework.
 class HashIdentityTest : public ::testing::Test {
   public:
-    template<typename T> void verifyHashIdentity(const std::vector<std::function<T()>>& valueGens) {
+    template <typename T>
+    void verifyHashIdentity(const std::vector<std::function<T()>>& valueGens) {
         for (size_t i = 0; i < valueGens.size(); ++i) {
             for (size_t j = 0; j < valueGens.size(); ++j) {
                 if (i == j) {
@@ -177,27 +278,25 @@
 };
 
 TEST_F(HashIdentityTest, AudioChannelLayoutHashIdentity) {
-    verifyHashIdentity<AudioChannelLayout>({
-            make_ACL_None, make_ACL_Invalid, make_ACL_Stereo,
-            make_ACL_LayoutArbitrary, make_ACL_ChannelIndex2,
-            make_ACL_ChannelIndexArbitrary, make_ACL_VoiceCall});
+    verifyHashIdentity<AudioChannelLayout>({make_ACL_None, make_ACL_Invalid, make_ACL_Stereo,
+                                            make_ACL_LayoutArbitrary, make_ACL_ChannelIndex2,
+                                            make_ACL_ChannelIndexArbitrary, make_ACL_VoiceCall});
 }
 
 TEST_F(HashIdentityTest, AudioDeviceDescriptionHashIdentity) {
-    verifyHashIdentity<AudioDeviceDescription>({
-            make_ADD_None, make_ADD_DefaultIn, make_ADD_DefaultOut, make_ADD_WiredHeadset,
-            make_ADD_BtScoHeadset});
+    verifyHashIdentity<AudioDeviceDescription>({make_ADD_None, make_ADD_DefaultIn,
+                                                make_ADD_DefaultOut, make_ADD_WiredHeadset,
+                                                make_ADD_BtScoHeadset});
 }
 
 TEST_F(HashIdentityTest, AudioFormatDescriptionHashIdentity) {
-    verifyHashIdentity<AudioFormatDescription>({
-            make_AFD_Default, make_AFD_Invalid, make_AFD_Pcm16Bit, make_AFD_Bitstream,
-            make_AFD_Encap, make_AFD_Encap_with_Enc});
+    verifyHashIdentity<AudioFormatDescription>({make_AFD_Default, make_AFD_Invalid,
+                                                make_AFD_Pcm16Bit, make_AFD_Bitstream,
+                                                make_AFD_Encap, make_AFD_Encap_with_Enc});
 }
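
The three tests above depend on std::hash being usable with the AIDL types, so that logically equal values built by different factory functions hash identically. A minimal sketch of that property with a plain struct (Device and its hash specialization are illustrative, not part of this patch):

    #include <cassert>
    #include <cstddef>
    #include <functional>
    #include <string>

    struct Device {            // stand-in for an AIDL parcelable
        int type = 0;
        std::string connection;
        bool operator==(const Device& o) const {
            return type == o.type && connection == o.connection;
        }
    };

    namespace std {
    template <>
    struct hash<Device> {
        size_t operator()(const Device& d) const {
            return hash<int>{}(d.type) ^ (hash<string>{}(d.connection) << 1);
        }
    };
    }  // namespace std

    int main() {
        const Device a{1, "analog"};  // constructed independently...
        const Device b{1, "analog"};  // ...but logically equal
        assert(std::hash<Device>{}(a) == std::hash<Device>{}(b));
        return 0;
    }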
 
 using ChannelLayoutParam = std::tuple<AudioChannelLayout, bool /*isInput*/>;
-class AudioChannelLayoutRoundTripTest :
-        public testing::TestWithParam<ChannelLayoutParam> {};
+class AudioChannelLayoutRoundTripTest : public testing::TestWithParam<ChannelLayoutParam> {};
 TEST_P(AudioChannelLayoutRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = std::get<0>(GetParam());
     const bool isInput = std::get<1>(GetParam());
@@ -207,21 +306,82 @@
     ASSERT_TRUE(convBack.ok());
     EXPECT_EQ(initial, convBack.value());
 }
-INSTANTIATE_TEST_SUITE_P(AudioChannelLayoutRoundTrip,
-        AudioChannelLayoutRoundTripTest,
+
+INSTANTIATE_TEST_SUITE_P(
+        AudioChannelLayoutRoundTrip, AudioChannelLayoutRoundTripTest,
         testing::Combine(
                 testing::Values(AudioChannelLayout{}, make_ACL_Invalid(), make_ACL_Stereo(),
-                        make_ACL_LayoutArbitrary(), make_ACL_ChannelIndex2(),
-                        make_ACL_ChannelIndexArbitrary()),
+                                make_ACL_LayoutArbitrary(), make_ACL_ChannelIndex2(),
+                                make_ACL_ChannelIndexArbitrary(),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BACK_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BACK_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BACK_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_LOW_FREQUENCY),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT)),
                 testing::Values(false, true)));
-INSTANTIATE_TEST_SUITE_P(AudioChannelVoiceRoundTrip,
-        AudioChannelLayoutRoundTripTest,
-        // In legacy constants the voice call is only defined for input.
-        testing::Combine(testing::Values(make_ACL_VoiceCall()), testing::Values(true)));
+INSTANTIATE_TEST_SUITE_P(AudioChannelVoiceRoundTrip, AudioChannelLayoutRoundTripTest,
+                         // In legacy constants the voice call is only defined for input.
+                         testing::Combine(testing::Values(make_ACL_VoiceCall()),
+                                          testing::Values(true)));
+
+INSTANTIATE_TEST_SUITE_P(
+        OutAudioChannelLayoutLayoutRoundTrip, AudioChannelLayoutRoundTripTest,
+        testing::Combine(
+                testing::Values(AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_LEFT_OF_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_RIGHT_OF_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_SIDE_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_SIDE_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_FRONT_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_FRONT_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_FRONT_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_BACK_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_BACK_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_TOP_BACK_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BOTTOM_FRONT_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BOTTOM_FRONT_CENTER),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_BOTTOM_FRONT_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_LOW_FREQUENCY_2),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_HAPTIC_A),
+                                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+                                        AudioChannelLayout::CHANNEL_HAPTIC_B)),
+                testing::Values(false)));
 
 using ChannelLayoutEdgeCaseParam = std::tuple<int /*legacy*/, bool /*isInput*/, bool /*isValid*/>;
-class AudioChannelLayoutEdgeCaseTest :
-        public testing::TestWithParam<ChannelLayoutEdgeCaseParam> {};
+class AudioChannelLayoutEdgeCaseTest : public testing::TestWithParam<ChannelLayoutEdgeCaseParam> {};
 TEST_P(AudioChannelLayoutEdgeCaseTest, Legacy2Aidl) {
     const audio_channel_mask_t legacy = static_cast<audio_channel_mask_t>(std::get<0>(GetParam()));
     const bool isInput = std::get<1>(GetParam());
@@ -229,8 +389,8 @@
     auto conv = legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy, isInput);
     EXPECT_EQ(isValid, conv.ok());
 }
-INSTANTIATE_TEST_SUITE_P(AudioChannelLayoutEdgeCase,
-        AudioChannelLayoutEdgeCaseTest,
+INSTANTIATE_TEST_SUITE_P(
+        AudioChannelLayoutEdgeCase, AudioChannelLayoutEdgeCaseTest,
         testing::Values(
                 // Valid legacy input masks.
                 std::make_tuple(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO, true, true),
@@ -240,25 +400,26 @@
                 std::make_tuple(
                         // This has the same numerical representation as Mask 'A' below
                         AUDIO_CHANNEL_OUT_FRONT_CENTER | AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
-                        AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT, false, true),
+                                AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT,
+                        false, true),
                 std::make_tuple(
                         // This has the same numerical representation as Mask 'B' below
                         AUDIO_CHANNEL_OUT_FRONT_CENTER | AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
-                        AUDIO_CHANNEL_OUT_TOP_BACK_LEFT, false, true),
+                                AUDIO_CHANNEL_OUT_TOP_BACK_LEFT,
+                        false, true),
                 // Invalid legacy input masks.
                 std::make_tuple(AUDIO_CHANNEL_IN_6, true, false),
-                std::make_tuple(
-                        AUDIO_CHANNEL_IN_6 | AUDIO_CHANNEL_IN_FRONT_PROCESSED, true, false),
-                std::make_tuple(
-                        AUDIO_CHANNEL_IN_PRESSURE | AUDIO_CHANNEL_IN_X_AXIS |
-                        AUDIO_CHANNEL_IN_Y_AXIS | AUDIO_CHANNEL_IN_Z_AXIS, true, false),
+                std::make_tuple(AUDIO_CHANNEL_IN_6 | AUDIO_CHANNEL_IN_FRONT_PROCESSED, true, false),
+                std::make_tuple(AUDIO_CHANNEL_IN_PRESSURE | AUDIO_CHANNEL_IN_X_AXIS |
+                                        AUDIO_CHANNEL_IN_Y_AXIS | AUDIO_CHANNEL_IN_Z_AXIS,
+                                true, false),
                 std::make_tuple(  // Mask 'A'
                         AUDIO_CHANNEL_IN_STEREO | AUDIO_CHANNEL_IN_VOICE_UPLINK, true, false),
                 std::make_tuple(  // Mask 'B'
                         AUDIO_CHANNEL_IN_STEREO | AUDIO_CHANNEL_IN_VOICE_DNLINK, true, false)));
 
-class AudioDeviceDescriptionRoundTripTest :
-        public testing::TestWithParam<AudioDeviceDescription> {};
+class AudioDeviceDescriptionRoundTripTest : public testing::TestWithParam<AudioDeviceDescription> {
+};
 TEST_P(AudioDeviceDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioDeviceDescription_audio_devices_t(initial);
@@ -267,13 +428,55 @@
     ASSERT_TRUE(convBack.ok());
     EXPECT_EQ(initial, convBack.value());
 }
-INSTANTIATE_TEST_SUITE_P(AudioDeviceDescriptionRoundTrip,
-        AudioDeviceDescriptionRoundTripTest,
-        testing::Values(AudioDeviceDescription{}, make_ADD_DefaultIn(),
-                make_ADD_DefaultOut(), make_ADD_WiredHeadset(), make_ADD_BtScoHeadset()));
+INSTANTIATE_TEST_SUITE_P(AudioDeviceDescriptionRoundTrip, AudioDeviceDescriptionRoundTripTest,
+                         testing::Values(AudioDeviceDescription{}, make_ADD_DefaultIn(),
+                                         make_ADD_DefaultOut(), make_ADD_WiredHeadset(),
+                                         make_ADD_BtScoHeadset()));
 
-class AudioFormatDescriptionRoundTripTest :
-        public testing::TestWithParam<AudioFormatDescription> {};
+class AudioDeviceRoundTripTest : public testing::TestWithParam<AudioDevice> {};
+TEST_P(AudioDeviceRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    audio_devices_t legacyType;
+    String8 legacyAddress;
+    status_t status = aidl2legacy_AudioDevice_audio_device(initial, &legacyType, &legacyAddress);
+    ASSERT_EQ(OK, status);
+    auto convBack = legacy2aidl_audio_device_AudioDevice(legacyType, legacyAddress);
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(
+        AudioDeviceRoundTrip, AudioDeviceRoundTripTest,
+        testing::Values(
+                make_AudioDevice(make_ADD_MicIn(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("bottom")),
+                make_AudioDevice(make_ADD_RSubmixIn(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("1:2-in-3")),
+                // The case of a "blueprint" device port for an external device.
+                make_AudioDevice(make_ADD_BtScoHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+                make_AudioDevice(make_ADD_BtScoHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                // Another "blueprint"
+                make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+                make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                make_AudioDevice(make_ADD_BtLeHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+                                         std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+                make_AudioDevice(make_ADD_BtLeBroadcast(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("42")),
+                make_AudioDevice(make_ADD_IpV4Device(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::ipv4>(
+                                         std::vector<uint8_t>{192, 168, 0, 1})),
+                make_AudioDevice(make_ADD_UsbHeadset(),
+                                 AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
+                                         std::vector<int32_t>{1, 2}))));
+
+class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
+};
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
@@ -282,6 +485,343 @@
     ASSERT_TRUE(convBack.ok());
     EXPECT_EQ(initial, convBack.value());
 }
-INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip,
-        AudioFormatDescriptionRoundTripTest,
-        testing::Values(make_AFD_Invalid(), AudioFormatDescription{}, make_AFD_Pcm16Bit()));
+INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip, AudioFormatDescriptionRoundTripTest,
+                         testing::Values(make_AFD_Invalid(), AudioFormatDescription{},
+                                         make_AFD_Pcm16Bit()));
+
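Every Aidl2Legacy2Aidl test in this file follows the same convert-then-convert-back shape. Purely as an illustration (this helper does not exist in the patch), the pattern could be factored out as below, where the conversion callables are assumed to return ConversionResult-style objects exposing ok() and value():

    #include <gtest/gtest.h>

    // Sketch: round-trips 'initial' through toLegacy/toAidl and checks nothing is lost.
    template <typename T, typename ToLegacy, typename ToAidl>
    void verifyRoundTrip(const T& initial, ToLegacy toLegacy, ToAidl toAidl) {
        auto conv = toLegacy(initial);
        ASSERT_TRUE(conv.ok());
        auto convBack = toAidl(conv.value());
        ASSERT_TRUE(convBack.ok());
        EXPECT_EQ(initial, convBack.value());
    }
    // e.g. verifyRoundTrip(make_AFD_Pcm16Bit(),
    //                      aidl2legacy_AudioFormatDescription_audio_format_t,
    //                      legacy2aidl_audio_format_t_AudioFormatDescription);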
+AudioPortConfigFw createAudioPortConfigFw(const AudioChannelLayout& layout,
+                                          const AudioFormatDescription& format,
+                                          const AudioDeviceDescription& device) {
+    const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+    AudioPortConfigFw result;
+    result.hal.id = 43;
+    result.hal.portId = 42;
+    Int sr44100;
+    sr44100.value = 44100;
+    result.hal.sampleRate = sr44100;
+    result.hal.channelMask = layout;
+    result.hal.format = format;
+    AudioGainConfig gain;
+    gain.mode = 1 << static_cast<int>(AudioGainMode::JOINT);
+    gain.values = std::vector<int32_t>({100});
+    result.hal.gain = gain;
+    AudioPortDeviceExt ext;
+    AudioDevice audioDevice;
+    audioDevice.type = device;
+    ext.device = audioDevice;
+    result.hal.ext = ext;
+    result.sys.role = isInput ? AudioPortRole::SOURCE : AudioPortRole::SINK;
+    result.sys.type = AudioPortType::DEVICE;
+    AudioPortDeviceExtSys sysDevice;
+    sysDevice.hwModule = 1;
+    result.sys.ext = sysDevice;
+    return result;
+}
+
+using AudioPortConfigParam =
+        std::tuple<AudioChannelLayout, AudioFormatDescription, AudioDeviceDescription>;
+class AudioPortConfigRoundTripTest : public testing::TestWithParam<AudioPortConfigParam> {};
+TEST_P(AudioPortConfigRoundTripTest, Aidl2Legacy2Aidl) {
+    const AudioChannelLayout layout = std::get<0>(GetParam());
+    const AudioFormatDescription format = std::get<1>(GetParam());
+    const AudioDeviceDescription device = std::get<2>(GetParam());
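+    // Treat any device type ordered before OUT_DEFAULT as an input device.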
+    const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+    AudioPortConfigFw initial = createAudioPortConfigFw(layout, format, device);
+    {
+        audio_port_config conv{};
+        int32_t portId = -1;
+        status_t status =
+                aidl2legacy_AudioPortConfig_audio_port_config(initial.hal, isInput, &conv, &portId);
+        ASSERT_EQ(OK, status);
+        EXPECT_NE(-1, portId);
+        auto convBack = legacy2aidl_audio_port_config_AudioPortConfig(conv, isInput, portId);
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial.hal, convBack.value());
+    }
+    {
+        int32_t portId = -1;
+        auto conv = aidl2legacy_AudioPortConfigFw_audio_port_config(initial, &portId);
+        ASSERT_TRUE(conv.ok());
+        EXPECT_NE(-1, portId);
+        auto convBack = legacy2aidl_audio_port_config_AudioPortConfigFw(conv.value(), portId);
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial, convBack.value());
+    }
+}
+INSTANTIATE_TEST_SUITE_P(
+        AudioPortConfig, AudioPortConfigRoundTripTest,
+        testing::Combine(testing::Values(make_ACL_Stereo(), make_ACL_ChannelIndex2()),
+                         testing::Values(make_AFD_Pcm16Bit()),
+                         testing::Values(make_ADD_DefaultIn(), make_ADD_DefaultOut(),
+                                         make_ADD_WiredHeadset())));
+
+class AudioPortFwRoundTripTest : public testing::TestWithParam<AudioDeviceDescription> {
+  public:
+    AudioProfile createProfile(const AudioFormatDescription& format,
+                               const std::vector<AudioChannelLayout>& channelMasks,
+                               const std::vector<int32_t>& sampleRates) {
+        AudioProfile profile;
+        profile.format = format;
+        profile.channelMasks = channelMasks;
+        profile.sampleRates = sampleRates;
+        return profile;
+    }
+};
+TEST_P(AudioPortFwRoundTripTest, Aidl2Legacy2Aidl) {
+    const AudioDeviceDescription device = GetParam();
+    const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+    AudioPortFw initial;
+    initial.hal.id = 42;
+    initial.hal.profiles.push_back(createProfile(
+            make_AFD_Pcm16Bit(), {make_ACL_Stereo(), make_ACL_ChannelIndex2()}, {44100, 48000}));
+    if (isInput) {
+        initial.hal.flags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
+    } else {
+        initial.hal.flags = AudioIoFlags::make<AudioIoFlags::Tag::output>(0);
+    }
+    AudioGain initialGain;
+    initialGain.mode = 1 << static_cast<int>(AudioGainMode::JOINT);
+    initialGain.channelMask = make_ACL_Stereo();
+    initial.hal.gains.push_back(initialGain);
+    AudioPortDeviceExt initialExt;
+    AudioDevice initialDevice;
+    initialDevice.type = device;
+    initialExt.device = initialDevice;
+    initial.hal.ext = initialExt;
+    {
+        auto conv = aidl2legacy_AudioPort_audio_port_v7(initial.hal, isInput);
+        ASSERT_TRUE(conv.ok());
+        auto convBack = legacy2aidl_audio_port_v7_AudioPort(conv.value(), isInput);
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial.hal, convBack.value());
+    }
+    initial.sys.role = isInput ? AudioPortRole::SOURCE : AudioPortRole::SINK;
+    initial.sys.type = AudioPortType::DEVICE;
+    initial.sys.profiles.resize(initial.hal.profiles.size());
+    initial.sys.gains.resize(initial.hal.gains.size());
+    initial.sys.activeConfig =
+            createAudioPortConfigFw(make_ACL_Stereo(), make_AFD_Pcm16Bit(), device);
+    initial.sys.activeConfig.hal.flags = initial.hal.flags;
+    AudioPortDeviceExtSys initialSysDevice;
+    initialSysDevice.hwModule = 1;
+    initial.sys.ext = initialSysDevice;
+    {
+        auto conv = aidl2legacy_AudioPortFw_audio_port_v7(initial);
+        ASSERT_TRUE(conv.ok());
+        auto convBack = legacy2aidl_audio_port_v7_AudioPortFw(conv.value());
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial, convBack.value());
+    }
+}
+INSTANTIATE_TEST_SUITE_P(AudioPortFw, AudioPortFwRoundTripTest,
+                         testing::Values(make_ADD_DefaultIn(), make_ADD_DefaultOut(),
+                                         make_ADD_WiredHeadset()));
+
+class AudioDirectModeRoundTripTest : public testing::TestWithParam<AudioDirectMode> {};
+TEST_P(AudioDirectModeRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv = aidl2legacy_AudioDirectMode_audio_direct_mode_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_direct_mode_t_AudioDirectMode(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioDirectMode, AudioDirectModeRoundTripTest,
+                         testing::Values(AudioDirectMode::NONE, AudioDirectMode::OFFLOAD,
+                                         AudioDirectMode::OFFLOAD_GAPLESS,
+                                         AudioDirectMode::BITSTREAM));
+
+class AudioStandardRoundTripTest : public testing::TestWithParam<AudioStandard> {};
+TEST_P(AudioStandardRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv = aidl2legacy_AudioStandard_audio_standard_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_standard_t_AudioStandard(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioStandard, AudioStandardRoundTripTest,
+                         testing::Values(AudioStandard::NONE, AudioStandard::EDID,
+                                         AudioStandard::SADB, AudioStandard::VSADB));
+
+class AudioEncapsulationMetadataTypeRoundTripTest
+    : public testing::TestWithParam<AudioEncapsulationMetadataType> {};
+TEST_P(AudioEncapsulationMetadataTypeRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv =
+            aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+            conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioEncapsulationMetadataType,
+                         AudioEncapsulationMetadataTypeRoundTripTest,
+                         testing::Values(AudioEncapsulationMetadataType::NONE,
+                                         AudioEncapsulationMetadataType::FRAMEWORK_TUNER,
+                                         AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR));
+
+class AudioGainModeRoundTripTest : public testing::TestWithParam<AudioGainMode> {};
+TEST_P(AudioGainModeRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv = aidl2legacy_AudioGainMode_audio_gain_mode_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_gain_mode_t_AudioGainMode(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioGainMode, AudioGainModeRoundTripTest,
+                         testing::Values(AudioGainMode::JOINT, AudioGainMode::CHANNELS,
+                                         AudioGainMode::RAMP));
+
+TEST(AudioTrackSecondaryOutputInfoRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = make_TrackSecondaryOutputInfo();
+    auto conv = aidl2legacy_TrackSecondaryOutputInfo_TrackSecondaryOutputInfoPair(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_TrackSecondaryOutputInfoPair_TrackSecondaryOutputInfo(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+using ExtraAudioDescriptorParam = std::tuple<AudioStandard, AudioEncapsulationType>;
+class ExtraAudioDescriptorRoundTripTest : public testing::TestWithParam<ExtraAudioDescriptorParam> {
+};
+TEST_P(ExtraAudioDescriptorRoundTripTest, Aidl2Legacy2Aidl) {
+    ExtraAudioDescriptor initial =
+            make_ExtraAudioDescriptor(std::get<0>(GetParam()), std::get<1>(GetParam()));
+    auto conv = aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        ExtraAudioDescriptor, ExtraAudioDescriptorRoundTripTest,
+        testing::Values(std::make_tuple(AudioStandard::NONE, AudioEncapsulationType::NONE),
+                        std::make_tuple(AudioStandard::EDID, AudioEncapsulationType::NONE),
+                        std::make_tuple(AudioStandard::EDID, AudioEncapsulationType::IEC61937),
+                        std::make_tuple(AudioStandard::SADB, AudioEncapsulationType::NONE),
+                        std::make_tuple(AudioStandard::SADB, AudioEncapsulationType::IEC61937),
+                        std::make_tuple(AudioStandard::VSADB, AudioEncapsulationType::NONE),
+                        std::make_tuple(AudioStandard::VSADB, AudioEncapsulationType::IEC61937)));
+
+TEST(AudioPortSessionExtRoundTripTest, Aidl2Legacy2Aidl) {
+    const int32_t initial = 7;
+    auto conv = aidl2legacy_int32_t_audio_port_session_ext(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_port_session_ext_int32_t(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+class AudioGainTest : public testing::TestWithParam<bool> {};
+TEST_P(AudioGainTest, Legacy2Aidl2Legacy) {
+    audio_port_v7 port;
+    port.num_gains = 2;
+    port.gains[0] = {.mode = AUDIO_GAIN_MODE_JOINT,
+                     .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+                     .min_value = -3200,
+                     .max_value = 600,
+                     .default_value = 0,
+                     .step_value = 100,
+                     .min_ramp_ms = 10,
+                     .max_ramp_ms = 20};
+    port.gains[1] = {.mode = AUDIO_GAIN_MODE_JOINT,
+                     .channel_mask = AUDIO_CHANNEL_IN_MONO,
+                     .min_value = -8800,
+                     .max_value = 4000,
+                     .default_value = 0,
+                     .step_value = 100,
+                     .min_ramp_ms = 192,
+                     .max_ramp_ms = 224};
+
+    const auto isInput = GetParam();
+    for (int i = 0; i < port.num_gains; i++) {
+        auto initial = port.gains[i];
+        auto conv = legacy2aidl_audio_gain_AudioGain(initial, isInput);
+        ASSERT_TRUE(conv.ok());
+        auto convBack = aidl2legacy_AudioGain_audio_gain(conv.value(), isInput);
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial.mode, convBack.value().mode);
+        EXPECT_EQ(initial.channel_mask, convBack.value().channel_mask);
+        EXPECT_EQ(initial.min_value, convBack.value().min_value);
+        EXPECT_EQ(initial.max_value, convBack.value().max_value);
+        EXPECT_EQ(initial.default_value, convBack.value().default_value);
+        EXPECT_EQ(initial.step_value, convBack.value().step_value);
+        EXPECT_EQ(initial.min_ramp_ms, convBack.value().min_ramp_ms);
+        EXPECT_EQ(initial.max_ramp_ms, convBack.value().max_ramp_ms);
+    }
+}
+INSTANTIATE_TEST_SUITE_P(AudioGain, AudioGainTest, testing::Values(true, false));
+
+TEST(AudioMicrophoneInfoFw, Aidl2Legacy2Aidl) {
+    media::MicrophoneInfoFw initial{};
+    // HALs must return at least 1 element in channelMapping. The zero value is 'UNUSED'.
+    initial.dynamic.channelMapping.resize(1);
+    auto conv = aidl2legacy_MicrophoneInfoFw_audio_microphone_characteristic_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioMicrophoneInfoFw, UnknownValues) {
+    {
+        media::MicrophoneInfoFw initial;
+        initial.dynamic.channelMapping.resize(1);
+        initial.info.indexInTheGroup = MicrophoneInfo::INDEX_IN_THE_GROUP_UNKNOWN;
+        auto conv = aidl2legacy_MicrophoneInfoFw_audio_microphone_characteristic_t(initial);
+        ASSERT_TRUE(conv.ok());
+        auto convBack =
+                legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(conv.value());
+        ASSERT_TRUE(convBack.ok());
+        EXPECT_EQ(initial, convBack.value());
+    }
+    for (const auto f : {&audio_microphone_characteristic_t::sensitivity,
+                         &audio_microphone_characteristic_t::max_spl,
+                         &audio_microphone_characteristic_t::min_spl}) {
+        audio_microphone_characteristic_t mic{};
+        if (f == &audio_microphone_characteristic_t::sensitivity) {
+            mic.*f = AUDIO_MICROPHONE_SENSITIVITY_UNKNOWN;
+        } else {
+            mic.*f = AUDIO_MICROPHONE_SPL_UNKNOWN;
+        }
+        auto aidl = legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(mic);
+        ASSERT_TRUE(aidl.ok());
+        EXPECT_FALSE(aidl.value().info.sensitivity.has_value());
+    }
+    for (const auto f : {&audio_microphone_characteristic_t::geometric_location,
+                         &audio_microphone_characteristic_t::orientation}) {
+        for (const auto c : {&audio_microphone_coordinate::x, &audio_microphone_coordinate::y,
+                             &audio_microphone_coordinate::z}) {
+            audio_microphone_characteristic_t mic{};
+            mic.*f.*c = AUDIO_MICROPHONE_COORDINATE_UNKNOWN;
+            auto conv = legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(mic);
+            ASSERT_TRUE(conv.ok());
+            const auto& aidl = conv.value();
+            if (f == &audio_microphone_characteristic_t::geometric_location) {
+                EXPECT_FALSE(aidl.info.position.has_value());
+                EXPECT_TRUE(aidl.info.orientation.has_value());
+            } else {
+                EXPECT_TRUE(aidl.info.position.has_value());
+                EXPECT_FALSE(aidl.info.orientation.has_value());
+            }
+        }
+    }
+}
+
+TEST(AudioMicrophoneInfoFw, ChannelMapping) {
+    audio_microphone_characteristic_t mic{};
+    mic.channel_mapping[1] = AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT;
+    mic.channel_mapping[3] = AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED;
+    auto conv = legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(mic);
+    ASSERT_TRUE(conv.ok());
+    const auto& aidl = conv.value();
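+    // The conversion is expected to trim the mapping to the highest used channel index + 1.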
+    EXPECT_EQ(4, aidl.dynamic.channelMapping.size());
+    EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::UNUSED, aidl.dynamic.channelMapping[0]);
+    EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::DIRECT, aidl.dynamic.channelMapping[1]);
+    EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::UNUSED, aidl.dynamic.channelMapping[2]);
+    EXPECT_EQ(MicrophoneDynamicInfo::ChannelMapping::PROCESSED, aidl.dynamic.channelMapping[3]);
+}
diff --git a/media/libaudioclient/tests/audio_aidl_status_tests.cpp b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
index 5517091..8a7e6c1 100644
--- a/media/libaudioclient/tests/audio_aidl_status_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_status_tests.cpp
@@ -37,25 +37,10 @@
 
 // Special status values are preserved on round trip.
 TEST(audio_aidl_status_tests, statusRoundTripSpecialValues) {
-    for (status_t status : {
-            OK,
-            UNKNOWN_ERROR,
-            NO_MEMORY,
-            INVALID_OPERATION,
-            BAD_VALUE,
-            BAD_TYPE,
-            NAME_NOT_FOUND,
-            PERMISSION_DENIED,
-            NO_INIT,
-            ALREADY_EXISTS,
-            DEAD_OBJECT,
-            FAILED_TRANSACTION,
-            BAD_INDEX,
-            NOT_ENOUGH_DATA,
-            WOULD_BLOCK,
-            TIMED_OUT,
-            UNKNOWN_TRANSACTION,
-            FDS_NOT_ALLOWED}) {
+    for (status_t status :
+         {OK, UNKNOWN_ERROR, NO_MEMORY, INVALID_OPERATION, BAD_VALUE, BAD_TYPE, NAME_NOT_FOUND,
+          PERMISSION_DENIED, NO_INIT, ALREADY_EXISTS, DEAD_OBJECT, FAILED_TRANSACTION, BAD_INDEX,
+          NOT_ENOUGH_DATA, WOULD_BLOCK, TIMED_OUT, UNKNOWN_TRANSACTION, FDS_NOT_ALLOWED}) {
         ASSERT_EQ(status, statusTFromBinderStatus(binderStatusFromStatusT(status)));
     }
 }
@@ -63,47 +48,29 @@
 // Binder exceptions show as an error (not fixed at this time); these come from fromExceptionCode().
 TEST(audio_aidl_status_tests, binderStatusExceptions) {
     for (int exceptionCode : {
-            //Status::EX_NONE,
-            Status::EX_SECURITY,
-            Status::EX_BAD_PARCELABLE,
-            Status::EX_ILLEGAL_ARGUMENT,
-            Status::EX_NULL_POINTER,
-            Status::EX_ILLEGAL_STATE,
-            Status::EX_NETWORK_MAIN_THREAD,
-            Status::EX_UNSUPPORTED_OPERATION,
-            //Status::EX_SERVICE_SPECIFIC, -- tested fromServiceSpecificError()
-            Status::EX_PARCELABLE,
-            // This is special and Java specific; see Parcel.java.
-            Status::EX_HAS_REPLY_HEADER,
-            // This is special, and indicates to C++ binder proxies that the
-            // transaction has failed at a low level.
-            //Status::EX_TRANSACTION_FAILED, -- tested fromStatusT().
-            }) {
+                 // Status::EX_NONE,
+                 Status::EX_SECURITY, Status::EX_BAD_PARCELABLE, Status::EX_ILLEGAL_ARGUMENT,
+                 Status::EX_NULL_POINTER, Status::EX_ILLEGAL_STATE, Status::EX_NETWORK_MAIN_THREAD,
+                 Status::EX_UNSUPPORTED_OPERATION,
+                 // Status::EX_SERVICE_SPECIFIC, -- tested fromServiceSpecificError()
+                 Status::EX_PARCELABLE,
+                 // This is special and Java specific; see Parcel.java.
+                 Status::EX_HAS_REPLY_HEADER,
+                 // This is special, and indicates to C++ binder proxies that the
+                 // transaction has failed at a low level.
+                 // Status::EX_TRANSACTION_FAILED, -- tested fromStatusT().
+         }) {
         ASSERT_NE(OK, statusTFromBinderStatus(Status::fromExceptionCode(exceptionCode)));
     }
 }
 
 // Binder transaction errors show exactly in status_t; these come fromStatusT().
 TEST(audio_aidl_status_tests, binderStatusTransactionError) {
-    for (status_t status : {
-            OK, // Note: fromStatusT does check if this is 0, so this is no error.
-            UNKNOWN_ERROR,
-            NO_MEMORY,
-            INVALID_OPERATION,
-            BAD_VALUE,
-            BAD_TYPE,
-            NAME_NOT_FOUND,
-            PERMISSION_DENIED,
-            NO_INIT,
-            ALREADY_EXISTS,
-            DEAD_OBJECT,
-            FAILED_TRANSACTION,
-            BAD_INDEX,
-            NOT_ENOUGH_DATA,
-            WOULD_BLOCK,
-            TIMED_OUT,
-            UNKNOWN_TRANSACTION,
-            FDS_NOT_ALLOWED}) {
+    for (status_t status :
+         {OK,  // Note: fromStatusT does check if this is 0, so this is no error.
+          UNKNOWN_ERROR, NO_MEMORY, INVALID_OPERATION, BAD_VALUE, BAD_TYPE, NAME_NOT_FOUND,
+          PERMISSION_DENIED, NO_INIT, ALREADY_EXISTS, DEAD_OBJECT, FAILED_TRANSACTION, BAD_INDEX,
+          NOT_ENOUGH_DATA, WOULD_BLOCK, TIMED_OUT, UNKNOWN_TRANSACTION, FDS_NOT_ALLOWED}) {
         ASSERT_EQ(status, statusTFromBinderStatus(Status::fromStatusT(status)));
     }
 }
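
The loops above verify the status_t <-> binder::Status round trip over many values; for a single value the property reads as follows (a sketch that assumes the same includes and using-declarations as this test file):

    TEST(audio_aidl_status_tests, statusRoundTripSingleValueSketch) {
        // BAD_VALUE survives framework -> binder -> framework conversion unchanged.
        constexpr status_t kStatus = BAD_VALUE;
        ASSERT_EQ(kStatus, statusTFromBinderStatus(binderStatusFromStatusT(kStatus)));
    }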
diff --git a/media/libaudioclient/tests/audio_test_template.xml b/media/libaudioclient/tests/audio_test_template.xml
new file mode 100644
index 0000000..ed0cb21
--- /dev/null
+++ b/media/libaudioclient/tests/audio_test_template.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2022 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Unit test configuration for {MODULE}">
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
+
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push-file" key="{MODULE}" value="/data/local/tmp/{MODULE}" />
+
+        <!-- Files used for audio testing -->
+        <option name="push-file" key="bbb_1ch_8kHz_s16le.raw" value="/data/local/tmp/bbb_1ch_8kHz_s16le.raw" />
+        <option name="push-file" key="bbb_2ch_24kHz_s16le.raw" value="/data/local/tmp/bbb_2ch_24kHz_s16le.raw" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="{MODULE}" />
+    </test>
+</configuration>
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
new file mode 100644
index 0000000..1e26ff6
--- /dev/null
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -0,0 +1,910 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioTestUtils"
+
+#include <android-base/file.h>
+#include <system/audio_config.h>
+#include <utils/Log.h>
+
+#include "audio_test_utils.h"
+
+#define WAIT_PERIOD_MS 10  // from AudioTrack.cpp
+#define MAX_WAIT_TIME_MS 5000
+
+template <class T>
+constexpr void (*xmlDeleter)(T* t);
+template <>
+constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
+template <>
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar* s) { xmlFree(s); };
+
+/** @return a unique_ptr with the correct deleter for the libxml2 object. */
+template <class T>
+constexpr auto make_xmlUnique(T* t) {
+    // Wrap deleter in lambda to enable empty base optimization
+    auto deleter = [](T* t) { xmlDeleter<T>(t); };
+    return std::unique_ptr<T, decltype(deleter)>{t, deleter};
+}
+
+void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
+                                                      audio_port_handle_t deviceId) {
+    std::unique_lock<std::mutex> lock{mMutex};
+    ALOGD("%s  audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
+    mAudioIo = audioIo;
+    mDeviceId = deviceId;
+    mCondition.notify_all();
+}
+
+status_t OnAudioDeviceUpdateNotifier::waitForAudioDeviceCb(audio_port_handle_t expDeviceId) {
+    std::unique_lock<std::mutex> lock{mMutex};
+    if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
+        (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
+        mCondition.wait_for(lock, std::chrono::milliseconds(500));
+        if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
+            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId))
+            return TIMED_OUT;
+    }
+    return OK;
+}
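
waitForAudioDeviceCb() blocks the test until onAudioDeviceUpdate() delivers an I/O handle (and, optionally, the expected device id), giving up after 500 ms. The same wait logic, reduced to a self-contained sketch that uses a predicate-based wait_for to absorb spurious wakeups (all names here are hypothetical):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    struct DeviceUpdateWaiter {
        std::mutex mutex;
        std::condition_variable condition;
        int audioIo = -1;  // -1 plays the role of AUDIO_IO_HANDLE_NONE

        void onUpdate(int io) {
            std::lock_guard<std::mutex> lock(mutex);
            audioIo = io;
            condition.notify_all();
        }

        // Returns true if a callback arrived within 'timeout'.
        bool waitForCallback(std::chrono::milliseconds timeout) {
            std::unique_lock<std::mutex> lock(mutex);
            return condition.wait_for(lock, timeout, [this] { return audioIo != -1; });
        }
    };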
+
+AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
+                             audio_channel_mask_t channelMask, audio_output_flags_t flags,
+                             audio_session_t sessionId, AudioTrack::transfer_type transferType,
+                             audio_attributes_t* attributes, audio_offload_info_t* info)
+    : mSampleRate(sampleRate),
+      mFormat(format),
+      mChannelMask(channelMask),
+      mFlags(flags),
+      mSessionId(sessionId),
+      mTransferType(transferType),
+      mAttributes(attributes),
+      mOffloadInfo(info) {
+    mStopPlaying = false;
+    mBytesUsedSoFar = 0;
+    mState = PLAY_NO_INIT;
+    mMemCapacity = 0;
+    mMemoryDealer = nullptr;
+    mMemory = nullptr;
+}
+
+AudioPlayback::~AudioPlayback() {
+    stop();
+}
+
+status_t AudioPlayback::create() {
+    if (mState != PLAY_NO_INIT) return INVALID_OPERATION;
+    std::string packageName{"AudioPlayback"};
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = packageName;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    if (mTransferType == AudioTrack::TRANSFER_OBTAIN) {
+        mTrack = new AudioTrack(attributionSource);
+        mTrack->set(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, 0 /* frameCount */,
+                    mFlags, nullptr /* callback */, 0 /* notificationFrames */,
+                    nullptr /* sharedBuffer */, false /*canCallJava */, mSessionId, mTransferType,
+                    mOffloadInfo, attributionSource, mAttributes);
+    } else if (mTransferType == AudioTrack::TRANSFER_SHARED) {
+        mTrack = new AudioTrack(AUDIO_STREAM_MUSIC, mSampleRate, mFormat, mChannelMask, mMemory,
+                                mFlags, wp<AudioTrack::IAudioTrackCallback>::fromExisting(this), 0,
+                                mSessionId, mTransferType, nullptr, attributionSource, mAttributes);
+    } else {
+        ALOGE("Test application is not handling transfer type %s",
+              AudioTrack::convertTransferToText(mTransferType));
+        return INVALID_OPERATION;
+    }
+    mTrack->setCallerName(packageName);
+    status_t status = mTrack->initCheck();
+    if (NO_ERROR == status) mState = PLAY_READY;
+    return status;
+}
+
+status_t AudioPlayback::loadResource(const char* name) {
+    status_t status = OK;
+    FILE* fp = fopen(name, "rbe");
+    struct stat buf {};
+    if (fp && !fstat(fileno(fp), &buf)) {
+        mMemCapacity = buf.st_size;
+        mMemoryDealer = new MemoryDealer(mMemCapacity, "AudioPlayback");
+        if (nullptr == mMemoryDealer.get()) {
+            ALOGE("couldn't get MemoryDealer!");
+            fclose(fp);
+            return NO_MEMORY;
+        }
+        mMemory = mMemoryDealer->allocate(mMemCapacity);
+        if (nullptr == mMemory.get()) {
+            ALOGE("couldn't get IMemory!");
+            fclose(fp);
+            return NO_MEMORY;
+        }
+        uint8_t* ipBuffer = static_cast<uint8_t*>(static_cast<void*>(mMemory->unsecurePointer()));
+        fread(ipBuffer, sizeof(uint8_t), mMemCapacity, fp);
+    } else {
+        ALOGE("unable to open input file %s", name);
+        status = NAME_NOT_FOUND;
+    }
+    if (fp) fclose(fp);
+    return status;
+}
+
+sp<AudioTrack> AudioPlayback::getAudioTrackHandle() {
+    return (PLAY_NO_INIT != mState) ? mTrack : nullptr;
+}
+
+status_t AudioPlayback::start() {
+    status_t status;
+    if (PLAY_READY != mState) {
+        return INVALID_OPERATION;
+    } else {
+        status = mTrack->start();
+        if (OK == status) {
+            mState = PLAY_STARTED;
+            LOG_FATAL_IF(false != mTrack->stopped());
+        }
+    }
+    return status;
+}
+
+void AudioPlayback::onBufferEnd() {
+    std::unique_lock<std::mutex> lock{mMutex};
+    mStopPlaying = true;
+    mCondition.notify_all();
+}
+
+status_t AudioPlayback::fillBuffer() {
+    if (PLAY_STARTED != mState) return INVALID_OPERATION;
+    const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
+    int counter = 0;
+    uint8_t* ipBuffer = static_cast<uint8_t*>(static_cast<void*>(mMemory->unsecurePointer()));
+    size_t nonContig = 0;
+    size_t bytesAvailable = mMemCapacity - mBytesUsedSoFar;
+    while (bytesAvailable > 0) {
+        AudioTrack::Buffer trackBuffer;
+        trackBuffer.frameCount = mTrack->frameCount() * 2;
+        status_t status = mTrack->obtainBuffer(&trackBuffer, 1, &nonContig);
+        if (OK == status) {
+            size_t bytesToCopy = std::min(bytesAvailable, trackBuffer.size());
+            if (bytesToCopy > 0) {
+                memcpy(trackBuffer.data(), ipBuffer + mBytesUsedSoFar, bytesToCopy);
+            }
+            mTrack->releaseBuffer(&trackBuffer);
+            mBytesUsedSoFar += bytesToCopy;
+            bytesAvailable = mMemCapacity - mBytesUsedSoFar;
+            counter = 0;
+        } else if (WOULD_BLOCK == status) {
+            // if no buffer has been received for MAX_WAIT_TIME_MS, something has gone wrong
+            if (counter == maxTries) return TIMED_OUT;
+            counter++;
+        }
+    }
+    return OK;
+}
+
+status_t AudioPlayback::waitForConsumption(bool testSeek) {
+    if (PLAY_STARTED != mState) return INVALID_OPERATION;
+
+    const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
+    int counter = 0;
+    size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
+    while (!mStopPlaying && counter < maxTries) {
+        uint32_t currPosition;
+        mTrack->getPosition(&currPosition);
+        if (currPosition >= totalFrameCount) counter++;
+
+        if (testSeek && (currPosition > totalFrameCount * 0.6)) {
+            testSeek = false;
+            if (!mTrack->hasStarted()) return BAD_VALUE;
+            mTrack->pauseAndWait(std::chrono::seconds(2));
+            if (mTrack->hasStarted()) return BAD_VALUE;
+            mTrack->reload();
+            mTrack->getPosition(&currPosition);
+            if (currPosition != 0) return BAD_VALUE;
+            mTrack->start();
+            while (currPosition < totalFrameCount * 0.3) {
+                mTrack->getPosition(&currPosition);
+            }
+            mTrack->pauseAndWait(std::chrono::seconds(2));
+            uint32_t setPosition = totalFrameCount * 0.9;
+            mTrack->setPosition(setPosition);
+            uint32_t bufferPosition;
+            mTrack->getBufferPosition(&bufferPosition);
+            if (bufferPosition != setPosition) return BAD_VALUE;
+            mTrack->start();
+        }
+        std::this_thread::sleep_for(std::chrono::milliseconds(WAIT_PERIOD_MS));
+    }
+    if (!mStopPlaying && counter == maxTries) return TIMED_OUT;
+    return OK;
+}
+
+status_t AudioPlayback::onProcess(bool testSeek) {
+    if (mTransferType == AudioTrack::TRANSFER_SHARED)
+        return waitForConsumption(testSeek);
+    else if (mTransferType == AudioTrack::TRANSFER_OBTAIN)
+        return fillBuffer();
+    else
+        return INVALID_OPERATION;
+}
+
+void AudioPlayback::stop() {
+    std::unique_lock<std::mutex> lock{mMutex};
+    mStopPlaying = true;
+    if (mState != PLAY_STOPPED && mState != PLAY_NO_INIT) {
+        int32_t msec = 0;
+        (void)mTrack->pendingDuration(&msec);
+        mTrack->stopAndJoinCallbacks();
+        LOG_FATAL_IF(true != mTrack->stopped());
+        mState = PLAY_STOPPED;
+        if (msec > 0) {
+            ALOGD("deleting recycled track, waiting for data drain (%d msec)", msec);
+            usleep(msec * 1000LL);
+        }
+    }
+}
+
+// hold pcm data sent by AudioRecord
+RawBuffer::RawBuffer(int64_t ptsPipeline, int64_t ptsManual, int32_t capacity)
+    : mData(capacity > 0 ? new uint8_t[capacity] : nullptr),
+      mPtsPipeline(ptsPipeline),
+      mPtsManual(ptsManual),
+      mCapacity(capacity) {}
+
+// Simple AudioCapture
+size_t AudioCapture::onMoreData(const AudioRecord::Buffer& buffer) {
+    if (mState != REC_STARTED) {
+        ALOGE("Unexpected Callback from audiorecord, not reading data");
+        return 0;
+    }
+
+    // no more frames to read
+    if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
+        mStopRecording = true;
+        return 0;
+    }
+
+    int64_t timeUs = 0, position = 0, timeNs = 0;
+    ExtendedTimestamp ts;
+    ExtendedTimestamp::Location location;
+    const int32_t usPerSec = 1000000;
+
+    if (mRecord->getTimestamp(&ts) == OK &&
+        ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC, &location) ==
+                OK) {
+        // Use audio timestamp.
+        timeUs = timeNs / 1000 -
+                 (position - mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
+    } else {
+        // This should not happen in the normal case.
+        ALOGW("Failed to get audio timestamp, fallback to use systemclock");
+        timeUs = systemTime() / 1000LL;
+        // Estimate the real sampling time of the 1st sample in this buffer
+        // from AudioRecord's latency. (Apply this adjustment first so that
+        // the start time logic is not affected.)
+        timeUs -= mRecord->latency() * 1000LL;
+    }
+
+    ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
+
+    const size_t frameSize = mRecord->frameSize();
+    uint64_t numLostBytes = (uint64_t)mRecord->getInputFramesLost() * frameSize;
+    if (numLostBytes > 0) {
+        ALOGW("Lost audio record data: %" PRIu64 " bytes", numLostBytes);
+    }
+    std::deque<RawBuffer> tmpQueue;
+    while (numLostBytes > 0) {
+        uint64_t bufferSize = numLostBytes;
+        if (numLostBytes > mMaxBytesPerCallback) {
+            numLostBytes -= mMaxBytesPerCallback;
+            bufferSize = mMaxBytesPerCallback;
+        } else {
+            numLostBytes = 0;
+        }
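+        // Frames -> microseconds, rounded to nearest by adding half the sample rate before dividing.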
+        const int64_t timestampUs =
+                ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                mRecord->getSampleRate();
+        RawBuffer emptyBuffer{timeUs, timestampUs, static_cast<int32_t>(bufferSize)};
+        memset(emptyBuffer.mData.get(), 0, bufferSize);
+        mNumFramesLost += bufferSize / frameSize;
+        mNumFramesReceived += bufferSize / frameSize;
+        tmpQueue.push_back(std::move(emptyBuffer));
+    }
+
+    if (buffer.size() == 0) {
+        ALOGW("Nothing is available from AudioRecord callback buffer");
+    } else {
+        const size_t bufferSize = buffer.size();
+        const int64_t timestampUs =
+                ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                mRecord->getSampleRate();
+        RawBuffer audioBuffer{timeUs, timestampUs, static_cast<int32_t>(bufferSize)};
+        memcpy(audioBuffer.mData.get(), buffer.data(), bufferSize);
+        mNumFramesReceived += bufferSize / frameSize;
+        tmpQueue.push_back(std::move(audioBuffer));
+    }
+
+    if (tmpQueue.size() > 0) {
+        std::unique_lock<std::mutex> lock{mMutex};
+        for (auto it = tmpQueue.begin(); it != tmpQueue.end(); it++)
+            mBuffersReceived.push_back(std::move(*it));
+        mCondition.notify_all();
+    }
+    return buffer.size();
+}
+
+void AudioCapture::onOverrun() {
+    ALOGV("received event overrun");
+    mBufferOverrun = true;
+}
+
+void AudioCapture::onMarker(uint32_t markerPosition) {
+    ALOGV("received Callback at position %d", markerPosition);
+    mReceivedCbMarkerAtPosition = markerPosition;
+}
+
+void AudioCapture::onNewPos(uint32_t markerPosition) {
+    ALOGV("received Callback at position %d", markerPosition);
+    mReceivedCbMarkerCount++;
+}
+
+void AudioCapture::onNewIAudioRecord() {
+    ALOGV("IAudioRecord is re-created");
+}
+
+AudioCapture::AudioCapture(audio_source_t inputSource, uint32_t sampleRate, audio_format_t format,
+                           audio_channel_mask_t channelMask, audio_input_flags_t flags,
+                           audio_session_t sessionId, AudioRecord::transfer_type transferType,
+                           const audio_attributes_t* attributes)
+    : mInputSource(inputSource),
+      mSampleRate(sampleRate),
+      mFormat(format),
+      mChannelMask(channelMask),
+      mFlags(flags),
+      mSessionId(sessionId),
+      mTransferType(transferType),
+      mAttributes(attributes) {
+    mFrameCount = 0;
+    mNotificationFrames = 0;
+    mNumFramesToRecord = 0;
+    mNumFramesReceived = 0;
+    mNumFramesLost = 0;
+    mBufferOverrun = false;
+    mMarkerPosition = 0;
+    mMarkerPeriod = 0;
+    mReceivedCbMarkerAtPosition = -1;
+    mReceivedCbMarkerCount = 0;
+    mState = REC_NO_INIT;
+    mStopRecording = false;
+}
+
+AudioCapture::~AudioCapture() {
+    if (mOutFileFd > 0) close(mOutFileFd);
+    stop();
+}
+
+status_t AudioCapture::create() {
+    if (mState != REC_NO_INIT) return INVALID_OPERATION;
+    // get Min Frame Count
+    size_t minFrameCount;
+    status_t status =
+            AudioRecord::getMinFrameCount(&minFrameCount, mSampleRate, mFormat, mChannelMask);
+    if (NO_ERROR != status) return status;
+    // Limit notificationFrames based on the client buffer size
+    const int samplesPerFrame = audio_channel_count_from_in_mask(mChannelMask);
+    const int bytesPerSample = audio_bytes_per_sample(mFormat);
+    mNotificationFrames = mMaxBytesPerCallback / (samplesPerFrame * bytesPerSample);
+    // select frameCount to be at least minFrameCount
+    mFrameCount = 2 * mNotificationFrames;
+    while (mFrameCount < minFrameCount) {
+        mFrameCount += mNotificationFrames;
+    }
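+    // e.g. (hypothetical values) with mNotificationFrames = 512 and minFrameCount = 1920,
+    // mFrameCount grows 1024 -> 1536 -> 2048, remaining a multiple of the notification period.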
+    if (mFlags & AUDIO_INPUT_FLAG_FAST) {
+        ALOGW("Overriding all previous computations");
+        mFrameCount = 0;
+        mNotificationFrames = 0;
+    }
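+    // For FAST capture the zero values above let AudioRecord choose its own frameCount and
+    // notification period; the effective values are read back after initCheck() succeeds.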
+    mNumFramesToRecord = (mSampleRate * 0.25);  // record .25 sec
+    std::string packageName{"AudioCapture"};
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = packageName;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    if (mTransferType == AudioRecord::TRANSFER_OBTAIN) {
+        if (mSampleRate == 48000) {  // test all available constructors
+            mRecord = new AudioRecord(mInputSource, mSampleRate, mFormat, mChannelMask,
+                                      attributionSource, mFrameCount, nullptr /* callback */,
+                                      mNotificationFrames, mSessionId, mTransferType, mFlags,
+                                      mAttributes);
+        } else {
+            mRecord = new AudioRecord(attributionSource);
+            status = mRecord->set(mInputSource, mSampleRate, mFormat, mChannelMask, mFrameCount,
+                                  nullptr /* callback */, 0 /* notificationFrames */,
+                                  false /* canCallJava */, mSessionId, mTransferType, mFlags,
+                                  attributionSource.uid, attributionSource.pid, mAttributes);
+        }
+        if (NO_ERROR != status) return status;
+    } else if (mTransferType == AudioRecord::TRANSFER_CALLBACK) {
+        mRecord = new AudioRecord(mInputSource, mSampleRate, mFormat, mChannelMask,
+                                  attributionSource, mFrameCount, this, mNotificationFrames,
+                                  mSessionId, mTransferType, mFlags, mAttributes);
+    } else {
+        ALOGE("Test application is not handling transfer type %s",
+              AudioRecord::convertTransferToText(mTransferType));
+        return NO_INIT;
+    }
+    mRecord->setCallerName(packageName);
+    status = mRecord->initCheck();
+    if (NO_ERROR == status) mState = REC_READY;
+    if (mFlags & AUDIO_INPUT_FLAG_FAST) {
+        mFrameCount = mRecord->frameCount();
+        mNotificationFrames = mRecord->getNotificationPeriodInFrames();
+        mMaxBytesPerCallback = mNotificationFrames * samplesPerFrame * bytesPerSample;
+    }
+    return status;
+}
+
+status_t AudioCapture::setRecordDuration(float durationInSec) {
+    if (REC_READY != mState) {
+        return INVALID_OPERATION;
+    }
+    uint32_t sampleRate = mSampleRate == 0 ? mRecord->getSampleRate() : mSampleRate;
+    mNumFramesToRecord = (sampleRate * durationInSec);
+    return OK;
+}
+
+status_t AudioCapture::enableRecordDump() {
+    if (mOutFileFd != -1) {
+        return INVALID_OPERATION;
+    }
+    TemporaryFile tf("/data/local/tmp");
+    tf.DoNotRemove();
+    mOutFileFd = tf.release();
+    mFileName = std::string{tf.path};
+    return OK;
+}
+
+sp<AudioRecord> AudioCapture::getAudioRecordHandle() {
+    return (REC_NO_INIT == mState) ? nullptr : mRecord;
+}
+
+status_t AudioCapture::start(AudioSystem::sync_event_t event, audio_session_t triggerSession) {
+    status_t status;
+    if (REC_READY != mState) {
+        return INVALID_OPERATION;
+    } else {
+        status = mRecord->start(event, triggerSession);
+        if (OK == status) {
+            mState = REC_STARTED;
+            LOG_FATAL_IF(mRecord->stopped());
+        }
+    }
+    return status;
+}
+
+status_t AudioCapture::stop() {
+    status_t status = OK;
+    mStopRecording = true;
+    if (mState != REC_STOPPED && mState != REC_NO_INIT) {
+        if (mInputSource != AUDIO_SOURCE_DEFAULT) {
+            bool state = false;
+            status = AudioSystem::isSourceActive(mInputSource, &state);
+            if (status == OK && !state) status = BAD_VALUE;
+        }
+        mRecord->stopAndJoinCallbacks();
+        mState = REC_STOPPED;
+        LOG_FATAL_IF(!mRecord->stopped());
+    }
+    return status;
+}
+
+status_t AudioCapture::obtainBuffer(RawBuffer& buffer) {
+    if (REC_STARTED != mState) return INVALID_OPERATION;
+    const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
+    int counter = 0;
+    size_t nonContig = 0;
+    while (mNumFramesReceived < mNumFramesToRecord) {
+        AudioRecord::Buffer recordBuffer;
+        recordBuffer.frameCount = mNotificationFrames;
+        status_t status = mRecord->obtainBuffer(&recordBuffer, 1, &nonContig);
+        if (OK == status) {
+            const int64_t timestampUs =
+                    ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                    mRecord->getSampleRate();
+            RawBuffer buff{-1, timestampUs, static_cast<int32_t>(recordBuffer.size())};
+            memcpy(buff.mData.get(), recordBuffer.data(), recordBuffer.size());
+            buffer = std::move(buff);
+            mNumFramesReceived += recordBuffer.size() / mRecord->frameSize();
+            mRecord->releaseBuffer(&recordBuffer);
+            counter = 0;
+        } else if (WOULD_BLOCK == status) {
+            // if no buffer has been received for MAX_WAIT_TIME_MS, something has gone wrong
+            if (counter == maxTries) return TIMED_OUT;
+            counter++;
+        }
+    }
+    return OK;
+}
+
+status_t AudioCapture::obtainBufferCb(RawBuffer& buffer) {
+    if (REC_STARTED != mState) return INVALID_OPERATION;
+    const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
+    int counter = 0;
+    std::unique_lock<std::mutex> lock{mMutex};
+    while (mBuffersReceived.empty() && !mStopRecording && counter < maxTries) {
+        mCondition.wait_for(lock, std::chrono::milliseconds(WAIT_PERIOD_MS));
+        counter++;
+    }
+    if (!mBuffersReceived.empty()) {
+        auto it = mBuffersReceived.begin();
+        buffer = std::move(*it);
+        mBuffersReceived.erase(it);
+    } else {
+        if (!mStopRecording && counter == maxTries) return TIMED_OUT;
+    }
+    return OK;
+}
+
+status_t AudioCapture::audioProcess() {
+    RawBuffer buffer;
+    status_t status = OK;
+    while (mNumFramesReceived < mNumFramesToRecord && status == OK) {
+        if (mTransferType == AudioRecord::TRANSFER_CALLBACK)
+            status = obtainBufferCb(buffer);
+        else
+            status = obtainBuffer(buffer);
+        if (OK == status && mOutFileFd > 0) {
+            const char* ptr = static_cast<const char*>(static_cast<void*>(buffer.mData.get()));
+            write(mOutFileFd, ptr, buffer.mCapacity);
+        }
+    }
+    return status;
+}
+
+status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
+    int attempts = 5;
+    status_t status;
+    unsigned int generation1, generation;
+    unsigned int numPorts = 0;
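+    // Query the port count first, then fetch the list; if the generation changes between the
+    // two calls the list was modified concurrently, so retry a limited number of times.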
+    do {
+        if (attempts-- < 0) {
+            status = TIMED_OUT;
+            break;
+        }
+        status = AudioSystem::listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE, &numPorts,
+                                             nullptr, &generation1);
+        if (status != NO_ERROR) {
+            ALOGE("AudioSystem::listAudioPorts returned error %d", status);
+            break;
+        }
+        portsVec.resize(numPorts);
+        status = AudioSystem::listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE, &numPorts,
+                                             portsVec.data(), &generation);
+    } while (generation1 != generation && status == NO_ERROR);
+    if (status != NO_ERROR) {
+        numPorts = 0;
+        portsVec.clear();
+    }
+    return status;
+}
+
+status_t getPortById(const audio_port_handle_t portId, audio_port_v7& port) {
+    std::vector<struct audio_port_v7> ports;
+    status_t status = listAudioPorts(ports);
+    if (status != OK) return status;
+    for (auto i = 0; i < ports.size(); i++) {
+        if (ports[i].id == portId) {
+            port = ports[i];
+            return OK;
+        }
+    }
+    return BAD_VALUE;
+}
+
+status_t getPortByAttributes(audio_port_role_t role, audio_port_type_t type,
+                             audio_devices_t deviceType, const std::string& address,
+                             audio_port_v7& port) {
+    std::vector<struct audio_port_v7> ports;
+    status_t status = listAudioPorts(ports);
+    if (status != OK) return status;
+    for (auto i = 0; i < ports.size(); i++) {
+        if (ports[i].role == role && ports[i].type == type &&
+            ports[i].ext.device.type == deviceType &&
+            !strncmp(ports[i].ext.device.address, address.c_str(), AUDIO_DEVICE_MAX_ADDRESS_LEN)) {
+            port = ports[i];
+            return OK;
+        }
+    }
+    return BAD_VALUE;
+}
+
+status_t listAudioPatches(std::vector<struct audio_patch>& patchesVec) {
+    int attempts = 5;
+    status_t status;
+    unsigned int generation1, generation;
+    unsigned int numPatches = 0;
+    do {
+        if (attempts-- < 0) {
+            status = TIMED_OUT;
+            break;
+        }
+        status = AudioSystem::listAudioPatches(&numPatches, nullptr, &generation1);
+        if (status != NO_ERROR) {
+            ALOGE("AudioSystem::listAudioPatches returned error %d", status);
+            break;
+        }
+        patchesVec.resize(numPatches);
+        status = AudioSystem::listAudioPatches(&numPatches, patchesVec.data(), &generation);
+    } while (generation1 != generation && status == NO_ERROR);
+    if (status != NO_ERROR) {
+        numPatches = 0;
+        patchesVec.clear();
+    }
+    return status;
+}
+
+status_t getPatchForOutputMix(audio_io_handle_t audioIo, audio_patch& patch) {
+    std::vector<struct audio_patch> patches;
+    status_t status = listAudioPatches(patches);
+    if (status != OK) return status;
+
+    for (auto i = 0; i < patches.size(); i++) {
+        for (auto j = 0; j < patches[i].num_sources; j++) {
+            if (patches[i].sources[j].type == AUDIO_PORT_TYPE_MIX &&
+                patches[i].sources[j].ext.mix.handle == audioIo) {
+                patch = patches[i];
+                return OK;
+            }
+        }
+    }
+    return BAD_VALUE;
+}
+
+status_t getPatchForInputMix(audio_io_handle_t audioIo, audio_patch& patch) {
+    std::vector<struct audio_patch> patches;
+    status_t status = listAudioPatches(patches);
+    if (status != OK) return status;
+
+    for (auto i = 0; i < patches.size(); i++) {
+        for (auto j = 0; j < patches[i].num_sinks; j++) {
+            if (patches[i].sinks[j].type == AUDIO_PORT_TYPE_MIX &&
+                patches[i].sinks[j].ext.mix.handle == audioIo) {
+                patch = patches[i];
+                return OK;
+            }
+        }
+    }
+    return BAD_VALUE;
+}
+
+bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch) {
+    for (auto j = 0; j < patch.num_sinks; j++) {
+        if (patch.sinks[j].type == AUDIO_PORT_TYPE_DEVICE && patch.sinks[j].id == deviceId) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch) {
+    for (auto j = 0; j < patch.num_sources; j++) {
+        if (patch.sources[j].type == AUDIO_PORT_TYPE_DEVICE && patch.sources[j].id == deviceId) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId) {
+    struct audio_patch patch;
+    if (getPatchForOutputMix(audioIo, patch) == OK) {
+        return patchContainsOutputDevice(deviceId, patch);
+    }
+    return false;
+}
+
+bool checkPatchCapture(audio_io_handle_t audioIo, audio_port_handle_t deviceId) {
+    struct audio_patch patch;
+    if (getPatchForInputMix(audioIo, patch) == OK) {
+        return patchContainsInputDevice(deviceId, patch);
+    }
+    return false;
+}
+
+std::string dumpPortConfig(const audio_port_config& port) {
+    std::ostringstream result;
+    std::string deviceInfo;
+    if (port.type == AUDIO_PORT_TYPE_DEVICE) {
+        if (port.ext.device.type & AUDIO_DEVICE_BIT_IN) {
+            InputDeviceConverter::maskToString(port.ext.device.type, deviceInfo);
+        } else {
+            OutputDeviceConverter::maskToString(port.ext.device.type, deviceInfo);
+        }
+        deviceInfo += std::string(", address = ") + port.ext.device.address;
+    }
+    result << "audio_port_handle_t = " << port.id << ", "
+           << "Role = " << (port.role == AUDIO_PORT_ROLE_SOURCE ? "source" : "sink") << ", "
+           << "Type = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? "device" : "mix") << ", "
+           << "deviceInfo = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? deviceInfo : "") << ", "
+           << "config_mask = 0x" << std::hex << port.config_mask << std::dec << ", ";
+    if (port.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+        result << "sample rate = " << port.sample_rate << ", ";
+    }
+    if (port.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+        result << "channel mask = " << port.channel_mask << ", ";
+    }
+    if (port.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+        result << "format = " << port.format << ", ";
+    }
+    result << "input flags = " << port.flags.input << ", ";
+    result << "output flags = " << port.flags.output << ", ";
+    result << "mix io handle = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? 0 : port.ext.mix.handle)
+           << "\n";
+    return result.str();
+}
+
+std::string dumpPatch(const audio_patch& patch) {
+    std::ostringstream result;
+    result << "----------------- Dumping Patch ------------ \n";
+    result << "Patch Handle: " << patch.id << ", sources: " << patch.num_sources
+           << ", sink: " << patch.num_sinks << "\n";
+    audio_port_v7 port;
+    for (uint32_t i = 0; i < patch.num_sources; i++) {
+        result << "----------------- Dumping Source Port Config @ index " << i
+               << " ------------ \n";
+        result << dumpPortConfig(patch.sources[i]);
+        result << "----------------- Dumping Source Port for id " << patch.sources[i].id
+               << " ------------ \n";
+        getPortById(patch.sources[i].id, port);
+        result << dumpPort(port);
+    }
+    for (uint32_t i = 0; i < patch.num_sinks; i++) {
+        result << "----------------- Dumping Sink Port Config @ index " << i << " ------------ \n";
+        result << dumpPortConfig(patch.sinks[i]);
+        result << "----------------- Dumping Sink Port for id " << patch.sinks[i].id
+               << " ------------ \n";
+        getPortById(patch.sinks[i].id, port);
+        result << dumpPort(port);
+    }
+    return result.str();
+}
+
+std::string dumpPort(const audio_port_v7& port) {
+    std::ostringstream result;
+    std::string deviceInfo;
+    if (port.type == AUDIO_PORT_TYPE_DEVICE) {
+        if (port.ext.device.type & AUDIO_DEVICE_BIT_IN) {
+            InputDeviceConverter::maskToString(port.ext.device.type, deviceInfo);
+        } else {
+            OutputDeviceConverter::maskToString(port.ext.device.type, deviceInfo);
+        }
+        deviceInfo += std::string(", address = ") + port.ext.device.address;
+    }
+    result << "audio_port_handle_t = " << port.id << ", "
+           << "Role = " << (port.role == AUDIO_PORT_ROLE_SOURCE ? "source" : "sink") << ", "
+           << "Type = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? "device" : "mix") << ", "
+           << "deviceInfo = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? deviceInfo : "") << ", "
+           << "Name = " << port.name << ", "
+           << "num profiles = " << port.num_audio_profiles << ", "
+           << "mix io handle = " << (port.type == AUDIO_PORT_TYPE_DEVICE ? 0 : port.ext.mix.handle)
+           << ", ";
+    for (int i = 0; i < port.num_audio_profiles; i++) {
+        result << "AudioProfile = " << i << " {";
+        result << "format = " << port.audio_profiles[i].format << ", ";
+        result << "samplerates = ";
+        for (int j = 0; j < port.audio_profiles[i].num_sample_rates; j++) {
+            result << port.audio_profiles[i].sample_rates[j] << ", ";
+        }
+        result << "channelmasks = ";
+        for (int j = 0; j < port.audio_profiles[i].num_channel_masks; j++) {
+            result << "0x" << std::hex << port.audio_profiles[i].channel_masks[j] << std::dec
+                   << ", ";
+        }
+        result << "} ";
+    }
+    result << dumpPortConfig(port.active_config);
+    return result.str();
+}
+
+std::string getXmlAttribute(const xmlNode* cur, const char* attribute) {
+    auto charPtr = make_xmlUnique(xmlGetProp(cur, reinterpret_cast<const xmlChar*>(attribute)));
+    if (charPtr == NULL) {
+        return "";
+    }
+    std::string value(reinterpret_cast<const char*>(charPtr.get()));
+    return value;
+}
+
+status_t parse_audio_policy_configuration_xml(std::vector<std::string>& attachedDevices,
+                                              std::vector<MixPort>& mixPorts,
+                                              std::vector<Route>& routes) {
+    std::string path = audio_find_readable_configuration_file("audio_policy_configuration.xml");
+    if (path.length() == 0) return UNKNOWN_ERROR;
+    auto doc = make_xmlUnique(xmlParseFile(path.c_str()));
+    if (doc == nullptr) return UNKNOWN_ERROR;
+    xmlNode* root = xmlDocGetRootElement(doc.get());
+    if (root == nullptr) return UNKNOWN_ERROR;
+    if (xmlXIncludeProcess(doc.get()) < 0) return UNKNOWN_ERROR;
+    mixPorts.clear();
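+    // Walk audioPolicyConfiguration -> modules -> module, collecting the "source" mixPorts,
+    // the attachedDevices items and the routes declared by each module.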
+    if (!xmlStrcmp(root->name, reinterpret_cast<const xmlChar*>("audioPolicyConfiguration"))) {
+        std::string raw{getXmlAttribute(root, "version")};
+        for (auto* child = root->xmlChildrenNode; child != nullptr; child = child->next) {
+            if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>("modules"))) {
+                xmlNode* root = child;
+                for (auto* child = root->xmlChildrenNode; child != nullptr; child = child->next) {
+                    if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>("module"))) {
+                        xmlNode* root = child;
+                        for (auto* child = root->xmlChildrenNode; child != nullptr;
+                             child = child->next) {
+                            if (!xmlStrcmp(child->name,
+                                           reinterpret_cast<const xmlChar*>("mixPorts"))) {
+                                xmlNode* root = child;
+                                for (auto* child = root->xmlChildrenNode; child != nullptr;
+                                     child = child->next) {
+                                    if (!xmlStrcmp(child->name,
+                                                   reinterpret_cast<const xmlChar*>("mixPort"))) {
+                                        MixPort mixPort;
+                                        xmlNode* root = child;
+                                        mixPort.name = getXmlAttribute(root, "name");
+                                        mixPort.role = getXmlAttribute(root, "role");
+                                        mixPort.flags = getXmlAttribute(root, "flags");
+                                        if (mixPort.role == "source") mixPorts.push_back(mixPort);
+                                    }
+                                }
+                            } else if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>(
+                                                                       "attachedDevices"))) {
+                                xmlNode* root = child;
+                                for (auto* child = root->xmlChildrenNode; child != nullptr;
+                                     child = child->next) {
+                                    if (!xmlStrcmp(child->name,
+                                                   reinterpret_cast<const xmlChar*>("item"))) {
+                                        auto xmlValue = make_xmlUnique(xmlNodeListGetString(
+                                                child->doc, child->xmlChildrenNode, 1));
+                                        if (xmlValue == nullptr) {
+                                            raw = "";
+                                        } else {
+                                            raw = reinterpret_cast<const char*>(xmlValue.get());
+                                        }
+                                        std::string& value = raw;
+                                        attachedDevices.push_back(std::move(value));
+                                    }
+                                }
+                            } else if (!xmlStrcmp(child->name,
+                                                  reinterpret_cast<const xmlChar*>("routes"))) {
+                                xmlNode* root = child;
+                                for (auto* child = root->xmlChildrenNode; child != nullptr;
+                                     child = child->next) {
+                                    if (!xmlStrcmp(child->name,
+                                                   reinterpret_cast<const xmlChar*>("route"))) {
+                                        Route route;
+                                        xmlNode* root = child;
+                                        route.name = getXmlAttribute(root, "name");
+                                        route.sources = getXmlAttribute(root, "sources");
+                                        route.sink = getXmlAttribute(root, "sink");
+                                        routes.push_back(route);
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return OK;
+}
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
new file mode 100644
index 0000000..90c30c2
--- /dev/null
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -0,0 +1,209 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_TEST_UTILS_H_
+#define AUDIO_TEST_UTILS_H_
+
+#include <sys/stat.h>
+#include <unistd.h>
+#include <atomic>
+#include <chrono>
+#include <cinttypes>
+#include <deque>
+#include <memory>
+#include <mutex>
+#include <thread>
+
+#include <binder/MemoryDealer.h>
+#include <libxml/parser.h>
+#include <libxml/xinclude.h>
+#include <media/AidlConversion.h>
+#include <media/AudioRecord.h>
+#include <media/AudioTrack.h>
+
+using namespace android;
+
+struct MixPort {
+    std::string name;
+    std::string role;
+    std::string flags;
+};
+
+struct Route {
+    std::string name;
+    std::string sources;
+    std::string sink;
+};
+
+status_t parse_audio_policy_configuration_xml(std::vector<std::string>& attachedDevices,
+                                              std::vector<MixPort>& mixPorts,
+                                              std::vector<Route>& routes);
+status_t listAudioPorts(std::vector<audio_port_v7>& portsVec);
+status_t listAudioPatches(std::vector<struct audio_patch>& patchesVec);
+status_t getPortByAttributes(audio_port_role_t role, audio_port_type_t type,
+                             audio_devices_t deviceType, const std::string& address,
+                             audio_port_v7& port);
+status_t getPatchForOutputMix(audio_io_handle_t audioIo, audio_patch& patch);
+status_t getPatchForInputMix(audio_io_handle_t audioIo, audio_patch& patch);
+bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch);
+bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch);
+bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+bool checkPatchCapture(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+std::string dumpPort(const audio_port_v7& port);
+std::string dumpPortConfig(const audio_port_config& port);
+std::string dumpPatch(const audio_patch& patch);
+
+class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
+  public:
+    audio_io_handle_t mAudioIo = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mDeviceId = AUDIO_PORT_HANDLE_NONE;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+    status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
+};
+
+// Simple AudioPlayback class.
+class AudioPlayback : public AudioTrack::IAudioTrackCallback {
+    friend sp<AudioPlayback>;
+    AudioPlayback(uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask,
+                  audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
+                  audio_session_t sessionId = AUDIO_SESSION_NONE,
+                  AudioTrack::transfer_type transferType = AudioTrack::TRANSFER_SHARED,
+                  audio_attributes_t* attributes = nullptr, audio_offload_info_t* info = nullptr);
+
+  public:
+    status_t loadResource(const char* name);
+    status_t create();
+    sp<AudioTrack> getAudioTrackHandle();
+    status_t start();
+    status_t waitForConsumption(bool testSeek = false);
+    status_t fillBuffer();
+    status_t onProcess(bool testSeek = false);
+    virtual void onBufferEnd() override;
+    void stop();
+
+    bool mStopPlaying;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+
+    enum State {
+        PLAY_NO_INIT,
+        PLAY_READY,
+        PLAY_STARTED,
+        PLAY_STOPPED,
+    };
+
+  private:
+    ~AudioPlayback();
+    const uint32_t mSampleRate;
+    const audio_format_t mFormat;
+    const audio_channel_mask_t mChannelMask;
+    const audio_output_flags_t mFlags;
+    const audio_session_t mSessionId;
+    const AudioTrack::transfer_type mTransferType;
+    const audio_attributes_t* mAttributes;
+    const audio_offload_info_t* mOffloadInfo;
+
+    size_t mBytesUsedSoFar;
+    State mState;
+    size_t mMemCapacity;
+    sp<MemoryDealer> mMemoryDealer;
+    sp<IMemory> mMemory;
+
+    sp<AudioTrack> mTrack;
+};
+
+// Holds PCM data delivered by AudioRecord.
+class RawBuffer {
+  public:
+    RawBuffer(int64_t ptsPipeline = -1, int64_t ptsManual = -1, int32_t capacity = 0);
+
+    std::unique_ptr<uint8_t[]> mData;
+    int64_t mPtsPipeline;
+    int64_t mPtsManual;
+    int32_t mCapacity;
+};
+
+// Simple AudioCapture
+class AudioCapture : public AudioRecord::IAudioRecordCallback {
+  public:
+    AudioCapture(audio_source_t inputSource, uint32_t sampleRate, audio_format_t format,
+                 audio_channel_mask_t channelMask,
+                 audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                 audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
+                 AudioRecord::transfer_type transferType = AudioRecord::TRANSFER_CALLBACK,
+                 const audio_attributes_t* attributes = nullptr);
+    ~AudioCapture();
+    size_t onMoreData(const AudioRecord::Buffer& buffer) override;
+    void onOverrun() override;
+    void onMarker(uint32_t markerPosition) override;
+    void onNewPos(uint32_t newPos) override;
+    void onNewIAudioRecord() override;
+    status_t create();
+    status_t setRecordDuration(float durationInSec);
+    status_t enableRecordDump();
+    std::string getRecordDumpFileName() const { return mFileName; }
+    sp<AudioRecord> getAudioRecordHandle();
+    status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
+                   audio_session_t triggerSession = AUDIO_SESSION_NONE);
+    status_t obtainBufferCb(RawBuffer& buffer);
+    status_t obtainBuffer(RawBuffer& buffer);
+    status_t audioProcess();
+    status_t stop();
+
+    uint32_t mFrameCount;
+    uint32_t mNotificationFrames;
+    int64_t mNumFramesToRecord;
+    int64_t mNumFramesReceived;
+    int64_t mNumFramesLost;
+    uint32_t mMarkerPosition;
+    uint32_t mMarkerPeriod;
+    uint32_t mReceivedCbMarkerAtPosition;
+    uint32_t mReceivedCbMarkerCount;
+    bool mBufferOverrun;
+
+    enum State {
+        REC_NO_INIT,
+        REC_READY,
+        REC_STARTED,
+        REC_STOPPED,
+    };
+
+  private:
+    const audio_source_t mInputSource;
+    const uint32_t mSampleRate;
+    const audio_format_t mFormat;
+    const audio_channel_mask_t mChannelMask;
+    const audio_input_flags_t mFlags;
+    const audio_session_t mSessionId;
+    const AudioRecord::transfer_type mTransferType;
+    const audio_attributes_t* mAttributes;
+
+    size_t mMaxBytesPerCallback = 2048;
+    sp<AudioRecord> mRecord;
+    State mState;
+    bool mStopRecording;
+    std::string mFileName;
+    int mOutFileFd = -1;
+
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+    std::deque<RawBuffer> mBuffersReceived;
+};
+
+#endif  // AUDIO_TEST_UTILS_H_
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
new file mode 100644
index 0000000..d1e3d16
--- /dev/null
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -0,0 +1,312 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioClientSerializationUnitTests"
+
+#include <cstdint>
+#include <cstdlib>
+#include <ctime>
+
+#include <gtest/gtest.h>
+
+#include <android_audio_policy_configuration_V7_0-enums.h>
+#include <xsdc/XsdcSupport.h>
+
+#include "audio_test_utils.h"
+
+using namespace android;
+namespace xsd {
+using namespace ::android::audio::policy::configuration::V7_0;
+}
+
+template <typename T, typename X, typename FUNC>
+std::vector<T> getFlags(const xsdc_enum_range<X>& range, const FUNC& func,
+                        const std::string& findString = {}) {
+    std::vector<T> vec;
+    for (const auto& xsdEnumVal : range) {
+        T enumVal;
+        std::string enumString = toString(xsdEnumVal);
+        if (enumString.find(findString) != std::string::npos &&
+            func(enumString.c_str(), &enumVal)) {
+            vec.push_back(enumVal);
+        }
+    }
+    return vec;
+}
+
+static const std::vector<audio_usage_t> kUsages =
+        getFlags<audio_usage_t, xsd::AudioUsage, decltype(audio_usage_from_string)>(
+                xsdc_enum_range<xsd::AudioUsage>{}, audio_usage_from_string);
+
+static const std::vector<audio_content_type_t> kContentType =
+        getFlags<audio_content_type_t, xsd::AudioContentType,
+                 decltype(audio_content_type_from_string)>(xsdc_enum_range<xsd::AudioContentType>{},
+                                                           audio_content_type_from_string);
+
+static const std::vector<audio_source_t> kInputSources =
+        getFlags<audio_source_t, xsd::AudioSource, decltype(audio_source_from_string)>(
+                xsdc_enum_range<xsd::AudioSource>{}, audio_source_from_string);
+
+static const std::vector<audio_stream_type_t> kStreamtypes =
+        getFlags<audio_stream_type_t, xsd::AudioStreamType,
+                 decltype(audio_stream_type_from_string)>(xsdc_enum_range<xsd::AudioStreamType>{},
+                                                          audio_stream_type_from_string);
+
+static const std::vector<uint32_t> kMixMatchRules = {RULE_MATCH_ATTRIBUTE_USAGE,
+                                                     RULE_EXCLUDE_ATTRIBUTE_USAGE,
+                                                     RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET,
+                                                     RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET,
+                                                     RULE_MATCH_UID,
+                                                     RULE_EXCLUDE_UID,
+                                                     RULE_MATCH_USERID,
+                                                     RULE_EXCLUDE_USERID,
+                                                     RULE_MATCH_AUDIO_SESSION_ID,
+                                                     RULE_EXCLUDE_AUDIO_SESSION_ID};
+
+// Generates a random string.
+std::string CreateRandomString(size_t n) {
+    std::string data =
+            "abcdefghijklmnopqrstuvwxyz"
+            "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+            "0123456789";
+    // Rely on the seed installed in the test's SetUp() (srand(mSeed)) so that a failing string
+    // can be reproduced from the seed printed in the failure message.
+    std::string s(n, ' ');
+    for (size_t i = 0; i < n; ++i) {
+        s[i] = data[rand() % data.size()];
+    }
+    return s;
+}
+
+class FillAudioAttributes {
+  public:
+    void fillAudioAttributes(audio_attributes_t& attr);
+
+    unsigned int mSeed;
+};
+
+void FillAudioAttributes::fillAudioAttributes(audio_attributes_t& attr) {
+    attr.content_type = kContentType[rand() % kContentType.size()];
+    attr.usage = kUsages[rand() % kUsages.size()];
+    attr.source = kInputSources[rand() % kInputSources.size()];
+    // attr.flags in [0, 0x3fff]: every flag bit up to and including AUDIO_FLAG_CAPTURE_PRIVATE
+    attr.flags = static_cast<audio_flags_mask_t>(rand() & 0x3fff);
+    sprintf(attr.tags, "%s",
+            CreateRandomString((int)rand() % (AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1)).c_str());
+}
+
+class SerializationTest : public FillAudioAttributes, public ::testing::Test {
+    void SetUp() override {
+        mSeed = static_cast<unsigned int>(time(0));
+        srand(mSeed);
+    }
+};
+
+// UNIT TESTS
+TEST_F(SerializationTest, AudioProductStrategyBinderization) {
+    for (int j = 0; j < 512; j++) {
+        const std::string name{"Test APSBinderization for seed::" + std::to_string(mSeed)};
+        std::vector<VolumeGroupAttributes> volumeGroupAttrVector;
+        for (auto i = 0; i < 16; i++) {
+            audio_attributes_t attributes;
+            fillAudioAttributes(attributes);
+            VolumeGroupAttributes volumeGroupAttr{static_cast<volume_group_t>(rand()),
+                                                  kStreamtypes[rand() % kStreamtypes.size()],
+                                                  attributes};
+            volumeGroupAttrVector.push_back(volumeGroupAttr);
+        }
+        product_strategy_t psId = static_cast<product_strategy_t>(rand());
+        AudioProductStrategy aps{name, volumeGroupAttrVector, psId};
+
+        Parcel p;
+        EXPECT_EQ(NO_ERROR, aps.writeToParcel(&p)) << name;
+
+        AudioProductStrategy apsCopy;
+        p.setDataPosition(0);
+        EXPECT_EQ(NO_ERROR, apsCopy.readFromParcel(&p)) << name;
+        EXPECT_EQ(apsCopy.getName(), name) << name;
+        EXPECT_EQ(apsCopy.getId(), psId) << name;
+        auto avec = apsCopy.getVolumeGroupAttributes();
+        EXPECT_EQ(avec.size(), volumeGroupAttrVector.size()) << name;
+        for (int i = 0; i < volumeGroupAttrVector.size(); i++) {
+            EXPECT_EQ(avec[i].getGroupId(), volumeGroupAttrVector[i].getGroupId()) << name;
+            EXPECT_EQ(avec[i].getStreamType(), volumeGroupAttrVector[i].getStreamType()) << name;
+            EXPECT_TRUE(avec[i].getAttributes() == volumeGroupAttrVector[i].getAttributes())
+                    << name;
+        }
+    }
+}
+
+TEST_F(SerializationTest, AudioVolumeGroupBinderization) {
+    for (int j = 0; j < 512; j++) {
+        const std::string name{"Test AVGBinderization for seed::" + std::to_string(mSeed)};
+        volume_group_t groupId = static_cast<volume_group_t>(rand());
+        std::vector<audio_attributes_t> attributesvector;
+        for (auto i = 0; i < 16; i++) {
+            audio_attributes_t attributes;
+            fillAudioAttributes(attributes);
+            attributesvector.push_back(attributes);
+        }
+        std::vector<audio_stream_type_t> streamsvector;
+        for (auto i = 0; i < 8; i++) {
+            streamsvector.push_back(kStreamtypes[rand() % kStreamtypes.size()]);
+        }
+        AudioVolumeGroup avg{name, groupId, attributesvector, streamsvector};
+
+        Parcel p;
+        EXPECT_EQ(NO_ERROR, avg.writeToParcel(&p));
+
+        AudioVolumeGroup avgCopy;
+        p.setDataPosition(0);
+        EXPECT_EQ(NO_ERROR, avgCopy.readFromParcel(&p)) << name;
+        EXPECT_EQ(avgCopy.getName(), name) << name;
+        EXPECT_EQ(avgCopy.getId(), groupId) << name;
+        auto avec = avgCopy.getAudioAttributes();
+        EXPECT_EQ(avec.size(), attributesvector.size()) << name;
+        for (int i = 0; i < avec.size(); i++) {
+            EXPECT_TRUE(avec[i] == attributesvector[i]) << name;
+        }
+        StreamTypeVector svec = avgCopy.getStreamTypes();
+        EXPECT_EQ(svec.size(), streamsvector.size()) << name;
+        for (int i = 0; i < svec.size(); i++) {
+            EXPECT_EQ(svec[i], streamsvector[i]) << name;
+        }
+    }
+}
+
+TEST_F(SerializationTest, AudioMixBinderization) {
+    for (int j = 0; j < 512; j++) {
+        const std::string msg{"Test AMBinderization for seed::" + std::to_string(mSeed)};
+        std::vector<AudioMixMatchCriterion> criteria;
+        criteria.reserve(16);
+        for (int i = 0; i < 16; i++) {
+            AudioMixMatchCriterion ammc{kUsages[rand() % kUsages.size()],
+                                        kInputSources[rand() % kInputSources.size()],
+                                        kMixMatchRules[rand() % kMixMatchRules.size()]};
+            criteria.push_back(ammc);
+        }
+        audio_config_t config{};
+        config.sample_rate = 48000;
+        config.channel_mask = AUDIO_CHANNEL_IN_MONO;
+        config.format = AUDIO_FORMAT_PCM_16_BIT;
+        config.offload_info = AUDIO_INFO_INITIALIZER;
+        config.frame_count = 4800;
+        AudioMix am{criteria,
+                    static_cast<uint32_t>(rand()),
+                    config,
+                    static_cast<uint32_t>(rand()),
+                    String8(msg.c_str()),
+                    static_cast<uint32_t>(rand())};
+
+        Parcel p;
+        EXPECT_EQ(NO_ERROR, am.writeToParcel(&p)) << msg;
+
+        AudioMix amCopy;
+        p.setDataPosition(0);
+        EXPECT_EQ(NO_ERROR, amCopy.readFromParcel(&p)) << msg;
+        EXPECT_EQ(amCopy.mMixType, am.mMixType) << msg;
+        EXPECT_EQ(amCopy.mFormat.sample_rate, am.mFormat.sample_rate) << msg;
+        EXPECT_EQ(amCopy.mFormat.channel_mask, am.mFormat.channel_mask) << msg;
+        EXPECT_EQ(amCopy.mFormat.format, am.mFormat.format) << msg;
+        EXPECT_EQ(amCopy.mRouteFlags, am.mRouteFlags) << msg;
+        EXPECT_EQ(amCopy.mDeviceAddress, am.mDeviceAddress) << msg;
+        EXPECT_EQ(amCopy.mCbFlags, am.mCbFlags) << msg;
+        EXPECT_EQ(amCopy.mCriteria.size(), am.mCriteria.size()) << msg;
+        for (auto i = 0; i < amCopy.mCriteria.size(); i++) {
+            EXPECT_EQ(amCopy.mCriteria[i].mRule, am.mCriteria[i].mRule) << msg;
+            EXPECT_EQ(amCopy.mCriteria[i].mValue.mUserId, am.mCriteria[i].mValue.mUserId) << msg;
+        }
+    }
+}
+
+using MMCTestParams = std::tuple<audio_usage_t, audio_source_t, uint32_t>;
+
+class MMCParameterizedTest : public FillAudioAttributes,
+                             public ::testing::TestWithParam<MMCTestParams> {
+  public:
+    MMCParameterizedTest()
+        : mAudioUsage(std::get<0>(GetParam())),
+          mAudioSource(std::get<1>(GetParam())),
+          mAudioMixMatchRules(std::get<2>(GetParam())){};
+
+    const audio_usage_t mAudioUsage;
+    const audio_source_t mAudioSource;
+    const uint32_t mAudioMixMatchRules;
+
+    void SetUp() override {
+        mSeed = static_cast<unsigned int>(time(0));
+        srand(mSeed);
+    }
+};
+
+TEST_P(MMCParameterizedTest, AudioMixMatchCriterionBinderization) {
+    const std::string msg{"Test AMMCBinderization for seed::" + std::to_string(mSeed)};
+    AudioMixMatchCriterion ammc{mAudioUsage, mAudioSource, mAudioMixMatchRules};
+
+    Parcel p;
+    EXPECT_EQ(NO_ERROR, ammc.writeToParcel(&p)) << msg;
+
+    AudioMixMatchCriterion ammcCopy;
+    p.setDataPosition(0);
+    EXPECT_EQ(NO_ERROR, ammcCopy.readFromParcel(&p)) << msg;
+    EXPECT_EQ(ammcCopy.mRule, ammc.mRule) << msg;
+    EXPECT_EQ(ammcCopy.mValue.mUserId, ammc.mValue.mUserId) << msg;
+}
+
+// audioUsage, audioSource, audioMixMatchRules
+INSTANTIATE_TEST_SUITE_P(SerializationParameterizedTests, MMCParameterizedTest,
+                         ::testing::Combine(testing::ValuesIn(kUsages),
+                                            testing::ValuesIn(kInputSources),
+                                            testing::ValuesIn(kMixMatchRules)));
+
+using AudioAttributesTestParams = std::tuple<audio_stream_type_t>;
+
+class AudioAttributesParameterizedTest
+    : public FillAudioAttributes,
+      public ::testing::TestWithParam<AudioAttributesTestParams> {
+  public:
+    AudioAttributesParameterizedTest() : mAudioStream(std::get<0>(GetParam())){};
+
+    const audio_stream_type_t mAudioStream;
+
+    void SetUp() override {
+        mSeed = static_cast<unsigned int>(time(0));
+        srand(mSeed);
+    }
+};
+
+TEST_P(AudioAttributesParameterizedTest, AudioAttributesBinderization) {
+    const std::string msg{"Test AABinderization for seed::" + std::to_string(mSeed)};
+    volume_group_t groupId = static_cast<volume_group_t>(rand());
+    audio_stream_type_t stream = mAudioStream;
+    audio_attributes_t attributes;
+    fillAudioAttributes(attributes);
+    VolumeGroupAttributes volumeGroupAttr{groupId, stream, attributes};
+
+    Parcel p;
+    EXPECT_EQ(NO_ERROR, volumeGroupAttr.writeToParcel(&p)) << msg;
+
+    VolumeGroupAttributes volumeGroupAttrCopy;
+    p.setDataPosition(0);
+    EXPECT_EQ(NO_ERROR, volumeGroupAttrCopy.readFromParcel(&p)) << msg;
+    EXPECT_EQ(volumeGroupAttrCopy.getGroupId(), volumeGroupAttr.getGroupId()) << msg;
+    EXPECT_EQ(volumeGroupAttrCopy.getStreamType(), volumeGroupAttr.getStreamType()) << msg;
+    EXPECT_TRUE(volumeGroupAttrCopy.getAttributes() == attributes) << msg;
+}
+
+// audioStream
+INSTANTIATE_TEST_SUITE_P(SerializationParameterizedTests, AudioAttributesParameterizedTest,
+                         ::testing::Combine(testing::ValuesIn(kStreamtypes)));
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
new file mode 100644
index 0000000..94accae
--- /dev/null
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioEffectAnalyser"
+
+#include <android-base/file.h>
+#include <android-base/stringprintf.h>
+#include <gtest/gtest.h>
+#include <media/AudioEffect.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_equalizer.h>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "audio_test_utils.h"
+#include "pffft.hpp"
+
+#define CHECK_OK(expr, msg) \
+    mStatus = (expr);       \
+    if (OK != mStatus) {    \
+        mMsg = (msg);       \
+        return;             \
+    }
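+// Records the first failure in the caller's mStatus / mMsg members and returns from the calling
+// void method; intended to be used as a complete statement inside CaptureEnv / PlaybackEnv.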
+
+using namespace android;
+
+constexpr float kDefAmplitude = 0.60f;
+
+constexpr float kPlayBackDurationSec = 1.5;
+constexpr float kCaptureDurationSec = 1.0;
+constexpr float kPrimeDurationInSec = 0.5;
+
+// chosen to safely sample largest center freq of eq bands
+constexpr uint32_t kSamplingFrequency = 48000;
+
+// PCM float avoids any format conversion ahead of the FFT analysis
+constexpr audio_format_t kFormat = AUDIO_FORMAT_PCM_FLOAT;
+
+// Playback and capture are configured with mono channel masks; the effect analysis should not
+// depend on the channel mask, and mono keeps the analysis simple.
+
+constexpr int kNPointFFT = 16384;
+constexpr float kBinWidth = (float)kSamplingFrequency / kNPointFFT;
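+// e.g. 48000 / 16384 ~= 2.93 Hz of spectrum per FFT bin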
+
+const char* gPackageName = "AudioEffectAnalyser";
+
+static_assert(kPrimeDurationInSec + 2 * kNPointFFT / kSamplingFrequency < kCaptureDurationSec,
+              "capture at least, prime, pad, nPointFft size of samples");
+static_assert(kPrimeDurationInSec + 2 * kNPointFFT / kSamplingFrequency < kPlayBackDurationSec,
+              "playback needs to be active during capture");
+
+struct CaptureEnv {
+    // input args
+    uint32_t mSampleRate{kSamplingFrequency};
+    audio_format_t mFormat{kFormat};
+    audio_channel_mask_t mChannelMask{AUDIO_CHANNEL_IN_MONO};
+    float mCaptureDuration{kCaptureDurationSec};
+    // output val
+    status_t mStatus{OK};
+    std::string mMsg;
+    std::string mDumpFileName;
+
+    ~CaptureEnv();
+    void capture();
+};
+
+CaptureEnv::~CaptureEnv() {
+    if (!mDumpFileName.empty()) {
+        std::ifstream f(mDumpFileName);
+        if (f.good()) {
+            f.close();
+            remove(mDumpFileName.c_str());
+        }
+    }
+}
+
+void CaptureEnv::capture() {
+    audio_port_v7 port;
+    CHECK_OK(getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port),
+             "Could not find port")
+    const auto capture =
+            sp<AudioCapture>::make(AUDIO_SOURCE_REMOTE_SUBMIX, mSampleRate, mFormat, mChannelMask);
+    CHECK_OK(capture->create(), "record creation failed")
+    CHECK_OK(capture->setRecordDuration(mCaptureDuration), "set record duration failed")
+    CHECK_OK(capture->enableRecordDump(), "enable record dump failed")
+    auto cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
+    CHECK_OK(capture->getAudioRecordHandle()->addAudioDeviceCallback(cbCapture),
+             "addAudioDeviceCallback failed")
+    CHECK_OK(capture->start(), "start recording failed")
+    CHECK_OK(capture->audioProcess(), "recording process failed")
+    CHECK_OK(cbCapture->waitForAudioDeviceCb(), "audio device callback notification timed out");
+    if (port.id != capture->getAudioRecordHandle()->getRoutedDeviceId()) {
+        CHECK_OK(BAD_VALUE, "Capture NOT routed on expected port")
+    }
+    CHECK_OK(getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "0", port),
+             "Could not find port")
+    CHECK_OK(capture->stop(), "record stop failed")
+    mDumpFileName = capture->getRecordDumpFileName();
+}
+
+struct PlaybackEnv {
+    // input args
+    uint32_t mSampleRate{kSamplingFrequency};
+    audio_format_t mFormat{kFormat};
+    audio_channel_mask_t mChannelMask{AUDIO_CHANNEL_OUT_MONO};
+    audio_session_t mSessionId{AUDIO_SESSION_NONE};
+    std::string mRes;
+    // output val
+    status_t mStatus{OK};
+    std::string mMsg;
+
+    void play();
+};
+
+void PlaybackEnv::play() {
+    const auto ap =
+            sp<AudioPlayback>::make(mSampleRate, mFormat, mChannelMask, AUDIO_OUTPUT_FLAG_NONE,
+                                    mSessionId, AudioTrack::TRANSFER_OBTAIN);
+    CHECK_OK(ap->loadResource(mRes.c_str()), "Unable to open Resource")
+    const auto cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
+    CHECK_OK(ap->create(), "track creation failed")
+    ap->getAudioTrackHandle()->setVolume(1.0f);
+    CHECK_OK(ap->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback),
+             "addAudioDeviceCallback failed")
+    CHECK_OK(ap->start(), "audio track start failed")
+    CHECK_OK(cbPlayback->waitForAudioDeviceCb(), "audio device callback notification timed out")
+    CHECK_OK(ap->onProcess(), "playback process failed")
+    ap->stop();
+}
+
+void generateMultiTone(const std::vector<int>& toneFrequencies, float samplingFrequency,
+                       float duration, float amplitude, float* buffer, int numSamples) {
+    int totalFrameCount = (samplingFrequency * duration);
+    int limit = std::min(totalFrameCount, numSamples);
+
+    for (auto i = 0; i < limit; i++) {
+        buffer[i] = 0;
+        for (auto j = 0; j < toneFrequencies.size(); j++) {
+            buffer[i] += sin(2 * M_PI * toneFrequencies[j] * i / samplingFrequency);
+        }
+        buffer[i] *= (amplitude / toneFrequencies.size());
+    }
+}
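+// Scaling each tone by amplitude / toneFrequencies.size() keeps the summed signal within
+// [-amplitude, amplitude], so the generated multi-tone stimulus cannot clip.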
+
+sp<AudioEffect> createEffect(const effect_uuid_t* type,
+                             audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX) {
+    std::string packageName{gPackageName};
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = packageName;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    sp<AudioEffect> effect = sp<AudioEffect>::make(attributionSource);
+    effect->set(type, nullptr, 0, nullptr, sessionId, AUDIO_IO_HANDLE_NONE, {}, false, false);
+    return effect;
+}
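+// The tests below pass a dedicated audio session id so the effect is attached to the test
+// track only, instead of the global output mix implied by the default argument above.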
+
+void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int>& binOffsets,
+                               float* inputMag, float* gaindB, const char* res,
+                               audio_session_t sessionId) {
+    int totalFrameCount = captureDuration * kSamplingFrequency;
+    auto output = pffft::AlignedVector<float>(totalFrameCount);
+    auto fftOutput = pffft::AlignedVector<float>(nPointFft);
+    PlaybackEnv argsP;
+    argsP.mRes = std::string{res};
+    argsP.mSessionId = sessionId;
+    CaptureEnv argsR;
+    argsR.mCaptureDuration = captureDuration;
+    std::thread playbackThread(&PlaybackEnv::play, &argsP);
+    std::thread captureThread(&CaptureEnv::capture, &argsR);
+    captureThread.join();
+    playbackThread.join();
+    ASSERT_EQ(OK, argsR.mStatus) << argsR.mMsg;
+    ASSERT_EQ(OK, argsP.mStatus) << argsP.mMsg;
+    ASSERT_FALSE(argsR.mDumpFileName.empty()) << "recorded not written to file";
+    std::ifstream fin(argsR.mDumpFileName, std::ios::in | std::ios::binary);
+    fin.read((char*)output.data(), totalFrameCount * sizeof(output[0]));
+    fin.close();
+    PFFFT_Setup* handle = pffft_new_setup(nPointFft, PFFFT_REAL);
+    // Ignore the first few samples: analysis must not start until the audio track has been
+    // re-routed to the remote submix source and the effect filter response has reached steady
+    // state (priming / pruning samples).
+    int rerouteOffset = kPrimeDurationInSec * kSamplingFrequency;
+    pffft_transform_ordered(handle, output.data() + rerouteOffset, fftOutput.data(), nullptr,
+                            PFFFT_FORWARD);
+    pffft_destroy_setup(handle);
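+    // Gain per probed bin is the output/input magnitude ratio expressed in dB; e.g. a ratio of
+    // 2.0 is roughly +6.02 dB.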
+    for (auto i = 0; i < binOffsets.size(); i++) {
+        auto k = binOffsets[i];
+        auto outputMag = sqrt((fftOutput[k * 2] * fftOutput[k * 2]) +
+                              (fftOutput[k * 2 + 1] * fftOutput[k * 2 + 1]));
+        gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+    }
+}
+
+std::tuple<int, int> roundToFreqCenteredToFftBin(float binWidth, float freq) {
+    int bin_index = std::round(freq / binWidth);
+    int cfreq = std::round(bin_index * binWidth);
+    return std::make_tuple(bin_index, cfreq);
+}
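+// e.g. (hypothetical values) with binWidth ~= 2.93 Hz and freq = 100 Hz this returns
+// {34, 100}: the nearest FFT bin index and that bin's center frequency rounded to whole Hz.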
+
+TEST(AudioEffectTest, CheckEqualizerEffect) {
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> equalizer = createEffect(SL_IID_EQUALIZER, sessionId);
+    ASSERT_EQ(OK, equalizer->initCheck());
+    ASSERT_EQ(NO_ERROR, equalizer->setEnabled(true));
+    if ((equalizer->descriptor().flags & EFFECT_FLAG_HW_ACC_MASK) != 0) {
+        GTEST_SKIP() << "effect processed output inaccessible, skipping test";
+    }
+#define MAX_PARAMS 64
+    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + MAX_PARAMS];
+    effect_param_t* eqParam = (effect_param_t*)(&buf32);
+
+    // get num of presets
+    eqParam->psize = sizeof(uint32_t);
+    eqParam->vsize = sizeof(uint16_t);
+    *(int32_t*)eqParam->data = EQ_PARAM_GET_NUM_OF_PRESETS;
+    EXPECT_EQ(0, equalizer->getParameter(eqParam));
+    EXPECT_EQ(0, eqParam->status);
+    int numPresets = *((uint16_t*)((int32_t*)eqParam->data + 1));
+
+    // get num of bands
+    eqParam->psize = sizeof(uint32_t);
+    eqParam->vsize = sizeof(uint16_t);
+    *(int32_t*)eqParam->data = EQ_PARAM_NUM_BANDS;
+    EXPECT_EQ(0, equalizer->getParameter(eqParam));
+    EXPECT_EQ(0, eqParam->status);
+    int numBands = *((uint16_t*)((int32_t*)eqParam->data + 1));
+
+    const int totalFrameCount = kSamplingFrequency * kPlayBackDurationSec;
+
+    // get band center frequencies
+    std::vector<int> centerFrequencies;
+    std::vector<int> binOffsets;
+    for (auto i = 0; i < numBands; i++) {
+        eqParam->psize = sizeof(uint32_t) * 2;
+        eqParam->vsize = sizeof(uint32_t);
+        *(int32_t*)eqParam->data = EQ_PARAM_CENTER_FREQ;
+        *((uint16_t*)((int32_t*)eqParam->data + 1)) = i;
+        EXPECT_EQ(0, equalizer->getParameter(eqParam));
+        EXPECT_EQ(0, eqParam->status);
+        float cfreq = *((int32_t*)eqParam->data + 2) / 1000;  // reported in millihertz
+        // pick frequency close to bin center frequency
+        auto [bin_index, bin_freq] = roundToFreqCenteredToFftBin(kBinWidth, cfreq);
+        centerFrequencies.push_back(bin_freq);
+        binOffsets.push_back(bin_index);
+    }
+
+    // input for effect module
+    auto input = pffft::AlignedVector<float>(totalFrameCount);
+    generateMultiTone(centerFrequencies, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
+                      input.data(), totalFrameCount);
+    auto fftInput = pffft::AlignedVector<float>(kNPointFFT);
+    PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, PFFFT_REAL);
+    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr, PFFFT_FORWARD);
+    pffft_destroy_setup(handle);
+    float inputMag[numBands];
+    for (auto i = 0; i < numBands; i++) {
+        auto k = binOffsets[i];
+        inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
+                           (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
+    }
+    TemporaryFile tf("/data/local/tmp");
+    close(tf.release());
+    std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
+    fout.write((char*)input.data(), input.size() * sizeof(input[0]));
+    fout.close();
+
+    float expGaindB[numBands], actGaindB[numBands];
+
+    std::string msg = "";
+    int numPresetsOk = 0;
+    for (auto preset = 0; preset < numPresets; preset++) {
+        // set preset
+        eqParam->psize = sizeof(uint32_t);
+        eqParam->vsize = sizeof(uint32_t);
+        *(int32_t*)eqParam->data = EQ_PARAM_CUR_PRESET;
+        *((uint16_t*)((int32_t*)eqParam->data + 1)) = preset;
+        EXPECT_EQ(0, equalizer->setParameter(eqParam));
+        EXPECT_EQ(0, eqParam->status);
+        // get preset gains
+        eqParam->psize = sizeof(uint32_t);
+        eqParam->vsize = (numBands + 1) * sizeof(uint32_t);
+        *(int32_t*)eqParam->data = EQ_PARAM_PROPERTIES;
+        EXPECT_EQ(0, equalizer->getParameter(eqParam));
+        EXPECT_EQ(0, eqParam->status);
+        t_equalizer_settings* settings =
+                reinterpret_cast<t_equalizer_settings*>((int32_t*)eqParam->data + 1);
+        EXPECT_EQ(preset, settings->curPreset);
+        EXPECT_EQ(numBands, settings->numBands);
+        for (auto i = 0; i < numBands; i++) {
+            expGaindB[i] = ((int16_t)settings->bandLevels[i]) / 100.0f;  // band level is in millibels, convert to dB
+        }
+        memset(actGaindB, 0, sizeof(actGaindB));
+        ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT,
+                                                          binOffsets, inputMag, actGaindB, tf.path,
+                                                          sessionId));
+        bool isOk = true;
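+        // compare gain differences between adjacent bands rather than absolute gains; a common
+        // level offset (in dB) introduced by the playback/capture path cancels out of the difference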
+        for (auto i = 0; i < numBands - 1; i++) {
+            auto diffA = expGaindB[i] - expGaindB[i + 1];
+            auto diffB = actGaindB[i] - actGaindB[i + 1];
+            if (diffA == 0 && fabs(diffA - diffB) > 1.0f) {
+                msg += (android::base::StringPrintf(
+                        "For eq preset %d, between bands %d and %d, expected relative gain %f, "
+                        "measured relative gain %f, error %f\n",
+                        preset, i, i + 1, diffA, diffB, diffA - diffB));
+                isOk = false;
+            } else if (diffA * diffB < 0) {
+                msg += (android::base::StringPrintf(
+                        "For eq preset %d, between bands %d and %d, expected and measured relative "
+                        "gains have opposite signs. Expected relative gain %f, measured relative "
+                        "gain %f\n",
+                        preset, i, i + 1, diffA, diffB));
+                isOk = false;
+            }
+        }
+        if (isOk) numPresetsOk++;
+    }
+    EXPECT_EQ(numPresetsOk, numPresets) << msg;
+}
+
+TEST(AudioEffectTest, CheckBassBoostEffect) {
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> bassboost = createEffect(SL_IID_BASSBOOST, sessionId);
+    ASSERT_EQ(OK, bassboost->initCheck());
+    ASSERT_EQ(NO_ERROR, bassboost->setEnabled(true));
+    if ((bassboost->descriptor().flags & EFFECT_FLAG_HW_ACC_MASK) != 0) {
+        GTEST_SKIP() << "effect processed output inaccessible, skipping test";
+    }
+    int32_t buf32[sizeof(effect_param_t) / sizeof(int32_t) + MAX_PARAMS];
+    effect_param_t* bbParam = (effect_param_t*)(&buf32);
+
+    bbParam->psize = sizeof(int32_t);
+    bbParam->vsize = sizeof(int32_t);
+    *(int32_t*)bbParam->data = BASSBOOST_PARAM_STRENGTH_SUPPORTED;
+    EXPECT_EQ(0, bassboost->getParameter(bbParam));
+    EXPECT_EQ(0, bbParam->status);
+    bool strengthSupported = *((int32_t*)bbParam->data + 1);
+
+    const int totalFrameCount = kSamplingFrequency * kPlayBackDurationSec;
+
+    // pick a bass tone and a speech-band reference tone for measuring relative gain
+    std::vector<int> testFrequencies{100, 1200};
+    std::vector<int> binOffsets;
+    for (auto i = 0; i < testFrequencies.size(); i++) {
+        // pick frequency close to bin center frequency
+        auto [bin_index, bin_freq] = roundToFreqCenteredToFftBin(kBinWidth, testFrequencies[i]);
+        testFrequencies[i] = bin_freq;
+        binOffsets.push_back(bin_index);
+    }
+
+    // input for effect module
+    auto input = pffft::AlignedVector<float>(totalFrameCount);
+    generateMultiTone(testFrequencies, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
+                      input.data(), totalFrameCount);
+    auto fftInput = pffft::AlignedVector<float>(kNPointFFT);
+    PFFFT_Setup* handle = pffft_new_setup(kNPointFFT, PFFFT_REAL);
+    pffft_transform_ordered(handle, input.data(), fftInput.data(), nullptr, PFFFT_FORWARD);
+    pffft_destroy_setup(handle);
+    float inputMag[testFrequencies.size()];
+    for (auto i = 0; i < testFrequencies.size(); i++) {
+        auto k = binOffsets[i];
+        inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
+                           (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
+    }
+    TemporaryFile tf("/data/local/tmp");
+    close(tf.release());
+    std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
+    fout.write((char*)input.data(), input.size() * sizeof(input[0]));
+    fout.close();
+
+    float gainWithOutFilter[testFrequencies.size()];
+    memset(gainWithOutFilter, 0, sizeof(gainWithOutFilter));
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, binOffsets,
+                                                      inputMag, gainWithOutFilter, tf.path,
+                                                      AUDIO_SESSION_OUTPUT_MIX));
+    float diffA = gainWithOutFilter[0] - gainWithOutFilter[1];
+    float prevGain = -100.f;
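+    // sweep the boost strength (single iteration if strength control is unsupported); the
+    // bass-to-reference gain difference should exceed the unprocessed baseline and should not
+    // fall as the strength increases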
+    for (auto strength = 150; strength < 1000; strength += strengthSupported ? 150 : 1000) {
+        // configure filter strength
+        if (strengthSupported) {
+            bbParam->psize = sizeof(int32_t);
+            bbParam->vsize = sizeof(int16_t);
+            *(int32_t*)bbParam->data = BASSBOOST_PARAM_STRENGTH;
+            *((int16_t*)((int32_t*)bbParam->data + 1)) = strength;
+            EXPECT_EQ(0, bassboost->setParameter(bbParam));
+            EXPECT_EQ(0, bbParam->status);
+        }
+        float gainWithFilter[testFrequencies.size()];
+        memset(gainWithFilter, 0, sizeof(gainWithFilter));
+        ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT,
+                                                          binOffsets, inputMag, gainWithFilter,
+                                                          tf.path, sessionId));
+        float diffB = gainWithFilter[0] - gainWithFilter[1];
+        EXPECT_GT(diffB, diffA) << "bass boost effect not observed";
+        EXPECT_GE(diffB, prevGain) << "increasing boost strength caused the relative gain to drop";
+        prevGain = diffB;
+    }
+}
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
new file mode 100644
index 0000000..e6149e4
--- /dev/null
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -0,0 +1,559 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioEffectUnitTests"
+
+#include <gtest/gtest.h>
+#include <media/AudioEffect.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_spatializer.h>
+#include <system/audio_effects/effect_visualizer.h>
+
+#include "audio_test_utils.h"
+
+using namespace android;
+
+class AudioEffectCallback : public AudioEffect::IAudioEffectCallback {
+  public:
+    bool receivedFramesProcessed = false;
+
+    void onFramesProcessed(int32_t framesProcessed) override {
+        ALOGE("number of frames processed %d", framesProcessed);
+        receivedFramesProcessed = true;
+    }
+};
+
+static constexpr int kDefaultInputEffectPriority = -1;
+static constexpr int kDefaultOutputEffectPriority = 0;
+
+static const char* gPackageName = "AudioEffectTest";
+
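+// Returns true if any output mix port reports the FAST output flag in its active configuration,
+// i.e. the device exposes a low latency output path.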
+bool doesDeviceSupportLowLatencyMode(std::vector<struct audio_port_v7>& ports) {
+    for (const auto& port : ports) {
+        if (port.role == AUDIO_PORT_ROLE_SOURCE && port.type == AUDIO_PORT_TYPE_MIX) {
+            if ((port.active_config.flags.output & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+sp<AudioEffect> createEffect(const effect_uuid_t* type, const effect_uuid_t* uuid = nullptr,
+                             int priority = 0, audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+                             const wp<AudioEffectCallback>& callback = nullptr) {
+    std::string packageName{gPackageName};
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = packageName;
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    sp<AudioEffect> effect = new AudioEffect(attributionSource);
+    effect->set(type, uuid, priority, callback, sessionId, AUDIO_IO_HANDLE_NONE, {}, false,
+                (callback != nullptr));
+    return effect;
+}
+
+status_t isEffectExistsOnAudioSession(const effect_uuid_t* type, const effect_uuid_t* uuid,
+                                      int priority, audio_session_t sessionId) {
+    sp<AudioEffect> effect = createEffect(type, uuid, priority, sessionId);
+    return effect->initCheck();
+}
+
+bool isEffectDefaultOnRecord(const effect_uuid_t* type, const effect_uuid_t* uuid,
+                             const sp<AudioRecord>& audioRecord) {
+    effect_descriptor_t descriptors[AudioEffect::kMaxPreProcessing];
+    uint32_t numEffects = AudioEffect::kMaxPreProcessing;
+    status_t ret = AudioEffect::queryDefaultPreProcessing(audioRecord->getSessionId(), descriptors,
+                                                          &numEffects);
+    if (ret != OK) {
+        return false;
+    }
+    for (int i = 0; i < numEffects; i++) {
+        if ((memcmp(&descriptors[i].type, type, sizeof(effect_uuid_t)) == 0) &&
+            (memcmp(&descriptors[i].uuid, uuid, sizeof(effect_uuid_t)) == 0)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+void listEffectsAvailable(std::vector<effect_descriptor_t>& descriptors) {
+    uint32_t numEffects = 0;
+    ASSERT_EQ(NO_ERROR, AudioEffect::queryNumberEffects(&numEffects));
+    for (auto i = 0; i < numEffects; i++) {
+        effect_descriptor_t des;
+        ASSERT_EQ(NO_ERROR, AudioEffect::queryEffect(i, &des));
+        descriptors.push_back(des);
+    }
+}
+
+bool isPreprocessing(effect_descriptor_t& descriptor) {
+    return ((descriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC);
+}
+
+bool isInsert(effect_descriptor_t& descriptor) {
+    return ((descriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT);
+}
+
+bool isAux(effect_descriptor_t& descriptor) {
+    return ((descriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY);
+}
+
+bool isPostproc(effect_descriptor_t& descriptor) {
+    return ((descriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC);
+}
+
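+// An effect is considered fast compatible if it is hardware accelerated or does not process the
+// audio data, and can therefore be attached to a fast (low latency) track.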
+bool isFastCompatible(effect_descriptor_t& descriptor) {
+    return !(((descriptor.flags & EFFECT_FLAG_HW_ACC_MASK) == 0) &&
+             ((descriptor.flags & EFFECT_FLAG_NO_PROCESS) == 0));
+}
+
+bool isSpatializer(effect_descriptor_t& descriptor) {
+    return (memcmp(&descriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0);
+}
+
+bool isHapticGenerator(effect_descriptor_t& descriptor) {
+    return (memcmp(&descriptor.type, FX_IID_HAPTICGENERATOR, sizeof(effect_uuid_t)) == 0);
+}
+
+std::tuple<std::string, std::string> typeAndUuidToString(const effect_descriptor_t& desc) {
+    char type[512];
+    AudioEffect::guidToString(&desc.type, type, sizeof(type));
+    char uuid[512];
+    AudioEffect::guidToString(&desc.uuid, uuid, sizeof(uuid));
+    return std::make_tuple(type, uuid);
+}
+
+// UNIT TESTS
+TEST(AudioEffectTest, getEffectDescriptor) {
+    effect_uuid_t randomType = {
+            0x81781c08, 0x93dd, 0x11ec, 0xb909, {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
+    effect_uuid_t randomUuid = {
+            0x653730e1, 0x1be1, 0x438e, 0xa35a, {0xfc, 0x9b, 0xa1, 0x2a, 0x5e, 0xc9}};
+    effect_uuid_t empty = EFFECT_UUID_INITIALIZER;
+
+    effect_descriptor_t descriptor;
+    EXPECT_EQ(NAME_NOT_FOUND, AudioEffect::getEffectDescriptor(&randomUuid, &randomType,
+                                                               EFFECT_FLAG_TYPE_MASK, &descriptor));
+
+    std::vector<effect_descriptor_t> descriptors;
+    ASSERT_NO_FATAL_FAILURE(listEffectsAvailable(descriptors));
+
+    for (auto i = 0; i < descriptors.size(); i++) {
+        EXPECT_EQ(NO_ERROR,
+                  AudioEffect::getEffectDescriptor(&descriptors[i].uuid, &descriptors[i].type,
+                                                   EFFECT_FLAG_TYPE_MASK, &descriptor));
+        EXPECT_EQ(0, memcmp(&descriptor, &descriptors[i], sizeof(effect_uuid_t)));
+    }
+    // negative tests
+    if (descriptors.size() > 0) {
+        EXPECT_EQ(BAD_VALUE,
+                  AudioEffect::getEffectDescriptor(&descriptors[0].uuid, &descriptors[0].type,
+                                                   EFFECT_FLAG_TYPE_MASK, nullptr));
+    }
+    EXPECT_EQ(BAD_VALUE, AudioEffect::getEffectDescriptor(nullptr, nullptr,
+                                                          EFFECT_FLAG_TYPE_PRE_PROC, &descriptor));
+    EXPECT_EQ(BAD_VALUE, AudioEffect::getEffectDescriptor(&empty, &randomType,
+                                                          EFFECT_FLAG_TYPE_MASK, nullptr));
+    EXPECT_EQ(BAD_VALUE, AudioEffect::getEffectDescriptor(nullptr, &randomType,
+                                                          EFFECT_FLAG_TYPE_POST_PROC, &descriptor));
+    EXPECT_EQ(BAD_VALUE, AudioEffect::getEffectDescriptor(&randomUuid, nullptr,
+                                                          EFFECT_FLAG_TYPE_INSERT, &descriptor));
+}
+
+TEST(AudioEffectTest, DISABLED_GetSetParameterForEffect) {
+    sp<AudioEffect> visualizer = createEffect(SL_IID_VISUALIZATION);
+    status_t status = visualizer->initCheck();
+    ASSERT_TRUE(status == NO_ERROR || status == ALREADY_EXISTS) << "Init check error";
+    ASSERT_EQ(NO_ERROR, visualizer->setEnabled(true)) << "visualizer not enabled";
+
+    uint32_t buf32[3][sizeof(effect_param_t) / sizeof(uint32_t) + 2];
+    effect_param_t* vis_none = (effect_param_t*)(buf32[0]);
+    effect_param_t* vis_rms = (effect_param_t*)(buf32[1]);
+    effect_param_t* vis_tmp = (effect_param_t*)(buf32[2]);
+
+    // Visualizer::setMeasurementMode()
+    vis_none->psize = sizeof(uint32_t);
+    vis_none->vsize = sizeof(uint32_t);
+    *(int32_t*)vis_none->data = VISUALIZER_PARAM_MEASUREMENT_MODE;
+    *((int32_t*)vis_none->data + 1) = MEASUREMENT_MODE_NONE;
+    EXPECT_EQ(NO_ERROR, visualizer->setParameter(vis_none))
+            << "setMeasurementMode doesn't report success";
+
+    // Visualizer::getMeasurementMode()
+    vis_tmp->psize = sizeof(uint32_t);
+    vis_tmp->vsize = sizeof(uint32_t);
+    *(int32_t*)vis_tmp->data = VISUALIZER_PARAM_MEASUREMENT_MODE;
+    *((int32_t*)vis_tmp->data + 1) = 23;
+    EXPECT_EQ(NO_ERROR, visualizer->getParameter(vis_tmp))
+            << "getMeasurementMode doesn't report success";
+    EXPECT_EQ(*((int32_t*)vis_tmp->data + 1), *((int32_t*)vis_none->data + 1))
+            << "target mode does not match set mode";
+
+    // Visualizer::setMeasurementModeDeferred()
+    vis_rms->psize = sizeof(uint32_t);
+    vis_rms->vsize = sizeof(uint32_t);
+    *(int32_t*)vis_rms->data = VISUALIZER_PARAM_MEASUREMENT_MODE;
+    *((int32_t*)vis_rms->data + 1) = MEASUREMENT_MODE_PEAK_RMS;
+    EXPECT_EQ(NO_ERROR, visualizer->setParameterDeferred(vis_rms))
+            << "setMeasurementModeDeferred doesn't report success";
+
+    *((int32_t*)vis_tmp->data + 1) = 23;
+    EXPECT_EQ(NO_ERROR, visualizer->getParameter(vis_tmp))
+            << "getMeasurementMode doesn't report success";
+    EXPECT_EQ(*((int32_t*)vis_tmp->data + 1), *((int32_t*)vis_none->data + 1))
+            << "target mode does not match set mode";
+
+    // setParameterCommit
+    EXPECT_EQ(NO_ERROR, visualizer->setParameterCommit())
+            << "setMeasurementModeCommit does not report success";
+
+    // validate Params
+    *((int32_t*)vis_tmp->data + 1) = 23;
+    EXPECT_EQ(NO_ERROR, visualizer->getParameter(vis_tmp))
+            << "getMeasurementMode doesn't report success";
+    EXPECT_EQ(*((int32_t*)vis_tmp->data + 1), *((int32_t*)vis_rms->data + 1))
+            << "target mode does not match set mode";
+}
+
+TEST(AudioEffectTest, ManageSourceDefaultEffects) {
+    int32_t selectedEffect = -1;
+
+    const uint32_t sampleRate = 44100;
+    const audio_format_t format = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_STEREO;
+    sp<AudioCapture> capture = nullptr;
+
+    std::vector<effect_descriptor_t> descriptors;
+    ASSERT_NO_FATAL_FAILURE(listEffectsAvailable(descriptors));
+    for (auto i = 0; i < descriptors.size(); i++) {
+        if (isPreprocessing(descriptors[i])) {
+            capture = new AudioCapture(AUDIO_SOURCE_MIC, sampleRate, format, channelMask);
+            ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
+            EXPECT_EQ(NO_ERROR, capture->create());
+            EXPECT_EQ(NO_ERROR, capture->start());
+            if (!isEffectDefaultOnRecord(&descriptors[i].type, &descriptors[i].uuid,
+                                         capture->getAudioRecordHandle())) {
+                selectedEffect = i;
+                break;
+            }
+        }
+    }
+    if (selectedEffect == -1) GTEST_SKIP() << "expected at least one preprocessing effect";
+
+    effect_uuid_t* selectedEffectType = &descriptors[selectedEffect].type;
+    effect_uuid_t* selectedEffectUuid = &descriptors[selectedEffect].uuid;
+    auto [type, uuid] = typeAndUuidToString(descriptors[selectedEffect]);
+    capture = new AudioCapture(AUDIO_SOURCE_MIC, sampleRate, format, channelMask);
+    ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
+    EXPECT_EQ(NO_ERROR, capture->create());
+    EXPECT_EQ(NO_ERROR, capture->start());
+    EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
+                                         capture->getAudioRecordHandle()))
+            << "Effect should not have been default on record. " << type;
+    EXPECT_EQ(NO_ERROR,
+              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
+                                           kDefaultInputEffectPriority - 1,
+                                           capture->getAudioRecordHandle()->getSessionId()))
+            << "Effect should not have been added. " << type;
+    EXPECT_EQ(OK, capture->audioProcess());
+    EXPECT_EQ(OK, capture->stop());
+
+    String16 name{gPackageName};
+    audio_unique_id_t effectId;
+    status_t status = AudioEffect::addSourceDefaultEffect(type.c_str(), name, uuid.c_str(),
+                                                          kDefaultInputEffectPriority,
+                                                          AUDIO_SOURCE_MIC, &effectId);
+    EXPECT_EQ(NO_ERROR, status) << "Adding default effect failed: " << type;
+
+    capture = new AudioCapture(AUDIO_SOURCE_MIC, sampleRate, format, channelMask);
+    ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
+    EXPECT_EQ(NO_ERROR, capture->create());
+    EXPECT_EQ(NO_ERROR, capture->start());
+    EXPECT_TRUE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
+                                        capture->getAudioRecordHandle()))
+            << "Effect should have been default on record. " << type;
+    EXPECT_EQ(ALREADY_EXISTS,
+              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
+                                           kDefaultInputEffectPriority - 1,
+                                           capture->getAudioRecordHandle()->getSessionId()))
+            << "Effect should have been added. " << type;
+    EXPECT_EQ(OK, capture->audioProcess());
+    EXPECT_EQ(OK, capture->stop());
+
+    status = AudioEffect::removeSourceDefaultEffect(effectId);
+    EXPECT_EQ(NO_ERROR, status);
+    capture = new AudioCapture(AUDIO_SOURCE_MIC, sampleRate, format, channelMask);
+    ASSERT_NE(capture, nullptr) << "Unable to create Record Application";
+    EXPECT_EQ(NO_ERROR, capture->create());
+    EXPECT_EQ(NO_ERROR, capture->start());
+    EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
+                                         capture->getAudioRecordHandle()))
+            << "Effect should not have been default on record. " << type;
+    EXPECT_EQ(NO_ERROR,
+              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
+                                           kDefaultInputEffectPriority - 1,
+                                           capture->getAudioRecordHandle()->getSessionId()))
+            << "Effect should not have been added. " << type;
+    EXPECT_EQ(OK, capture->audioProcess());
+    EXPECT_EQ(OK, capture->stop());
+}
+
+TEST(AudioEffectTest, AuxEffectSanityTest) {
+    int32_t selectedEffect = -1;
+    std::vector<effect_descriptor_t> descriptors;
+    ASSERT_NO_FATAL_FAILURE(listEffectsAvailable(descriptors));
+    for (auto i = 0; i < descriptors.size(); i++) {
+        if (isAux(descriptors[i])) {
+            selectedEffect = i;
+            break;
+        }
+    }
+    if (selectedEffect == -1) GTEST_SKIP() << "expected at least one aux effect";
+    effect_uuid_t* selectedEffectType = &descriptors[selectedEffect].type;
+    effect_uuid_t* selectedEffectUuid = &descriptors[selectedEffect].uuid;
+    auto [type, uuid] = typeAndUuidToString(descriptors[selectedEffect]);
+    String16 name{gPackageName};
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> audioEffect = createEffect(selectedEffectType, selectedEffectUuid,
+                                               kDefaultInputEffectPriority, sessionId);
+    EXPECT_EQ(NO_INIT, audioEffect->initCheck())
+            << "auxiliary effect (" << type << ") was unexpectedly created on session id "
+            << (int)sessionId;
+    audio_unique_id_t id;
+    status_t status = AudioEffect::addStreamDefaultEffect(
+            type.c_str(), name, uuid.c_str(), kDefaultOutputEffectPriority, AUDIO_USAGE_MEDIA, &id);
+    if (status == NO_ERROR) {
+        EXPECT_EQ(NO_ERROR, AudioEffect::removeStreamDefaultEffect(id));
+        EXPECT_NE(NO_ERROR, status) << "adding auxiliary effect (" << type
+                                    << ") as a stream default effect unexpectedly succeeded";
+    }
+}
+
+class AudioPlaybackEffectTest : public ::testing::TestWithParam<bool> {
+  public:
+    AudioPlaybackEffectTest() : mSelectFastMode(GetParam()){};
+
+    const bool mSelectFastMode;
+
+    bool mIsFastCompatibleEffect;
+    effect_uuid_t mType;
+    effect_uuid_t mUuid;
+    std::string mTypeStr;
+    std::string mUuidStr;
+
+    void SetUp() override {
+        if (mSelectFastMode) {
+            std::vector<struct audio_port_v7> ports;
+            ASSERT_EQ(OK, listAudioPorts(ports));
+            if (!doesDeviceSupportLowLatencyMode(ports)) {
+                GTEST_SKIP() << "device does not support low latency mode";
+            }
+        }
+
+        int32_t selectedEffect = -1;
+        std::vector<effect_descriptor_t> descriptors;
+        ASSERT_NO_FATAL_FAILURE(listEffectsAvailable(descriptors));
+        for (auto i = 0; i < descriptors.size(); i++) {
+            if (isSpatializer(descriptors[i])) continue;
+            if (isHapticGenerator(descriptors[i]) && !AudioSystem::isHapticPlaybackSupported())
+                continue;
+            if (!isInsert(descriptors[i])) continue;
+            selectedEffect = i;
+            mIsFastCompatibleEffect = isFastCompatible(descriptors[i]);
+            // prefer an effect whose fast compatibility matches the selected mode, if available
+            if (mSelectFastMode == mIsFastCompatibleEffect) break;
+        }
+        if (selectedEffect == -1) {
+            GTEST_SKIP() << "expected at least one valid effect";
+        }
+
+        mType = descriptors[selectedEffect].type;
+        mUuid = descriptors[selectedEffect].uuid;
+        std::tie(mTypeStr, mUuidStr) = typeAndUuidToString(descriptors[selectedEffect]);
+    }
+};
+
+TEST_P(AudioPlaybackEffectTest, StreamDefaultEffectTest) {
+    SCOPED_TRACE(testing::Message()
+                 << "\n selected effect type is :: " << mTypeStr
+                 << "\n selected effect uuid is :: " << mUuidStr
+                 << "\n audiotrack output flag : " << (mSelectFastMode ? "fast" : "default")
+                 << "\n audio effect is fast compatible : "
+                 << (mIsFastCompatibleEffect ? "yes" : "no"));
+
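+    // the default stream effect is only expected on the session when the track is not fast, or
+    // when the selected effect is fast compatible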
+    bool compatCheck = !mSelectFastMode || mIsFastCompatibleEffect;
+
+    // create track
+    audio_attributes_t attributes;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    auto playback = sp<AudioPlayback>::make(
+            0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            mSelectFastMode ? AUDIO_OUTPUT_FLAG_FAST : AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE,
+            AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, playback->create());
+    EXPECT_EQ(NO_ERROR, playback->start());
+    EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
+              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                           playback->getAudioTrackHandle()->getSessionId()))
+            << "Effect should not have been added. " << mTypeStr;
+    EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
+    playback->stop();
+    playback.clear();
+
+    String16 name{gPackageName};
+    audio_unique_id_t id;
+    status_t status = AudioEffect::addStreamDefaultEffect(mTypeStr.c_str(), name, mUuidStr.c_str(),
+                                                          kDefaultOutputEffectPriority,
+                                                          AUDIO_USAGE_MEDIA, &id);
+    EXPECT_EQ(NO_ERROR, status) << "Adding default effect failed: " << mTypeStr;
+
+    playback = sp<AudioPlayback>::make(
+            0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            mSelectFastMode ? AUDIO_OUTPUT_FLAG_FAST : AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE,
+            AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, playback->create());
+    EXPECT_EQ(NO_ERROR, playback->start());
+    // If the chosen effect is not compatible with the track's output flags, it won't be applied
+    EXPECT_EQ(compatCheck ? ALREADY_EXISTS : NO_INIT,
+              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                           playback->getAudioTrackHandle()->getSessionId()))
+            << "Effect should have been added. " << mTypeStr;
+    EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
+    if (mSelectFastMode) {
+        EXPECT_EQ(AUDIO_OUTPUT_FLAG_FAST,
+                  playback->getAudioTrackHandle()->getFlags() & AUDIO_OUTPUT_FLAG_FAST);
+    }
+    playback->stop();
+    playback.clear();
+
+    status = AudioEffect::removeStreamDefaultEffect(id);
+    EXPECT_EQ(NO_ERROR, status);
+    playback = sp<AudioPlayback>::make(
+            0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            mSelectFastMode ? AUDIO_OUTPUT_FLAG_FAST : AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE,
+            AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, playback->create());
+    EXPECT_EQ(NO_ERROR, playback->start());
+    EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
+              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                           playback->getAudioTrackHandle()->getSessionId()))
+            << "Effect should not have been added. " << mTypeStr;
+    EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
+    playback->stop();
+    playback.clear();
+}
+
+TEST_P(AudioPlaybackEffectTest, CheckOutputFlagCompatibility) {
+    SCOPED_TRACE(testing::Message()
+                 << "\n selected effect type is :: " << mTypeStr
+                 << "\n selected effect uuid is :: " << mUuidStr
+                 << "\n audiotrack output flag : " << (mSelectFastMode ? "fast" : "default")
+                 << "\n audio effect is fast compatible : "
+                 << (mIsFastCompatibleEffect ? "yes" : "no"));
+
+    audio_attributes_t attributes;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffectCallback> cb = sp<AudioEffectCallback>::make();
+    sp<AudioEffect> audioEffect =
+            createEffect(&mType, &mUuid, kDefaultOutputEffectPriority, sessionId, cb);
+    ASSERT_EQ(OK, audioEffect->initCheck());
+    ASSERT_EQ(NO_ERROR, audioEffect->setEnabled(true));
+    auto playback = sp<AudioPlayback>::make(
+            0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_MONO,
+            mSelectFastMode ? AUDIO_OUTPUT_FLAG_FAST : AUDIO_OUTPUT_FLAG_NONE, sessionId,
+            AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_1ch_8kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, playback->create());
+    EXPECT_EQ(NO_ERROR, playback->start());
+
+    EXPECT_EQ(ALREADY_EXISTS, isEffectExistsOnAudioSession(
+                                      &mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
+            << "Effect should have been added. " << mTypeStr;
+    if (mSelectFastMode) {
+        EXPECT_EQ(mIsFastCompatibleEffect ? AUDIO_OUTPUT_FLAG_FAST : 0,
+                  playback->getAudioTrackHandle()->getFlags() & AUDIO_OUTPUT_FLAG_FAST);
+    }
+    EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
+    EXPECT_EQ(NO_ERROR, playback->getAudioTrackHandle()->attachAuxEffect(0));
+    playback->stop();
+    playback.clear();
+    EXPECT_TRUE(cb->receivedFramesProcessed)
+            << "AudioEffect frames processed callback not received";
+}
+
+INSTANTIATE_TEST_SUITE_P(EffectParameterizedTests, AudioPlaybackEffectTest, ::testing::Bool());
+
+TEST(AudioEffectTest, TestHapticEffect) {
+    if (!AudioSystem::isHapticPlaybackSupported())
+        GTEST_SKIP() << "Haptic playback is not supported";
+    int32_t selectedEffect = -1;
+    std::vector<effect_descriptor_t> descriptors;
+    ASSERT_NO_FATAL_FAILURE(listEffectsAvailable(descriptors));
+    for (auto i = 0; i < descriptors.size(); i++) {
+        if (!isHapticGenerator(descriptors[i])) continue;
+        selectedEffect = i;
+        break;
+    }
+    if (selectedEffect == -1) GTEST_SKIP() << "expected at least one valid effect";
+
+    effect_uuid_t* selectedEffectType = &descriptors[selectedEffect].type;
+    effect_uuid_t* selectedEffectUuid = &descriptors[selectedEffect].uuid;
+    auto [type, uuid] = typeAndUuidToString(descriptors[selectedEffect]);
+
+    SCOPED_TRACE(testing::Message() << "\n selected effect type is :: " << type
+                                    << "\n selected effect uuid is :: " << uuid);
+
+    audio_attributes_t attributes;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffectCallback> cb = sp<AudioEffectCallback>::make();
+    sp<AudioEffect> audioEffect = createEffect(selectedEffectType, selectedEffectUuid,
+                                               kDefaultOutputEffectPriority, sessionId, cb);
+    ASSERT_EQ(OK, audioEffect->initCheck());
+    ASSERT_EQ(NO_ERROR, audioEffect->setEnabled(true));
+    auto playback = sp<AudioPlayback>::make(0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
+                                            AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE,
+                                            sessionId, AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, playback->create());
+    EXPECT_EQ(NO_ERROR, playback->start());
+    EXPECT_EQ(ALREADY_EXISTS,
+              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
+                                           kDefaultOutputEffectPriority - 1, sessionId))
+            << "Effect should have been added. " << type;
+    EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
+    playback->stop();
+    playback.clear();
+    EXPECT_TRUE(cb->receivedFramesProcessed)
+            << "AudioEffect frames processed callback not received";
+}
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
new file mode 100644
index 0000000..8c63a6d
--- /dev/null
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioRecordTest"
+
+#include <gtest/gtest.h>
+
+#include "audio_test_utils.h"
+
+using namespace android;
+
+class AudioRecordTest : public ::testing::Test {
+  public:
+    virtual void SetUp() override {
+        mAC = new AudioCapture(AUDIO_SOURCE_DEFAULT, 44100, AUDIO_FORMAT_PCM_16_BIT,
+                               AUDIO_CHANNEL_IN_FRONT);
+        ASSERT_NE(nullptr, mAC);
+        ASSERT_EQ(OK, mAC->create()) << "record creation failed";
+    }
+
+    virtual void TearDown() override {
+        if (mAC) ASSERT_EQ(OK, mAC->stop());
+    }
+
+    sp<AudioCapture> mAC;
+};
+
+class AudioRecordCreateTest
+    : public ::testing::TestWithParam<
+              std::tuple<uint32_t, audio_format_t, audio_channel_mask_t, audio_input_flags_t,
+                         audio_session_t, audio_source_t>> {
+  public:
+    AudioRecordCreateTest()
+        : mSampleRate(std::get<0>(GetParam())),
+          mFormat(std::get<1>(GetParam())),
+          mChannelMask(std::get<2>(GetParam())),
+          mFlags(std::get<3>(GetParam())),
+          mSessionId(std::get<4>(GetParam())),
+          mInputSource(std::get<5>(GetParam())){};
+
+    const uint32_t mSampleRate;
+    const audio_format_t mFormat;
+    const audio_channel_mask_t mChannelMask;
+    const audio_input_flags_t mFlags;
+    const audio_session_t mSessionId;
+    const audio_source_t mInputSource;
+    const AudioRecord::transfer_type mTransferType = AudioRecord::TRANSFER_OBTAIN;
+
+    sp<AudioCapture> mAC;
+
+    virtual void SetUp() override {
+        mAC = new AudioCapture(mInputSource, mSampleRate, mFormat, mChannelMask, mFlags, mSessionId,
+                               mTransferType);
+        ASSERT_NE(nullptr, mAC);
+        ASSERT_EQ(OK, mAC->create()) << "record creation failed";
+    }
+
+    virtual void TearDown() override {
+        if (mAC) ASSERT_EQ(OK, mAC->stop());
+    }
+};
+
+TEST_F(AudioRecordTest, TestSimpleRecord) {
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+}
+
+TEST_F(AudioRecordTest, TestAudioCbNotifier) {
+    EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->addAudioDeviceCallback(nullptr));
+    sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
+    sp<OnAudioDeviceUpdateNotifier> cbOld = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cbOld));
+    EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cbOld));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
+    EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
+    mAC->stop();
+}
+
+TEST_F(AudioRecordTest, TestEventRecordTrackPause) {
+    const auto playback = sp<AudioPlayback>::make(
+            8000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_MONO);
+    ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_1ch_8kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, playback->create()) << "AudioTrack Creation failed";
+    audio_session_t audioTrackSession = playback->getAudioTrackHandle()->getSessionId();
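+    // start capture synchronized on the track's session: no buffers should be delivered until the
+    // track signals presentation complete (triggered below when the track is paused)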
+    EXPECT_EQ(OK, mAC->start(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE, audioTrackSession))
+            << "start recording failed";
+    EXPECT_EQ(OK, playback->start());
+    RawBuffer buffer;
+    status_t status = mAC->obtainBufferCb(buffer);
+    EXPECT_EQ(status, TIMED_OUT) << "Not expecting any callbacks until track sends Sync event";
+    playback->getAudioTrackHandle()->pause();
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+    playback->stop();
+}
+
+TEST_F(AudioRecordTest, TestEventRecordTrackStop) {
+    const auto playback = sp<AudioPlayback>::make(
+            8000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_MONO);
+    ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_1ch_8kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, playback->create()) << "AudioTrack Creation failed";
+    audio_session_t audioTrackSession = playback->getAudioTrackHandle()->getSessionId();
+    EXPECT_EQ(OK, mAC->start(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE, audioTrackSession))
+            << "start recording failed";
+    EXPECT_EQ(OK, playback->start());
+    RawBuffer buffer;
+    status_t status = mAC->obtainBufferCb(buffer);
+    EXPECT_EQ(status, TIMED_OUT) << "Not expecting any callbacks until track sends Sync event";
+    playback->stop();
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+}
+
+TEST_F(AudioRecordTest, TestGetSetMarker) {
+    mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition))
+            << "setMarkerPosition() failed";
+    uint32_t marker;
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker))
+            << "getMarkerPosition() failed";
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+    EXPECT_EQ(marker, mAC->mMarkerPosition)
+            << "configured marker and received marker are different";
+    EXPECT_EQ(mAC->mReceivedCbMarkerAtPosition, mAC->mMarkerPosition)
+            << "configured marker and received cb marker are different";
+}
+
+TEST_F(AudioRecordTest, TestGetSetMarkerPeriodical) {
+    mAC->mMarkerPeriod = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPeriod))
+            << "setPositionUpdatePeriod() failed";
+    uint32_t marker;
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker))
+            << "getPositionUpdatePeriod() failed";
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+    EXPECT_EQ(marker, mAC->mMarkerPeriod)
+            << "configured update period and received update period are different";
+    EXPECT_EQ(mAC->mReceivedCbMarkerCount, mAC->mNumFramesToRecord / mAC->mMarkerPeriod)
+            << "expected and received period callback counts are different";
+}
+
+TEST_F(AudioRecordTest, TestGetPosition) {
+    uint32_t position;
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPosition(&position)) << "getPosition() failed";
+    EXPECT_EQ(0, position);
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+    EXPECT_EQ(OK, mAC->stop());
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPosition(&position)) << "getPosition() failed";
+}
+
+// TODO: Add checkPatchCapture(), verify the information of patch via dumpPort() and dumpPatch()
+TEST_P(AudioRecordCreateTest, TestCreateRecord) {
+    EXPECT_EQ(mFormat, mAC->getAudioRecordHandle()->format());
+    EXPECT_EQ(audio_channel_count_from_in_mask(mChannelMask),
+              mAC->getAudioRecordHandle()->channelCount());
+    if (mAC->mFrameCount != 0)
+        EXPECT_LE(mAC->mFrameCount, mAC->getAudioRecordHandle()->frameCount());
+    EXPECT_EQ(mInputSource, mAC->getAudioRecordHandle()->inputSource());
+    if (mSampleRate != 0) EXPECT_EQ(mSampleRate, mAC->getAudioRecordHandle()->getSampleRate());
+    if (mSessionId != AUDIO_SESSION_NONE)
+        EXPECT_EQ(mSessionId, mAC->getAudioRecordHandle()->getSessionId());
+    if (mTransferType != AudioRecord::TRANSFER_CALLBACK) {
+        uint32_t marker;
+        mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+        EXPECT_EQ(INVALID_OPERATION,
+                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition));
+        EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker));
+        EXPECT_EQ(INVALID_OPERATION,
+                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPosition));
+        EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker));
+    }
+    EXPECT_EQ(OK, mAC->start()) << "start recording failed";
+    EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
+}
+
+// for port primary input
+INSTANTIATE_TEST_SUITE_P(AudioRecordPrimaryInput, AudioRecordCreateTest,
+                         ::testing::Combine(::testing::Values(8000, 11025, 12000, 16000, 22050,
+                                                              24000, 32000, 44100, 48000),
+                                            ::testing::Values(AUDIO_FORMAT_PCM_8_24_BIT),
+                                            ::testing::Values(AUDIO_CHANNEL_IN_MONO,
+                                                              AUDIO_CHANNEL_IN_STEREO,
+                                                              AUDIO_CHANNEL_IN_FRONT_BACK),
+                                            ::testing::Values(AUDIO_INPUT_FLAG_NONE),
+                                            ::testing::Values(AUDIO_SESSION_NONE),
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+
+// for port fast input
+INSTANTIATE_TEST_SUITE_P(AudioRecordFastInput, AudioRecordCreateTest,
+                         ::testing::Combine(::testing::Values(8000, 11025, 12000, 16000, 22050,
+                                                              24000, 32000, 44100, 48000),
+                                            ::testing::Values(AUDIO_FORMAT_PCM_8_24_BIT),
+                                            ::testing::Values(AUDIO_CHANNEL_IN_MONO,
+                                                              AUDIO_CHANNEL_IN_STEREO,
+                                                              AUDIO_CHANNEL_IN_FRONT_BACK),
+                                            ::testing::Values(AUDIO_INPUT_FLAG_FAST),
+                                            ::testing::Values(AUDIO_SESSION_NONE),
+                                            ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+
+// misc
+INSTANTIATE_TEST_SUITE_P(AudioRecordMiscInput, AudioRecordCreateTest,
+                         ::testing::Combine(::testing::Values(48000),
+                                            ::testing::Values(AUDIO_FORMAT_PCM_16_BIT),
+                                            ::testing::Values(AUDIO_CHANNEL_IN_MONO),
+                                            ::testing::Values(AUDIO_INPUT_FLAG_NONE),
+                                            ::testing::Values(AUDIO_SESSION_NONE),
+                                            ::testing::Values(AUDIO_SOURCE_MIC,
+                                                              AUDIO_SOURCE_CAMCORDER,
+                                                              AUDIO_SOURCE_VOICE_RECOGNITION,
+                                                              AUDIO_SOURCE_VOICE_COMMUNICATION,
+                                                              AUDIO_SOURCE_UNPROCESSED)));
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
new file mode 100644
index 0000000..19d1abc
--- /dev/null
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -0,0 +1,261 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+
+#include <cutils/properties.h>
+#include <gtest/gtest.h>
+#include <string.h>
+
+#include "audio_test_utils.h"
+
+using namespace android;
+
+// UNIT TEST
+TEST(AudioTrackTest, TestPerformanceMode) {
+    std::vector<struct audio_port_v7> ports;
+    ASSERT_EQ(OK, listAudioPorts(ports));
+    audio_output_flags_t output_flags[] = {AUDIO_OUTPUT_FLAG_FAST, AUDIO_OUTPUT_FLAG_DEEP_BUFFER};
+    audio_flags_mask_t flags[] = {AUDIO_FLAG_LOW_LATENCY, AUDIO_FLAG_DEEP_BUFFER};
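+    // output_flags[i] and flags[i] are paired: AUDIO_FLAG_LOW_LATENCY should map to a FAST
+    // output, AUDIO_FLAG_DEEP_BUFFER to a DEEP_BUFFER output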
+    bool hasFlag = false;
+    for (int i = 0; i < sizeof(flags) / sizeof(flags[0]); i++) {
+        hasFlag = false;
+        for (const auto& port : ports) {
+            if (port.role == AUDIO_PORT_ROLE_SOURCE && port.type == AUDIO_PORT_TYPE_MIX) {
+                if ((port.active_config.flags.output & output_flags[i]) != 0) {
+                    hasFlag = true;
+                    break;
+                }
+            }
+        }
+        if (!hasFlag) continue;
+        audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+        attributes.usage = AUDIO_USAGE_MEDIA;
+        attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+        attributes.flags = flags[i];
+        sp<AudioPlayback> ap = sp<AudioPlayback>::make(0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
+                                                       AUDIO_CHANNEL_OUT_STEREO,
+                                                       AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE,
+                                                       AudioTrack::TRANSFER_OBTAIN, &attributes);
+        ASSERT_NE(nullptr, ap);
+        ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+                << "Unable to open Resource";
+        EXPECT_EQ(OK, ap->create()) << "track creation failed";
+        sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
+        EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cb));
+        EXPECT_EQ(OK, ap->start()) << "audio track start failed";
+        EXPECT_EQ(OK, ap->onProcess());
+        EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
+        EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+        EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
+        audio_patch patch;
+        EXPECT_EQ(OK, getPatchForOutputMix(cb->mAudioIo, patch));
+        for (auto j = 0; j < patch.num_sources; j++) {
+            if (patch.sources[j].type == AUDIO_PORT_TYPE_MIX &&
+                patch.sources[j].ext.mix.handle == cb->mAudioIo) {
+                if ((patch.sources[j].flags.output & output_flags[i]) == 0) {
+                    ADD_FAILURE() << "expected output flag " << output_flags[i] << " is absent";
+                    std::cerr << dumpPortConfig(patch.sources[j]);
+                }
+            }
+        }
+        ap->stop();
+    }
+}
+
+TEST(AudioTrackTest, DefaultRoutingTest) {
+    audio_port_v7 port;
+    if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                  AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
+        GTEST_SKIP() << "remote submix in device not connected";
+    }
+
+    // create record instance
+    sp<AudioCapture> capture = sp<AudioCapture>::make(
+            AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
+    ASSERT_NE(nullptr, capture);
+    EXPECT_EQ(OK, capture->create()) << "record creation failed";
+    sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, capture->getAudioRecordHandle()->addAudioDeviceCallback(cbCapture));
+
+    // create playback instance
+    sp<AudioPlayback> playback = sp<AudioPlayback>::make(
+            48000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
+
+    // capture should be routed to submix in port
+    EXPECT_EQ(OK, capture->start()) << "start recording failed";
+    EXPECT_EQ(OK, cbCapture->waitForAudioDeviceCb());
+    EXPECT_EQ(port.id, capture->getAudioRecordHandle()->getRoutedDeviceId())
+            << "Capture NOT routed on expected port";
+
+    // capture start should create submix out port
+    status_t status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                          AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "0", port);
+    EXPECT_EQ(OK, status) << "Could not find port";
+
+    // playback should be routed to submix out as long as capture is active
+    EXPECT_EQ(OK, playback->start()) << "audio track start failed";
+    EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
+    EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
+            << "Playback NOT routed on expected port";
+
+    capture->stop();
+    playback->stop();
+}
+
+class AudioRoutingTest : public ::testing::Test {
+  public:
+    void SetUp() override {
+        audio_port_v7 port;
+        if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                      AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
+            GTEST_SKIP() << "remote submix in device not connected";
+        }
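+        // register a dynamic policy mix that loops back players with AUDIO_USAGE_MEDIA to a
+        // remote submix device at address mAddress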
+        uint32_t mixType = MIX_TYPE_PLAYERS;
+        uint32_t mixFlag = MIX_ROUTE_FLAG_LOOP_BACK;
+        audio_devices_t deviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+        AudioMixMatchCriterion criterion(AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
+                                         RULE_MATCH_ATTRIBUTE_USAGE);
+        std::vector<AudioMixMatchCriterion> criteria{criterion};
+        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        config.format = AUDIO_FORMAT_PCM_16_BIT;
+        config.sample_rate = 48000;
+        AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
+        mix.mDeviceType = deviceType;
+        mMixes.push(mix);
+        if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
+            mPolicyMixRegistered = true;
+        }
+        ASSERT_TRUE(mPolicyMixRegistered) << "register policy mix failed";
+    }
+
+    void TearDown() override {
+        if (mPolicyMixRegistered) {
+            EXPECT_EQ(OK, AudioSystem::registerPolicyMixes(mMixes, false));
+        }
+    }
+
+    bool mPolicyMixRegistered{false};
+    std::string mAddress{"mix_1"};
+    Vector<AudioMix> mMixes;
+};
+
+TEST_F(AudioRoutingTest, ConcurrentDynamicRoutingTest) {
+    audio_port_v7 port, port_mix;
+    // expect legacy submix in port to be connected
+    status_t status = getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                          AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port);
+    EXPECT_EQ(OK, status) << "Could not find port";
+
+    // as policy mix is registered, expect submix in port with mAddress to be connected
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_IN_REMOTE_SUBMIX, mAddress, port_mix);
+    EXPECT_EQ(OK, status) << "Could not find port";
+
+    // create playback instance
+    sp<AudioPlayback> playback = sp<AudioPlayback>::make(
+            48000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE, AudioTrack::TRANSFER_OBTAIN);
+    ASSERT_NE(nullptr, playback);
+    ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
+
+    // create capture instances on different ports
+    sp<AudioCapture> captureA = sp<AudioCapture>::make(
+            AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
+    ASSERT_NE(nullptr, captureA);
+    EXPECT_EQ(OK, captureA->create()) << "record creation failed";
+    sp<OnAudioDeviceUpdateNotifier> cbCaptureA = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, captureA->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureA));
+
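+    // captureB records from the policy mix by addressing its remote submix via the attributes tags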
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_REMOTE_SUBMIX;
+    snprintf(attr.tags, sizeof(attr.tags), "addr=%s", mAddress.c_str());
+    sp<AudioCapture> captureB = sp<AudioCapture>::make(
+            AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+            AUDIO_INPUT_FLAG_NONE, AUDIO_SESSION_ALLOCATE, AudioRecord::TRANSFER_CALLBACK, &attr);
+    ASSERT_NE(nullptr, captureB);
+    EXPECT_EQ(OK, captureB->create()) << "record creation failed";
+    sp<OnAudioDeviceUpdateNotifier> cbCaptureB = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, captureB->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureB));
+
+    // launch
+    EXPECT_EQ(OK, captureA->start()) << "start recording failed";
+    EXPECT_EQ(OK, cbCaptureA->waitForAudioDeviceCb());
+    EXPECT_EQ(port.id, captureA->getAudioRecordHandle()->getRoutedDeviceId())
+            << "Capture NOT routed on expected port";
+
+    EXPECT_EQ(OK, captureB->start()) << "start recording failed";
+    EXPECT_EQ(OK, cbCaptureB->waitForAudioDeviceCb());
+    EXPECT_EQ(port_mix.id, captureB->getAudioRecordHandle()->getRoutedDeviceId())
+            << "Capture NOT routed on expected port";
+
+    // as record started, expect submix out ports to be connected
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "0", port);
+    EXPECT_EQ(OK, status) << "unexpected submix out port found";
+
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mAddress, port_mix);
+    EXPECT_EQ(OK, status) << "Could not find port";
+
+    // check if playback routed to desired port
+    EXPECT_EQ(OK, playback->start());
+    EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
+    EXPECT_EQ(port_mix.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
+            << "Playback NOT routed on expected port";
+
+    captureB->stop();
+
+    // check that the mAddress submix out port is disconnected once the capture session stops
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mAddress, port_mix);
+    EXPECT_NE(OK, status) << "unexpected submix out port found";
+
+    // check if legacy submix out is connected
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "0", port);
+    EXPECT_EQ(OK, status) << "port not found";
+
+    // unregister policy
+    EXPECT_EQ(OK, AudioSystem::registerPolicyMixes(mMixes, false));
+    mPolicyMixRegistered = false;
+
+    // as the policy mix is unregistered, the mAddress submix input port should be disconnected
+    status = getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
+                                 AUDIO_DEVICE_IN_REMOTE_SUBMIX, mAddress, port_mix);
+    EXPECT_NE(OK, status) << "unexpected submix in port found";
+
+    playback->onProcess();
+    // as captureA is still active, playback should re-route to the legacy submix
+    EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb(port.id));
+    EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
+            << "Playback NOT routed on expected port";
+
+    captureA->stop();
+    playback->stop();
+}
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
new file mode 100644
index 0000000..45baa94
--- /dev/null
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -0,0 +1,691 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioSystemTest"
+
+#include <string.h>
+
+#include <set>
+
+#include <gtest/gtest.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/IAudioFlinger.h>
+#include <utils/Log.h>
+
+#include "audio_test_utils.h"
+
+using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioDeviceDescription;
+using android::media::audio::common::AudioDeviceType;
+using android::media::audio::common::AudioPortExt;
+using namespace android;
+
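+// Scans all active audio patches and reports through 'res' whether any of them
+// uses the given device id as an input (source) device.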
+void anyPatchContainsInputDevice(audio_port_handle_t deviceId, bool& res) {
+    std::vector<struct audio_patch> patches;
+    status_t status = listAudioPatches(patches);
+    ASSERT_EQ(OK, status);
+    res = false;
+    for (const auto& patch : patches) {
+        if (patchContainsInputDevice(deviceId, patch)) {
+            res = true;
+            return;
+        }
+    }
+}
+
+class AudioSystemTest : public ::testing::Test {
+  public:
+    void SetUp() override {
+        mAF = AudioSystem::get_audio_flinger();
+        ASSERT_NE(mAF, nullptr) << "Permission denied";
+    }
+
+    void TearDown() override {
+        if (mPlayback) {
+            mPlayback->stop();
+            mCbPlayback.clear();
+            mPlayback.clear();
+        }
+        if (mCapture) {
+            mCapture->stop();
+            mCbRecord.clear();
+            mCapture.clear();
+        }
+    }
+
+    void createPlaybackSession(void);
+    void createRecordSession(void);
+
+    sp<IAudioFlinger> mAF;
+    sp<AudioPlayback> mPlayback;
+    sp<OnAudioDeviceUpdateNotifier> mCbPlayback;
+    sp<AudioCapture> mCapture;
+    sp<OnAudioDeviceUpdateNotifier> mCbRecord;
+};
+
+void AudioSystemTest::createPlaybackSession(void) {
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    mPlayback = sp<AudioPlayback>::make(48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                                        AUDIO_OUTPUT_FLAG_FAST, AUDIO_SESSION_NONE,
+                                        AudioTrack::TRANSFER_SHARED, &attributes);
+    ASSERT_NE(nullptr, mPlayback);
+    ASSERT_EQ(NO_ERROR, mPlayback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
+    EXPECT_EQ(NO_ERROR, mPlayback->create());
+    mCbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, mPlayback->getAudioTrackHandle()->addAudioDeviceCallback(mCbPlayback));
+    EXPECT_EQ(NO_ERROR, mPlayback->start());
+    EXPECT_EQ(OK, mPlayback->onProcess());
+    EXPECT_EQ(OK, mCbPlayback->waitForAudioDeviceCb());
+}
+
+void AudioSystemTest::createRecordSession(void) {
+    mCapture = new AudioCapture(AUDIO_SOURCE_DEFAULT, 44100, AUDIO_FORMAT_PCM_8_24_BIT,
+                                AUDIO_CHANNEL_IN_MONO, AUDIO_INPUT_FLAG_FAST);
+    ASSERT_NE(nullptr, mCapture);
+    ASSERT_EQ(OK, mCapture->create()) << "record creation failed";
+    mCbRecord = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, mCapture->getAudioRecordHandle()->addAudioDeviceCallback(mCbRecord));
+    EXPECT_EQ(OK, mCapture->start()) << "record creation failed";
+    EXPECT_EQ(OK, mCbRecord->waitForAudioDeviceCb());
+}
+
+// UNIT TESTS
+TEST_F(AudioSystemTest, CheckServerSideValues) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    EXPECT_GT(mAF->sampleRate(mCbPlayback->mAudioIo), 0);
+    EXPECT_NE(mAF->format(mCbPlayback->mAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(mCbPlayback->mAudioIo), 0);
+    size_t frameCountHal, frameCountHalCache;
+    frameCountHal = mAF->frameCountHAL(mCbPlayback->mAudioIo);
+    EXPECT_GT(frameCountHal, 0);
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbPlayback->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(frameCountHal, frameCountHalCache);
+    EXPECT_GT(mAF->latency(mCbPlayback->mAudioIo), 0);
+    // client side latency is at least server side latency
+    EXPECT_LE(mAF->latency(mCbPlayback->mAudioIo), mPlayback->getAudioTrackHandle()->latency());
+
+    ASSERT_NO_FATAL_FAILURE(createRecordSession());
+    EXPECT_GT(mAF->sampleRate(mCbRecord->mAudioIo), 0);
+    // EXPECT_NE(mAF->format(mCbRecord->mAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(mCbRecord->mAudioIo), 0);
+    EXPECT_GT(mAF->frameCountHAL(mCbRecord->mAudioIo), 0);
+    frameCountHal = mAF->frameCountHAL(mCbRecord->mAudioIo);
+    EXPECT_GT(frameCountHal, 0);
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbRecord->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(frameCountHal, frameCountHalCache);
+    // EXPECT_GT(mAF->latency(mCbRecord->mAudioIo), 0);
+    // client side latency is at least server side latency
+    // EXPECT_LE(mAF->latency(mCbRecord->mAudioIo), mCapture->getAudioRecordHandle()->latency());
+
+    EXPECT_GT(AudioSystem::getPrimaryOutputSamplingRate(), 0);  // first fast mixer sample rate
+    EXPECT_GT(AudioSystem::getPrimaryOutputFrameCount(), 0);    // fast mixer frame count
+}
+
+TEST_F(AudioSystemTest, GetSetMasterVolume) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    float origVol, tstVol;
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterVolume(&origVol));
+    float newVol;
+    if (origVol + 0.2f > 1.0f) {
+        newVol = origVol - 0.2f;
+    } else {
+        newVol = origVol + 0.2f;
+    }
+    EXPECT_EQ(NO_ERROR, AudioSystem::setMasterVolume(newVol));
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterVolume(&tstVol));
+    EXPECT_EQ(newVol, tstVol);
+    EXPECT_EQ(NO_ERROR, AudioSystem::setMasterVolume(origVol));
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterVolume(&tstVol));
+    EXPECT_EQ(origVol, tstVol);
+}
+
+TEST_F(AudioSystemTest, GetSetMasterMute) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    bool origMuteState, tstMuteState;
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterMute(&origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::setMasterMute(!origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterMute(&tstMuteState));
+    EXPECT_EQ(!origMuteState, tstMuteState);
+    EXPECT_EQ(NO_ERROR, AudioSystem::setMasterMute(origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::getMasterMute(&tstMuteState));
+    EXPECT_EQ(origMuteState, tstMuteState);
+}
+
+TEST_F(AudioSystemTest, GetSetMicMute) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    bool origMuteState, tstMuteState;
+    EXPECT_EQ(NO_ERROR, AudioSystem::isMicrophoneMuted(&origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::muteMicrophone(!origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::isMicrophoneMuted(&tstMuteState));
+    EXPECT_EQ(!origMuteState, tstMuteState);
+    EXPECT_EQ(NO_ERROR, AudioSystem::muteMicrophone(origMuteState));
+    EXPECT_EQ(NO_ERROR, AudioSystem::isMicrophoneMuted(&tstMuteState));
+    EXPECT_EQ(origMuteState, tstMuteState);
+}
+
+TEST_F(AudioSystemTest, GetSetMasterBalance) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    float origBalance, tstBalance;
+    EXPECT_EQ(OK, AudioSystem::getMasterBalance(&origBalance));
+    float newBalance;
+    if (origBalance + 0.2f > 1.0f) {
+        newBalance = origBalance - 0.2f;
+    } else {
+        newBalance = origBalance + 0.2f;
+    }
+    EXPECT_EQ(OK, AudioSystem::setMasterBalance(newBalance));
+    EXPECT_EQ(OK, AudioSystem::getMasterBalance(&tstBalance));
+    EXPECT_EQ(newBalance, tstBalance);
+    EXPECT_EQ(OK, AudioSystem::setMasterBalance(origBalance));
+    EXPECT_EQ(OK, AudioSystem::getMasterBalance(&tstBalance));
+    EXPECT_EQ(origBalance, tstBalance);
+}
+
+TEST_F(AudioSystemTest, GetStreamVolume) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    float origStreamVol;
+    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol,
+                                                     mCbPlayback->mAudioIo));
+}
+
+TEST_F(AudioSystemTest, GetStreamMute) {
+    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
+    bool origMuteState;
+    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamMute(AUDIO_STREAM_MUSIC, &origMuteState));
+}
+
+TEST_F(AudioSystemTest, StartAndStopAudioSource) {
+    std::vector<struct audio_port_v7> ports;
+    audio_port_config sourcePortConfig;
+    audio_attributes_t attributes = AudioSystem::streamTypeToAttributes(AUDIO_STREAM_MUSIC);
+    audio_port_handle_t sourcePortHandle = AUDIO_PORT_HANDLE_NONE;
+
+    status_t status = listAudioPorts(ports);
+    ASSERT_EQ(OK, status);
+    if (ports.empty()) {
+        GTEST_SKIP() << "No ports returned by the audio system";
+    }
+
+    bool sourceFound = false;
+    for (const auto& port : ports) {
+        if (port.role != AUDIO_PORT_ROLE_SOURCE || port.type != AUDIO_PORT_TYPE_DEVICE) continue;
+        if (port.ext.device.type != AUDIO_DEVICE_IN_FM_TUNER) continue;
+        sourceFound = true;
+        sourcePortConfig = port.active_config;
+
+        bool patchFound;
+
+        // start audio source.
+        status_t ret =
+                AudioSystem::startAudioSource(&sourcePortConfig, &attributes, &sourcePortHandle);
+        EXPECT_EQ(OK, ret) << "AudioSystem::startAudioSource for source "
+                           << audio_device_to_string(port.ext.device.type) << " failed";
+        if (ret != OK) continue;
+
+        // verify that patch is established by the source port.
+        ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
+        EXPECT_EQ(true, patchFound);
+        EXPECT_NE(sourcePortHandle, AUDIO_PORT_HANDLE_NONE);
+
+        if (sourcePortHandle != AUDIO_PORT_HANDLE_NONE) {
+            ret = AudioSystem::stopAudioSource(sourcePortHandle);
+            EXPECT_EQ(OK, ret) << "AudioSystem::stopAudioSource failed for handle "
+                               << sourcePortHandle;
+        }
+
+        // verify that no source port patch exists.
+        ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
+        EXPECT_EQ(false, patchFound);
+    }
+    if (!sourceFound) {
+        GTEST_SKIP() << "No ports suitable for testing";
+    }
+}
+
+TEST_F(AudioSystemTest, CreateAndReleaseAudioPatch) {
+    status_t status;
+    struct audio_patch audioPatch;
+    std::vector<struct audio_port_v7> ports;
+    audio_patch_handle_t audioPatchHandle = AUDIO_PATCH_HANDLE_NONE;
+
+    bool patchFound = false;
+    audio_port_v7 sourcePort{};
+    audio_port_v7 sinkPort{};
+
+    audioPatch.id = 0;
+    audioPatch.num_sources = 1;
+    audioPatch.num_sinks = 1;
+
+    status = listAudioPorts(ports);
+    ASSERT_EQ(OK, status);
+    if (ports.empty()) {
+        GTEST_SKIP() << "No output devices returned by the audio system";
+    }
+
+    for (const auto& port : ports) {
+        if (port.role == AUDIO_PORT_ROLE_SOURCE && port.type == AUDIO_PORT_TYPE_DEVICE) {
+            sourcePort = port;
+        }
+        if (port.role == AUDIO_PORT_ROLE_SINK && port.type == AUDIO_PORT_TYPE_DEVICE &&
+            port.ext.device.type == AUDIO_DEVICE_OUT_SPEAKER) {
+            sinkPort = port;
+        }
+    }
+
+    audioPatch.sources[0] = sourcePort.active_config;
+    audioPatch.sinks[0] = sinkPort.active_config;
+
+    status = AudioSystem::createAudioPatch(&audioPatch, &audioPatchHandle);
+    EXPECT_EQ(OK, status) << "AudioSystem::createAudiopatch failed between source "
+                          << sourcePort.ext.device.address << " and sink "
+                          << sinkPort.ext.device.address;
+
+    // verify that patch is established between source and the sink.
+    ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(sourcePort.id, patchFound));
+    EXPECT_EQ(true, patchFound);
+
+    EXPECT_NE(AUDIO_PATCH_HANDLE_NONE, audioPatchHandle);
+    status = AudioSystem::releaseAudioPatch(audioPatchHandle);
+    EXPECT_EQ(OK, status) << "AudioSystem::releaseAudioPatch failed between source "
+                          << sourcePort.ext.device.address << " and sink "
+                          << sinkPort.ext.device.address;
+
+    // verify that no patch is established between source and the sink after releaseAudioPatch.
+    ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(sourcePort.id, patchFound));
+    EXPECT_EQ(false, patchFound);
+}
+
+TEST_F(AudioSystemTest, GetAudioPort) {
+    std::vector<struct audio_port_v7> ports;
+    status_t status = listAudioPorts(ports);
+    ASSERT_EQ(OK, status);
+    for (const auto& port : ports) {
+        audio_port_v7 portTest{.id = port.id};
+        EXPECT_EQ(OK, AudioSystem::getAudioPort(&portTest));
+        EXPECT_TRUE(audio_ports_v7_are_equal(&portTest, &port));
+    }
+}
+
+TEST_F(AudioSystemTest, TestPhoneState) {
+    uid_t uid = getuid();
+    EXPECT_EQ(OK, AudioSystem::setPhoneState(AUDIO_MODE_RINGTONE, uid));
+    audio_mode_t state = AudioSystem::getPhoneState();
+    EXPECT_EQ(AUDIO_MODE_RINGTONE, state);
+    EXPECT_EQ(OK, AudioSystem::setPhoneState(AUDIO_MODE_IN_COMMUNICATION, uid));
+    state = AudioSystem::getPhoneState();
+    EXPECT_EQ(AUDIO_MODE_IN_COMMUNICATION, state);
+    EXPECT_EQ(OK, AudioSystem::setPhoneState(AUDIO_MODE_NORMAL, uid));
+    state = AudioSystem::getPhoneState();
+    EXPECT_EQ(AUDIO_MODE_NORMAL, state);
+}
+
+TEST_F(AudioSystemTest, GetDirectProfilesForAttributes) {
+    std::vector<audio_profile> audioProfiles;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDirectProfilesForAttributes(nullptr, nullptr));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDirectProfilesForAttributes(nullptr, &audioProfiles));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDirectProfilesForAttributes(&attributes, nullptr));
+    EXPECT_EQ(NO_ERROR, AudioSystem::getDirectProfilesForAttributes(&attributes, &audioProfiles));
+}
+
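+// A strategy is treated as "public" unless one of its volume group attributes has
+// default audio attributes and a stream type outside the public range.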
+bool isPublicStrategy(const AudioProductStrategy& strategy) {
+    bool result = true;
+    for (auto& attribute : strategy.getVolumeGroupAttributes()) {
+        if (attribute.getAttributes() == AUDIO_ATTRIBUTES_INITIALIZER &&
+            (uint32_t(attribute.getStreamType()) >= AUDIO_STREAM_PUBLIC_CNT)) {
+            result = false;
+            break;
+        }
+    }
+    return result;
+}
+
+TEST_F(AudioSystemTest, DevicesForRoleAndStrategy) {
+    std::vector<struct audio_port_v7> ports;
+    status_t status = listAudioPorts(ports);
+    ASSERT_EQ(OK, status);
+
+    std::vector<struct audio_port_v7> devicePorts;
+    for (const auto& port : ports) {
+        if (port.type == AUDIO_PORT_TYPE_DEVICE && audio_is_output_device(port.ext.device.type)) {
+            devicePorts.push_back(port);
+        }
+    }
+    if (devicePorts.empty()) {
+        GTEST_SKIP() << "No output devices returned by the audio system";
+    }
+
+    AudioProductStrategyVector strategies;
+    EXPECT_EQ(OK, AudioSystem::listAudioProductStrategies(strategies));
+    if (strategies.empty()) {
+        GTEST_SKIP() << "No strategies returned by the audio system";
+    }
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+
+    bool hasStrategyForMedia = false;
+    AudioProductStrategy mediaStrategy;
+    for (const auto& strategy : strategies) {
+        if (!isPublicStrategy(strategy)) continue;
+
+        for (const auto& att : strategy.getVolumeGroupAttributes()) {
+            if (strategy.attributesMatches(att.getAttributes(), attributes)) {
+                hasStrategyForMedia = true;
+                mediaStrategy = strategy;
+                break;
+            }
+        }
+    }
+
+    if (!hasStrategyForMedia) {
+        GTEST_SKIP() << "No strategies returned for music media";
+    }
+
+    AudioDeviceTypeAddrVector devices;
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDevicesForRoleAndStrategy(PRODUCT_STRATEGY_NONE,
+                                                                   DEVICE_ROLE_PREFERRED, devices));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDevicesForRoleAndStrategy(mediaStrategy.getId(),
+                                                                   DEVICE_ROLE_NONE, devices));
+    status = AudioSystem::getDevicesForRoleAndStrategy(mediaStrategy.getId(), DEVICE_ROLE_PREFERRED,
+                                                       devices);
+    if (status == NAME_NOT_FOUND) {
+        AudioDeviceTypeAddrVector outputDevices;
+        for (const auto& port : devicePorts) {
+            if (port.ext.device.type == AUDIO_DEVICE_OUT_SPEAKER) {
+                const AudioDeviceTypeAddr outputDevice(port.ext.device.type,
+                                                       port.ext.device.address);
+                outputDevices.push_back(outputDevice);
+            }
+        }
+        EXPECT_EQ(OK, AudioSystem::setDevicesRoleForStrategy(mediaStrategy.getId(),
+                                                             DEVICE_ROLE_PREFERRED, outputDevices));
+        EXPECT_EQ(OK, AudioSystem::getDevicesForRoleAndStrategy(mediaStrategy.getId(),
+                                                                DEVICE_ROLE_PREFERRED, devices));
+        EXPECT_EQ(devices, outputDevices);
+        EXPECT_EQ(OK, AudioSystem::clearDevicesRoleForStrategy(mediaStrategy.getId(),
+                                                               DEVICE_ROLE_PREFERRED));
+        EXPECT_EQ(NAME_NOT_FOUND, AudioSystem::getDevicesForRoleAndStrategy(
+                                          mediaStrategy.getId(), DEVICE_ROLE_PREFERRED, devices));
+    }
+}
+
+TEST_F(AudioSystemTest, VolumeIndexForAttributes) {
+    AudioVolumeGroupVector groups;
+    EXPECT_EQ(OK, AudioSystem::listAudioVolumeGroups(groups));
+    for (const auto& group : groups) {
+        if (group.getAudioAttributes().empty()) continue;
+        const audio_attributes_t attr = group.getAudioAttributes()[0];
+        if (attr == AUDIO_ATTRIBUTES_INITIALIZER) continue;
+        audio_stream_type_t streamType = AudioSystem::attributesToStreamType(attr);
+        if (streamType >= AUDIO_STREAM_PUBLIC_CNT) continue;
+
+        volume_group_t vg;
+        EXPECT_EQ(OK, AudioSystem::getVolumeGroupFromAudioAttributes(attr, vg));
+        EXPECT_EQ(group.getId(), vg);
+
+        int index;
+        EXPECT_EQ(OK,
+                  AudioSystem::getVolumeIndexForAttributes(attr, index, AUDIO_DEVICE_OUT_SPEAKER));
+
+        int indexTest;
+        EXPECT_EQ(OK, AudioSystem::getStreamVolumeIndex(streamType, &indexTest,
+                                                        AUDIO_DEVICE_OUT_SPEAKER));
+        EXPECT_EQ(index, indexTest);
+    }
+}
+
+TEST_F(AudioSystemTest, DevicesRoleForCapturePreset) {
+    std::vector<struct audio_port_v7> ports;
+    status_t status = listAudioPorts(ports);
+    ASSERT_EQ(OK, status);
+
+    if (ports.empty()) {
+        GTEST_SKIP() << "No ports returned by the audio system";
+    }
+
+    audio_devices_t inDeviceA = AUDIO_DEVICE_IN_BUILTIN_MIC;
+    audio_devices_t inDeviceB = AUDIO_DEVICE_IN_BUILTIN_MIC;
+    for (const auto& port : ports) {
+        if (port.role != AUDIO_PORT_ROLE_SOURCE || port.type != AUDIO_PORT_TYPE_DEVICE) continue;
+        if (port.ext.device.type == inDeviceA) continue;
+        inDeviceB = port.ext.device.type;
+        break;
+    }
+    const audio_source_t audioSource = AUDIO_SOURCE_MIC;
+    const device_role_t role = DEVICE_ROLE_PREFERRED;
+    const AudioDeviceTypeAddr inputDevice(inDeviceA, "");
+    const AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+    const AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+    const AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+
+    // Test invalid device when setting
+    EXPECT_EQ(BAD_VALUE,
+              AudioSystem::setDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    EXPECT_EQ(BAD_VALUE,
+              AudioSystem::addDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+    EXPECT_EQ(BAD_VALUE,
+              AudioSystem::removeDevicesRoleForCapturePreset(audioSource, role, outputDevices));
+
+    // Test invalid role
+    AudioDeviceTypeAddrVector devices;
+    EXPECT_EQ(BAD_VALUE, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource,
+                                                                        DEVICE_ROLE_NONE, devices));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::setDevicesRoleForCapturePreset(audioSource, DEVICE_ROLE_NONE,
+                                                                     inputDevices));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::addDevicesRoleForCapturePreset(audioSource, DEVICE_ROLE_NONE,
+                                                                     inputDevices));
+    EXPECT_EQ(BAD_VALUE, AudioSystem::removeDevicesRoleForCapturePreset(
+                                 audioSource, DEVICE_ROLE_NONE, inputDevices));
+    EXPECT_EQ(BAD_VALUE,
+              AudioSystem::clearDevicesRoleForCapturePreset(audioSource, DEVICE_ROLE_NONE));
+
+    // Without a prior set, get/remove/clear must fail
+    EXPECT_EQ(NAME_NOT_FOUND,
+              AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_TRUE(devices.empty());
+    EXPECT_EQ(NAME_NOT_FOUND,
+              AudioSystem::removeDevicesRoleForCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(NAME_NOT_FOUND, AudioSystem::clearDevicesRoleForCapturePreset(audioSource, role));
+
+    // Test set/get devices role
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::setDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    ASSERT_EQ(NO_ERROR, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(devices, inputDevices);
+
+    // Test that setting again replaces the previously set devices
+    const AudioDeviceTypeAddr inputDevice2 = AudioDeviceTypeAddr(inDeviceB, "");
+    AudioDeviceTypeAddrVector inputDevices2 = {inputDevice2};
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::setDevicesRoleForCapturePreset(audioSource, role, inputDevices2));
+    devices.clear();
+    EXPECT_EQ(NO_ERROR, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(devices, inputDevices2);
+
+    // Test add devices
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::addDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    EXPECT_EQ(NO_ERROR, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(2, devices.size());
+    EXPECT_TRUE(std::find(devices.begin(), devices.end(), inputDevice) != devices.end());
+    EXPECT_TRUE(std::find(devices.begin(), devices.end(), inputDevice2) != devices.end());
+
+    // Test remove devices
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+    devices.clear();
+    EXPECT_EQ(NO_ERROR, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(devices, inputDevices2);
+
+    // Test removing devices that are not currently set for the role
+    EXPECT_EQ(BAD_VALUE,
+              AudioSystem::removeDevicesRoleForCapturePreset(audioSource, role, inputDevices));
+
+    // Test clear devices
+    EXPECT_EQ(NO_ERROR, AudioSystem::clearDevicesRoleForCapturePreset(audioSource, role));
+    devices.clear();
+    EXPECT_EQ(NAME_NOT_FOUND,
+              AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+
+    AudioDeviceTypeAddrVector inputDevices3 = {inputDevice, inputDevice2};
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::setDevicesRoleForCapturePreset(audioSource, role, inputDevices3));
+    devices.clear();
+    EXPECT_EQ(NO_ERROR, AudioSystem::getDevicesForRoleAndCapturePreset(audioSource, role, devices));
+    EXPECT_EQ(2, devices.size());
+    EXPECT_TRUE(std::find(devices.begin(), devices.end(), inputDevice) != devices.end());
+    EXPECT_TRUE(std::find(devices.begin(), devices.end(), inputDevice2) != devices.end());
+    EXPECT_EQ(NO_ERROR, AudioSystem::clearDevicesRoleForCapturePreset(audioSource, role));
+}
+
+TEST_F(AudioSystemTest, UidDeviceAffinities) {
+    uid_t uid = getuid();
+
+    // Test an invalid device (an input device, see audio_is_input_device)
+    AudioDeviceTypeAddr inputDevice(AUDIO_DEVICE_IN_BUILTIN_MIC, "");
+    AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+    EXPECT_EQ(BAD_VALUE, AudioSystem::setUidDeviceAffinities(uid, inputDevices));
+
+    // Test a valid device (an output device, see audio_is_output_device)
+    AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+    AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+    EXPECT_EQ(NO_ERROR, AudioSystem::setUidDeviceAffinities(uid, outputDevices));
+    EXPECT_EQ(NO_ERROR, AudioSystem::removeUidDeviceAffinities(uid));
+}
+
+TEST_F(AudioSystemTest, UserIdDeviceAffinities) {
+    int userId = 200;
+
+    // Test an invalid device (an input device, see audio_is_input_device)
+    AudioDeviceTypeAddr inputDevice(AUDIO_DEVICE_IN_BUILTIN_MIC, "");
+    AudioDeviceTypeAddrVector inputDevices = {inputDevice};
+    EXPECT_EQ(BAD_VALUE, AudioSystem::setUserIdDeviceAffinities(userId, inputDevices));
+
+    // Test a valid device (an output device, see audio_is_output_device)
+    AudioDeviceTypeAddr outputDevice(AUDIO_DEVICE_OUT_SPEAKER, "");
+    AudioDeviceTypeAddrVector outputDevices = {outputDevice};
+    EXPECT_EQ(NO_ERROR, AudioSystem::setUserIdDeviceAffinities(userId, outputDevices));
+    EXPECT_EQ(NO_ERROR, AudioSystem::removeUserIdDeviceAffinities(userId));
+}
+
+namespace {
+
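+// RAII helper: enables simulation of external device connections for the duration
+// of a test and, if enabling succeeded, disables it again on destruction.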
+class WithSimulatedDeviceConnections {
+  public:
+    WithSimulatedDeviceConnections()
+        : mIsSupported(AudioSystem::setSimulateDeviceConnections(true) == OK) {}
+    ~WithSimulatedDeviceConnections() {
+        if (mIsSupported) {
+            if (status_t status = AudioSystem::setSimulateDeviceConnections(false); status != OK) {
+                ALOGE("Error restoring device connections simulation state: %d", status);
+            }
+        }
+    }
+    bool isSupported() const { return mIsSupported; }
+
+  private:
+    const bool mIsSupported;
+};
+
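+// Returns a copy of 'port' whose device address is replaced with a process-unique value
+// of the address kind (id, MAC, IPv4, IPv6 or ALSA) suggested for the device type.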
+android::media::audio::common::AudioPort GenerateUniqueDeviceAddress(
+        const android::media::audio::common::AudioPort& port) {
+    static int nextId = 0;
+    using Tag = AudioDeviceAddress::Tag;
+    AudioDeviceAddress address;
+    switch (suggestDeviceAddressTag(port.ext.get<AudioPortExt::Tag::device>().device.type)) {
+        case Tag::id:
+            address = AudioDeviceAddress::make<Tag::id>(std::to_string(++nextId));
+            break;
+        case Tag::mac:
+            address = AudioDeviceAddress::make<Tag::mac>(
+                    std::vector<uint8_t>{1, 2, 3, 4, 5, static_cast<uint8_t>(++nextId & 0xff)});
+            break;
+        case Tag::ipv4:
+            address = AudioDeviceAddress::make<Tag::ipv4>(
+                    std::vector<uint8_t>{192, 168, 0, static_cast<uint8_t>(++nextId & 0xff)});
+            break;
+        case Tag::ipv6:
+            address = AudioDeviceAddress::make<Tag::ipv6>(std::vector<int32_t>{
+                    0xfc00, 0x0123, 0x4567, 0x89ab, 0xcdef, 0, 0, ++nextId & 0xffff});
+            break;
+        case Tag::alsa:
+            address = AudioDeviceAddress::make<Tag::alsa>(std::vector<int32_t>{1, ++nextId});
+            break;
+    }
+    android::media::audio::common::AudioPort result = port;
+    result.ext.get<AudioPortExt::Tag::device>().device.address = std::move(address);
+    return result;
+}
+
+}  // namespace
+
+TEST_F(AudioSystemTest, SetDeviceConnectedState) {
+    WithSimulatedDeviceConnections connSim;
+    if (!connSim.isSupported()) {
+        GTEST_SKIP() << "Simulation of external device connections not supported";
+    }
+    std::vector<media::AudioPortFw> ports;
+    ASSERT_EQ(OK, AudioSystem::listDeclaredDevicePorts(media::AudioPortRole::NONE, &ports));
+    if (ports.empty()) {
+        GTEST_SKIP() << "No ports returned by the audio system";
+    }
+    const std::set<AudioDeviceType> typesToUse{
+            AudioDeviceType::IN_DEVICE,       AudioDeviceType::IN_HEADSET,
+            AudioDeviceType::IN_MICROPHONE,   AudioDeviceType::OUT_DEVICE,
+            AudioDeviceType::OUT_HEADPHONE,   AudioDeviceType::OUT_HEADSET,
+            AudioDeviceType::OUT_HEARING_AID, AudioDeviceType::OUT_SPEAKER};
+    std::vector<media::AudioPortFw> externalDevicePorts;
+    for (const auto& port : ports) {
+        if (const auto& device = port.hal.ext.get<AudioPortExt::Tag::device>().device;
+            !device.type.connection.empty() && typesToUse.count(device.type.type)) {
+            externalDevicePorts.push_back(port);
+        }
+    }
+    if (externalDevicePorts.empty()) {
+        GTEST_SKIP() << "No ports for considered non-attached devices";
+    }
+    for (auto& port : externalDevicePorts) {
+        android::media::audio::common::AudioPort aidlPort = GenerateUniqueDeviceAddress(port.hal);
+        SCOPED_TRACE(aidlPort.toString());
+        audio_devices_t type;
+        char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+        status_t status = aidl2legacy_AudioDevice_audio_device(
+                aidlPort.ext.get<AudioPortExt::Tag::device>().device, &type, address);
+        ASSERT_EQ(OK, status);
+        audio_policy_dev_state_t deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+        if (deviceState != AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) continue;
+        // !!! Instead of the default format, use each format from 'ext.encodedFormats'
+        // !!! if they are not empty
+        status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+        EXPECT_EQ(OK, status);
+        if (status != OK) continue;
+        deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE, deviceState);
+        status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                       aidlPort, AUDIO_FORMAT_DEFAULT);
+        EXPECT_EQ(OK, status);
+        deviceState = AudioSystem::getDeviceConnectionState(type, address);
+        EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+    }
+}
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
new file mode 100644
index 0000000..8daba0a
--- /dev/null
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+
+#include <gtest/gtest.h>
+
+#include "audio_test_utils.h"
+
+using namespace android;
+
+TEST(AudioTrackTest, TestPlayTrack) {
+    const auto ap = sp<AudioPlayback>::make(44100 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
+                                            AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE,
+                                            AUDIO_SESSION_NONE, AudioTrack::TRANSFER_OBTAIN);
+    ASSERT_NE(nullptr, ap);
+    ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, ap->create()) << "track creation failed";
+    EXPECT_EQ(OK, ap->start()) << "audio track start failed";
+    EXPECT_EQ(OK, ap->onProcess());
+    ap->stop();
+}
+
+TEST(AudioTrackTest, TestSeek) {
+    const auto ap = sp<AudioPlayback>::make(
+            44100 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO);
+    ASSERT_NE(nullptr, ap);
+    ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, ap->create()) << "track creation failed";
+    EXPECT_EQ(OK, ap->start()) << "audio track start failed";
+    EXPECT_EQ(OK, ap->onProcess(true));
+    ap->stop();
+}
+
+TEST(AudioTrackTest, OffloadOrDirectPlayback) {
+    audio_offload_info_t info = AUDIO_INFO_INITIALIZER;
+    info.sample_rate = 44100;
+    info.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    info.format = AUDIO_FORMAT_MP3;
+    info.stream_type = AUDIO_STREAM_MUSIC;
+    info.bit_rate = 192;
+    info.duration_us = 120 * 1000000;  // 120 sec
+
+    audio_config_base_t config = {/* .sample_rate = */ info.sample_rate,
+                                  /* .channel_mask = */ info.channel_mask,
+                                  /* .format = */ AUDIO_FORMAT_PCM_16_BIT};
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.flags = AUDIO_FLAG_NONE;
+
+    if (!AudioTrack::isDirectOutputSupported(config, attributes) &&
+        AUDIO_OFFLOAD_NOT_SUPPORTED == AudioSystem::getOffloadSupport(info)) {
+        GTEST_SKIP() << "offload or direct playback is not supported";
+    }
+    sp<AudioPlayback> ap = nullptr;
+    if (AUDIO_OFFLOAD_NOT_SUPPORTED != AudioSystem::getOffloadSupport(info)) {
+        ap = sp<AudioPlayback>::make(info.sample_rate, info.format, info.channel_mask,
+                                     AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD, AUDIO_SESSION_NONE,
+                                     AudioTrack::TRANSFER_OBTAIN, nullptr, &info);
+    } else {
+        ap = sp<AudioPlayback>::make(config.sample_rate, config.format, config.channel_mask,
+                                     AUDIO_OUTPUT_FLAG_DIRECT, AUDIO_SESSION_NONE,
+                                     AudioTrack::TRANSFER_OBTAIN);
+    }
+    ASSERT_NE(nullptr, ap);
+    EXPECT_EQ(OK, ap->create()) << "track creation failed";
+    audio_dual_mono_mode_t mode;
+    if (OK != ap->getAudioTrackHandle()->getDualMonoMode(&mode)) {
+        std::cerr << "no dual mono presentation is available" << std::endl;
+    }
+    if (OK != ap->getAudioTrackHandle()->setDualMonoMode(AUDIO_DUAL_MONO_MODE_LR)) {
+        std::cerr << "no dual mono presentation is available" << std::endl;
+    } else {
+        EXPECT_EQ(OK, ap->getAudioTrackHandle()->getDualMonoMode(&mode));
+        EXPECT_EQ(AUDIO_DUAL_MONO_MODE_LR, mode);
+    }
+    float leveldB;
+    if (OK != ap->getAudioTrackHandle()->getAudioDescriptionMixLevel(&leveldB)) {
+        std::cerr << "Audio Description mixing is unavailable" << std::endl;
+    }
+    if (OK != ap->getAudioTrackHandle()->setAudioDescriptionMixLevel(3.14f)) {
+        std::cerr << "Audio Description mixing is unavailable" << std::endl;
+    } else {
+        EXPECT_EQ(OK, ap->getAudioTrackHandle()->getAudioDescriptionMixLevel(&leveldB));
+        EXPECT_EQ(3.14f, leveldB);
+    }
+    AudioPlaybackRate audioRate;
+    audioRate = ap->getAudioTrackHandle()->getPlaybackRate();
+    std::cerr << "playback speed :: " << audioRate.mSpeed << std::endl
+              << "playback pitch :: " << audioRate.mPitch << std::endl;
+    audioRate.mSpeed = 2.0f;
+    audioRate.mPitch = 2.0f;
+    audioRate.mStretchMode = AUDIO_TIMESTRETCH_STRETCH_VOICE;
+    audioRate.mFallbackMode = AUDIO_TIMESTRETCH_FALLBACK_MUTE;
+    EXPECT_TRUE(isAudioPlaybackRateValid(audioRate));
+    if (OK != ap->getAudioTrackHandle()->setPlaybackRate(audioRate)) {
+        std::cerr << "unable to set playback rate parameters" << std::endl;
+    } else {
+        AudioPlaybackRate audioRateLocal;
+        audioRateLocal = ap->getAudioTrackHandle()->getPlaybackRate();
+        EXPECT_TRUE(isAudioPlaybackRateEqual(audioRate, audioRateLocal));
+    }
+    ap->stop();
+}
+
+TEST(AudioTrackTest, TestAudioCbNotifier) {
+    const auto ap = sp<AudioPlayback>::make(0 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
+                                            AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_FAST,
+                                            AUDIO_SESSION_NONE, AudioTrack::TRANSFER_SHARED);
+    ASSERT_NE(nullptr, ap);
+    ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+            << "Unable to open Resource";
+    EXPECT_EQ(OK, ap->create()) << "track creation failed";
+    EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->addAudioDeviceCallback(nullptr));
+    sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
+    sp<OnAudioDeviceUpdateNotifier> cbOld = sp<OnAudioDeviceUpdateNotifier>::make();
+    EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cbOld));
+    EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->addAudioDeviceCallback(cbOld));
+    EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cb));
+    EXPECT_EQ(OK, ap->start()) << "audio track start failed";
+    EXPECT_EQ(OK, ap->onProcess());
+    EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    EXPECT_EQ(cb->mAudioIo, ap->getAudioTrackHandle()->getOutput());
+    EXPECT_EQ(cb->mDeviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    String8 keys;
+    keys = ap->getAudioTrackHandle()->getParameters(keys);
+    if (!keys.isEmpty()) {
+        std::cerr << "track parameters :: " << keys << std::endl;
+    }
+    EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+    EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
+    EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
+    EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
+    ap->stop();
+}
+
+class AudioTrackCreateTest
+    : public ::testing::TestWithParam<std::tuple<uint32_t, audio_format_t, audio_channel_mask_t,
+                                                 audio_output_flags_t, audio_session_t>> {
+  public:
+    AudioTrackCreateTest()
+        : mSampleRate(std::get<0>(GetParam())),
+          mFormat(std::get<1>(GetParam())),
+          mChannelMask(std::get<2>(GetParam())),
+          mFlags(std::get<3>(GetParam())),
+          mSessionId(std::get<4>(GetParam())){};
+
+    const uint32_t mSampleRate;
+    const audio_format_t mFormat;
+    const audio_channel_mask_t mChannelMask;
+    const audio_output_flags_t mFlags;
+    const audio_session_t mSessionId;
+
+    sp<AudioPlayback> mAP;
+
+    virtual void SetUp() override {
+        mAP = sp<AudioPlayback>::make(mSampleRate, mFormat, mChannelMask, mFlags, mSessionId);
+        ASSERT_NE(nullptr, mAP);
+        ASSERT_EQ(OK, mAP->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
+                << "Unable to open Resource";
+        ASSERT_EQ(OK, mAP->create()) << "track creation failed";
+    }
+
+    virtual void TearDown() override {
+        if (mAP) mAP->stop();
+    }
+};
+
+TEST_P(AudioTrackCreateTest, TestCreateTrack) {
+    EXPECT_EQ(mFormat, mAP->getAudioTrackHandle()->format());
+    EXPECT_EQ(audio_channel_count_from_out_mask(mChannelMask),
+              mAP->getAudioTrackHandle()->channelCount());
+    if (mSampleRate != 0) EXPECT_EQ(mSampleRate, mAP->getAudioTrackHandle()->getSampleRate());
+    if (mSessionId != AUDIO_SESSION_NONE)
+        EXPECT_EQ(mSessionId, mAP->getAudioTrackHandle()->getSessionId());
+    EXPECT_EQ(mSampleRate, mAP->getAudioTrackHandle()->getOriginalSampleRate());
+    EXPECT_EQ(OK, mAP->start()) << "audio track start failed";
+    EXPECT_EQ(OK, mAP->onProcess());
+}
+
+// sampleRate, format, channelMask, flags, sessionId
+INSTANTIATE_TEST_SUITE_P(
+        AudioTrackParameterizedTest, AudioTrackCreateTest,
+        ::testing::Combine(::testing::Values(48000), ::testing::Values(AUDIO_FORMAT_PCM_16_BIT),
+                           ::testing::Values(AUDIO_CHANNEL_OUT_STEREO),
+                           ::testing::Values(AUDIO_OUTPUT_FLAG_NONE,
+                                             AUDIO_OUTPUT_FLAG_PRIMARY | AUDIO_OUTPUT_FLAG_FAST,
+                                             AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_FAST,
+                                             AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
+                           ::testing::Values(AUDIO_SESSION_NONE)));
diff --git a/media/libaudioclient/tests/bbb_1ch_8kHz_s16le.raw b/media/libaudioclient/tests/bbb_1ch_8kHz_s16le.raw
new file mode 100644
index 0000000..2d1e4bf
--- /dev/null
+++ b/media/libaudioclient/tests/bbb_1ch_8kHz_s16le.raw
Binary files differ
diff --git a/media/libaudioclient/tests/bbb_2ch_24kHz_s16le.raw b/media/libaudioclient/tests/bbb_2ch_24kHz_s16le.raw
new file mode 100644
index 0000000..c8ac5f7
--- /dev/null
+++ b/media/libaudioclient/tests/bbb_2ch_24kHz_s16le.raw
Binary files differ
diff --git a/media/libaudioclient/tests/test_create_audiorecord.cpp b/media/libaudioclient/tests/test_create_audiorecord.cpp
index 2e0883b..277110b 100644
--- a/media/libaudioclient/tests/test_create_audiorecord.cpp
+++ b/media/libaudioclient/tests/test_create_audiorecord.cpp
@@ -29,14 +29,13 @@
 
 #define NUM_ARGUMENTS 8
 #define VERSION_VALUE "1.0"
-#define PACKAGE_NAME  "AudioRecord test"
+#define PACKAGE_NAME "AudioRecord test"
 
 namespace android {
 
 using android::content::AttributionSourceState;
 
-int testRecord(FILE *inputFile, int outputFileFd)
-{
+int testRecord(FILE* inputFile, int outputFileFd) {
     char line[MAX_INPUT_FILE_LINE_LENGTH];
     uint32_t testCount = 0;
     Vector<String16> args;
@@ -47,11 +46,9 @@
     attributionSource.token = sp<BBinder>::make();
 
     if (inputFile == nullptr) {
-        sp<AudioRecord> record = new AudioRecord(AUDIO_SOURCE_DEFAULT,
-                                              0 /* sampleRate */,
-                                              AUDIO_FORMAT_DEFAULT,
-                                              AUDIO_CHANNEL_IN_MONO,
-                                              attributionSource);
+        sp<AudioRecord> record =
+                new AudioRecord(AUDIO_SOURCE_DEFAULT, 0 /* sampleRate */, AUDIO_FORMAT_DEFAULT,
+                                AUDIO_CHANNEL_IN_MONO, attributionSource);
         if (record == 0 || record->initCheck() != NO_ERROR) {
             write(outputFileFd, "Error creating AudioRecord\n",
                   sizeof("Error creating AudioRecord\n"));
@@ -80,11 +77,10 @@
         char statusStr[MAX_OUTPUT_FILE_LINE_LENGTH];
         bool fast = false;
 
-        if (sscanf(line, " %u %x %x %zu %d %x %u %u",
-                   &sampleRate, &format, &channelMask,
-                   &frameCount, &notificationFrames,
-                   &flags, &sessionId, &inputSource) != NUM_ARGUMENTS) {
-            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount+1);
+        if (sscanf(line, " %u %x %x %zu %d %x %u %u", &sampleRate, &format, &channelMask,
+                   &frameCount, &notificationFrames, &flags, &sessionId,
+                   &inputSource) != NUM_ARGUMENTS) {
+            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount + 1);
             ret = 1;
             continue;
         }
@@ -100,21 +96,10 @@
         sp<AudioRecord> record = new AudioRecord(attributionSource);
         const auto emptyCallback = sp<AudioRecord::IAudioRecordCallback>::make();
 
-        record->set(AUDIO_SOURCE_DEFAULT,
-                   sampleRate,
-                   format,
-                   channelMask,
-                   frameCount,
-                   fast ? emptyCallback : nullptr,
-                   notificationFrames,
-                   false,
-                   sessionId,
-                   fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT,
-                   flags,
-                   getuid(),
-                   getpid(),
-                   &attributes,
-                   AUDIO_PORT_HANDLE_NONE);
+        record->set(AUDIO_SOURCE_DEFAULT, sampleRate, format, channelMask, frameCount,
+                    fast ? emptyCallback : nullptr, notificationFrames, false, sessionId,
+                    fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT, flags,
+                    getuid(), getpid(), &attributes, AUDIO_PORT_HANDLE_NONE);
         status = record->initCheck();
         sprintf(statusStr, "\n#### Test %u status %d\n", testCount, status);
         write(outputFileFd, statusStr, strlen(statusStr));
@@ -126,11 +111,8 @@
     return ret;
 }
 
-}; // namespace android
+};  // namespace android
 
-
-int main(int argc, char **argv)
-{
+int main(int argc, char** argv) {
     return android::main(argc, argv, android::testRecord);
 }
-
diff --git a/media/libaudioclient/tests/test_create_audiotrack.cpp b/media/libaudioclient/tests/test_create_audiotrack.cpp
index e7231d3..4e09e21 100644
--- a/media/libaudioclient/tests/test_create_audiotrack.cpp
+++ b/media/libaudioclient/tests/test_create_audiotrack.cpp
@@ -32,18 +32,15 @@
 
 namespace android {
 
-int testTrack(FILE *inputFile, int outputFileFd)
-{
+int testTrack(FILE* inputFile, int outputFileFd) {
     char line[MAX_INPUT_FILE_LINE_LENGTH];
     uint32_t testCount = 0;
     Vector<String16> args;
     int ret = 0;
 
     if (inputFile == nullptr) {
-        sp<AudioTrack> track = new AudioTrack(AUDIO_STREAM_DEFAULT,
-                                              0 /* sampleRate */,
-                                              AUDIO_FORMAT_DEFAULT,
-                                              AUDIO_CHANNEL_OUT_STEREO);
+        sp<AudioTrack> track = new AudioTrack(AUDIO_STREAM_DEFAULT, 0 /* sampleRate */,
+                                              AUDIO_FORMAT_DEFAULT, AUDIO_CHANNEL_OUT_STEREO);
         if (track == 0 || track->initCheck() != NO_ERROR) {
             write(outputFileFd, "Error creating AudioTrack\n",
                   sizeof("Error creating AudioTrack\n"));
@@ -78,11 +75,10 @@
         bool offload = false;
         bool fast = false;
 
-        if (sscanf(line, " %u %x %x %zu %d %u %x %u %u %u",
-                   &sampleRate, &format, &channelMask,
-                   &frameCount, &notificationFrames, &useSharedBuffer,
-                   &flags, &sessionId, &usage, &contentType) != NUM_ARGUMENTS) {
-            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount+1);
+        if (sscanf(line, " %u %x %x %zu %d %u %x %u %u %u", &sampleRate, &format, &channelMask,
+                   &frameCount, &notificationFrames, &useSharedBuffer, &flags, &sessionId, &usage,
+                   &contentType) != NUM_ARGUMENTS) {
+            fprintf(stderr, "Malformed line for test #%u in input file\n", testCount + 1);
             ret = 1;
             continue;
         }
@@ -90,7 +86,7 @@
 
         if (useSharedBuffer != 0) {
             size_t heapSize = audio_channel_count_from_out_mask(channelMask) *
-                    audio_bytes_per_sample(format) * frameCount;
+                              audio_bytes_per_sample(format) * frameCount;
             heap = new MemoryDealer(heapSize, "AudioTrack Heap Base");
             sharedBuffer = heap->allocate(heapSize);
             frameCount = 0;
@@ -111,25 +107,13 @@
         attributes.usage = usage;
         sp<AudioTrack> track = new AudioTrack();
         const auto emptyCallback = sp<AudioTrack::IAudioTrackCallback>::make();
-        track->set(AUDIO_STREAM_DEFAULT,
-                   sampleRate,
-                   format,
-                   channelMask,
-                   frameCount,
-                   flags,
-                   (fast || offload) ? emptyCallback : nullptr,
-                   notificationFrames,
-                   sharedBuffer,
-                   false,
-                   sessionId,
-                   ((fast && sharedBuffer == 0) || offload) ?
-                           AudioTrack::TRANSFER_CALLBACK : AudioTrack::TRANSFER_DEFAULT,
-                   offload ? &offloadInfo : nullptr,
-                   AttributionSourceState(),
-                   &attributes,
-                   false,
-                   1.0f,
-                   AUDIO_PORT_HANDLE_NONE);
+        track->set(AUDIO_STREAM_DEFAULT, sampleRate, format, channelMask, frameCount, flags,
+                   (fast || offload) ? emptyCallback : nullptr, notificationFrames, sharedBuffer,
+                   false, sessionId,
+                   ((fast && sharedBuffer == 0) || offload) ? AudioTrack::TRANSFER_CALLBACK
+                                                            : AudioTrack::TRANSFER_DEFAULT,
+                   offload ? &offloadInfo : nullptr, AttributionSourceState(), &attributes, false,
+                   1.0f, AUDIO_PORT_HANDLE_NONE);
         status = track->initCheck();
         sprintf(statusStr, "\n#### Test %u status %d\n", testCount, status);
         write(outputFileFd, statusStr, strlen(statusStr));
@@ -141,11 +125,8 @@
     return ret;
 }
 
-}; // namespace android
+};  // namespace android
 
-
-int main(int argc, char **argv)
-{
+int main(int argc, char** argv) {
     return android::main(argc, argv, android::testTrack);
 }
-
diff --git a/media/libaudioclient/tests/test_create_utils.cpp b/media/libaudioclient/tests/test_create_utils.cpp
index caf5227..c2c2e8b 100644
--- a/media/libaudioclient/tests/test_create_utils.cpp
+++ b/media/libaudioclient/tests/test_create_utils.cpp
@@ -23,10 +23,10 @@
 
 namespace android {
 
-int readLine(FILE *inputFile, char *line, int size) {
+int readLine(FILE* inputFile, char* line, int size) {
     int ret = 0;
     while (true) {
-        char *str = fgets(line, size, inputFile);
+        char* str = fgets(line, size, inputFile);
         if (str == nullptr) {
             ret = -1;
             break;
@@ -42,8 +42,7 @@
     return ret;
 }
 
-bool checkVersion(FILE *inputFile, const char *version)
-{
+bool checkVersion(FILE* inputFile, const char* version) {
     char line[MAX_INPUT_FILE_LINE_LENGTH];
     char versionKey[MAX_INPUT_FILE_LINE_LENGTH];
     char versionValue[MAX_INPUT_FILE_LINE_LENGTH];
@@ -68,9 +67,8 @@
     return true;
 }
 
-int main(int argc, char **argv, test_func_t testFunc)
-{
-    FILE *inputFile = nullptr;
+int main(int argc, char** argv, test_func_t testFunc) {
+    FILE* inputFile = nullptr;
     int outputFileFd = STDOUT_FILENO;
     mode_t mode = S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH;
     int ret = 0;
@@ -96,7 +94,7 @@
         if (strcmp(*argv, "-o") == 0) {
             argv++;
             if (*argv) {
-                outputFileFd = open(*argv, O_WRONLY|O_CREAT, mode);
+                outputFileFd = open(*argv, O_WRONLY | O_CREAT, mode);
                 if (outputFileFd < 0) {
                     ret = 1;
                 }
@@ -126,5 +124,4 @@
     return ret;
 }
 
-}; // namespace android
-
+};  // namespace android
diff --git a/media/libaudioclient/tests/test_create_utils.h b/media/libaudioclient/tests/test_create_utils.h
index 9a6f9fa..110baf7 100644
--- a/media/libaudioclient/tests/test_create_utils.h
+++ b/media/libaudioclient/tests/test_create_utils.h
@@ -27,13 +27,12 @@
 
 namespace android {
 
-int readLine(FILE *inputFile, char *line, int size);
+int readLine(FILE* inputFile, char* line, int size);
 
-bool checkVersion(FILE *inputFile, const char *version);
+bool checkVersion(FILE* inputFile, const char* version);
 
+typedef int (*test_func_t)(FILE* inputFile, int outputFileFd);
 
-typedef int (*test_func_t)(FILE *inputFile, int outputFileFd);
+int main(int argc, char** argv, test_func_t testFunc);
 
-int main(int argc, char **argv, test_func_t testFunc);
-
-}; // namespace android
+};  // namespace android
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
new file mode 100644
index 0000000..c9b704d
--- /dev/null
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TrackPlayerBaseTest"
+
+#include <gtest/gtest.h>
+
+#include <media/TrackPlayerBase.h>
+
+using namespace android;
+using namespace android::media;
+
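+// Test subclass that re-exports the protected player*() controls of TrackPlayerBase.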
+class TrackPlayer : public TrackPlayerBase, public AudioTrack::IAudioTrackCallback {
+  public:
+    // methods protected in base class
+    using TrackPlayerBase::playerPause;
+    using TrackPlayerBase::playerSetVolume;
+    using TrackPlayerBase::playerStart;
+    using TrackPlayerBase::playerStop;
+};
+
+class TrackPlayerBaseTest
+    : public ::testing::TestWithParam<std::tuple<double, double, uint32_t, uint32_t>> {
+  public:
+    TrackPlayerBaseTest()
+        : mDuration(std::get<0>(GetParam())),
+          mPulseFreq(std::get<1>(GetParam())),
+          mChannelCount(std::get<2>(GetParam())),
+          mSampleRate(std::get<3>(GetParam())){};
+
+    virtual void SetUp() override {
+        mFrameCount = mDuration * mSampleRate;
+        audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(mChannelCount);
+        sp<AudioTrack> track = new AudioTrack(mStreamType, mSampleRate, mFormat, channelMask,
+                                              mFrameCount, mFlags, nullptr /* callback */,
+                                              0 /* notificationFrames */, AUDIO_SESSION_NONE);
+        ASSERT_EQ(track->initCheck(), NO_ERROR);
+
+        mPlayer = new TrackPlayer();
+        mPlayer->init(track.get(), mPlayer, PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA,
+                      AUDIO_SESSION_NONE);
+        sp<AudioTrack> playerTrack = mPlayer->mAudioTrack;
+        ASSERT_EQ(playerTrack->initCheck(), NO_ERROR);
+
+        mBufferSize = mFrameCount * playerTrack->frameSize();
+        mBuffer.resize(mBufferSize, 0);
+
+        // populate buffer
+        ASSERT_NE(mPulseFreq, 0);
+        int32_t nPulseSamples = mSampleRate / mPulseFreq;
+        int32_t pulseSize = nPulseSamples * playerTrack->frameSize();
+
+        int32_t marker = 0;
+        while (marker + pulseSize <= mBufferSize) {
+            memset(mBuffer.data() + marker, 127, pulseSize / 2);
+            marker += pulseSize;
+        }
+    }
+
+    void playBuffer() {
+        bool blocking = true;
+        ssize_t nbytes = mPlayer->mAudioTrack->write(mBuffer.data(), mBufferSize, blocking);
+        EXPECT_EQ(nbytes, mBufferSize) << "Did not write all data in blocking mode";
+    }
+
+    const double mDuration;  // seconds
+    sp<TrackPlayer> mPlayer;
+
+  private:
+    const double mPulseFreq;
+    const uint32_t mChannelCount;
+    const uint32_t mSampleRate;
+
+    const audio_format_t mFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_output_flags_t mFlags = AUDIO_OUTPUT_FLAG_NONE;
+    const audio_stream_type_t mStreamType = AUDIO_STREAM_MUSIC;
+
+    int32_t mBufferSize;
+    int32_t mFrameCount;
+    std::vector<uint8_t> mBuffer;
+};
+
+class PlaybackTestParam : public TrackPlayerBaseTest {};
+
+TEST_P(PlaybackTestParam, PlaybackTest) {
+    // no-op implementation
+    EXPECT_TRUE(mPlayer->setStartDelayMs(0).isOk());
+
+    ASSERT_EQ(mPlayer->playerStart(), NO_ERROR);
+    ASSERT_NO_FATAL_FAILURE(playBuffer());
+    EXPECT_EQ(mPlayer->playerStop(), NO_ERROR);
+}
+
+INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, PlaybackTestParam,
+                         ::testing::Values(std::make_tuple(2.5, 25.0, 2, 48000)));
+
+class ChangeVolumeTestParam : public TrackPlayerBaseTest {};
+
+TEST_P(ChangeVolumeTestParam, ChangeVolumeTest) {
+    float volume = 1.0f;
+    (void)mPlayer->setPlayerVolume(volume / 2, volume);
+
+    ASSERT_TRUE(mPlayer->start().isOk());
+    ASSERT_EQ(mPlayer->playerSetVolume(), NO_ERROR);
+
+    ASSERT_NO_FATAL_FAILURE(playBuffer());
+
+    EXPECT_TRUE(mPlayer->stop().isOk());
+
+    std::vector<float> setVol = {0.95f, 0.05f, 0.5f, 0.25f, -1.0f, 1.0f, 1.0f};
+    std::vector<float> setPan = {0.0f, 0.0f, 1.0f, -1.0f, -1.0f, 0.5f, -0.5f};
+
+    ASSERT_TRUE(mPlayer->start().isOk());
+
+    for (int32_t i = 0; i < setVol.size(); i++) {
+        EXPECT_TRUE(mPlayer->setVolume(setVol[i]).isOk());
+        EXPECT_TRUE(mPlayer->setPan(setPan[i]).isOk());
+        ASSERT_NO_FATAL_FAILURE(playBuffer());
+    }
+    EXPECT_TRUE(mPlayer->stop().isOk());
+}
+
+INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, ChangeVolumeTestParam,
+                         ::testing::Values(std::make_tuple(1.0, 100.0, 1, 24000)));
+
+class PauseTestParam : public TrackPlayerBaseTest {};
+
+TEST_P(PauseTestParam, PauseTest) {
+    ASSERT_EQ(mPlayer->playerStart(), NO_ERROR);
+    ASSERT_NO_FATAL_FAILURE(playBuffer());
+
+    ASSERT_EQ(mPlayer->playerPause(), NO_ERROR);
+    ASSERT_EQ(mPlayer->playerStart(), NO_ERROR);
+
+    ASSERT_NO_FATAL_FAILURE(playBuffer());
+
+    EXPECT_EQ(mPlayer->playerStop(), NO_ERROR);
+
+    for (int32_t i = 0; i < 5; i++) {
+        ASSERT_TRUE(mPlayer->start().isOk());
+        ASSERT_NO_FATAL_FAILURE(playBuffer());
+        ASSERT_TRUE(mPlayer->pause().isOk());
+    }
+    EXPECT_TRUE(mPlayer->stop().isOk());
+}
+
+INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, PauseTestParam,
+                         ::testing::Values(std::make_tuple(1.0, 75.0, 2, 24000)));
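
For orientation, the tests added above are parameterized over (duration in seconds, pulse frequency in Hz, channel count, sample rate) tuples that feed the TrackPlayerBaseTest constructor. A minimal sketch of registering one more scenario, with illustrative values that are not part of this change, would be:

    // Hypothetical extra parameter set: 0.5 s of a 50 Hz pulse train, stereo, 48 kHz.
    // The tuple maps to (mDuration, mPulseFreq, mChannelCount, mSampleRate).
    INSTANTIATE_TEST_SUITE_P(TrackPlayerTestExtra, PauseTestParam,
                             ::testing::Values(std::make_tuple(0.5, 50.0, 2, 48000)));
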
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index 159f898..c758fcd 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -18,21 +18,22 @@
 
     export_include_dirs: ["include"],
     header_libs: [
-        "libaudioclient_aidl_conversion_util",
+        "libaudio_aidl_conversion_common_util_cpp",
         "libaudio_system_headers",
         "libmedia_helper_headers",
     ],
     export_header_lib_headers: [
-        "libaudioclient_aidl_conversion_util",
+        "libaudio_aidl_conversion_common_util_cpp",
         "libaudio_system_headers",
         "libmedia_helper_headers",
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_export_static",
+    ],
     static_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
     ],
     export_static_lib_headers: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
     ],
     host_supported: true,
@@ -57,8 +58,11 @@
         "DeviceDescriptorBase.cpp",
     ],
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_export_shared",
+    ],
+
     shared_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libaudioutils",
@@ -70,7 +74,6 @@
     ],
 
     export_shared_lib_headers: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
     ],
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 0a8188f..202a400 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -77,6 +77,13 @@
     return audioDeviceOutLeAudioUnicastSet;
 }
 
+const DeviceTypeSet& getAudioDeviceOutLeAudioBroadcastSet() {
+    static const DeviceTypeSet audioDeviceOutLeAudioBroadcastSet = DeviceTypeSet(
+            std::begin(AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY),
+            std::end(AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY));
+    return audioDeviceOutLeAudioBroadcastSet;
+}
+
 std::string deviceTypesToString(const DeviceTypeSet &deviceTypes) {
     if (deviceTypes.empty()) {
         return "Empty device types";
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index 4a7e956..ad39d32 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -150,6 +150,20 @@
     return remainedDevices;
 }
 
+AudioDeviceTypeAddrVector joinDeviceTypeAddrs(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToJoin) {
+    std::set<AudioDeviceTypeAddr> devicesSet(devices.begin(), devices.end());
+    std::set<AudioDeviceTypeAddr> devicesToJoinSet(devicesToJoin.begin(), devicesToJoin.end());
+    AudioDeviceTypeAddrVector joinedDevices;
+
+    std::set_union(devicesSet.begin(), devicesSet.end(),
+                   devicesToJoinSet.begin(), devicesToJoinSet.end(),
+                   std::back_inserter(joinedDevices));
+
+    return joinedDevices;
+}
+
 std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
                                           bool includeSensitiveInfo) {
     std::stringstream stream;
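
As a usage sketch for the joinDeviceTypeAddrs() helper added above (device types and addresses are illustrative, not taken from this change): the result is the deduplicated union of both vectors, ordered by AudioDeviceTypeAddr's comparison operator.

    AudioDeviceTypeAddrVector speakers = {
            AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, "")};
    AudioDeviceTypeAddrVector headsetAndSpeaker = {
            AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_WIRED_HEADSET, ""),
            AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_SPEAKER, "")};
    // The shared speaker entry is collapsed, leaving two unique devices.
    AudioDeviceTypeAddrVector joined = joinDeviceTypeAddrs(speakers, headsetAndSpeaker);
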
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 4513323..6e05abc 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -222,7 +222,7 @@
            mExtraAudioDescriptors == other->getExtraAudioDescriptors();
 }
 
-status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
+status_t AudioPort::writeToParcelable(media::AudioPortFw* parcelable) const {
     parcelable->hal.name = mName;
     parcelable->sys.type = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_type_t_AudioPortType(mType));
@@ -249,7 +249,7 @@
     return OK;
 }
 
-status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
+status_t AudioPort::readFromParcelable(const media::AudioPortFw& parcelable) {
     mName = parcelable.hal.name;
     mType = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioPortType_audio_port_type_t(parcelable.sys.type));
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 2170cd8..999e263 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -327,6 +327,24 @@
     return false;
 }
 
+const SampleRateSet AudioProfileVector::getSampleRatesFor(audio_format_t format) const {
+    for (const auto& profile : *this) {
+        if (profile->getFormat() == format) {
+            return profile->getSampleRates();
+        }
+    }
+    return {};
+}
+
+const ChannelMaskSet AudioProfileVector::getChannelMasksFor(audio_format_t format) const {
+    for (const auto& profile : *this) {
+        if (profile->getFormat() == format) {
+            return profile->getChannels();
+        }
+    }
+    return {};
+}
+
 bool AudioProfileVector::contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags) const
 {
     for (const auto& audioProfile : *this) {
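
The new getSampleRatesFor()/getChannelMasksFor() accessors return the sets of the first profile matching the requested format, or an empty set when no profile matches. A hedged caller sketch, where `profiles` stands for any AudioProfileVector and the fallback rate is illustrative:

    const SampleRateSet rates = profiles.getSampleRatesFor(AUDIO_FORMAT_PCM_16_BIT);
    // Pick the highest advertised rate, falling back to 48 kHz when the format is absent.
    const uint32_t sampleRate = rates.empty() ? 48000 : *rates.rbegin();
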
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 5ffbffc..2bb6afc 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -126,7 +126,14 @@
                     "%*sEncapsulation modes: %u, metadata types: %u\n", spaces, "",
                     mEncapsulationModes, mEncapsulationMetadataTypes));
 
-    AudioPort::dump(dst, spaces, nullptr, verbose);
+    std::string portStr;
+    AudioPort::dump(&portStr, spaces, nullptr, verbose);
+    if (!portStr.empty()) {
+        if (!mName.empty()) {
+            dst->append(base::StringPrintf("%*s", spaces, ""));
+        }
+        dst->append(portStr);
+    }
 }
 
 std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
@@ -174,7 +181,7 @@
     return false;
 }
 
-status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
+status_t DeviceDescriptorBase::writeToParcelable(media::AudioPortFw* parcelable) const {
     AudioPort::writeToParcelable(parcelable);
     AudioPortConfig::writeToParcelable(&parcelable->sys.activeConfig.hal, useInputChannelMask());
     parcelable->hal.id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
@@ -186,17 +193,17 @@
     deviceExt.encodedFormats = VALUE_OR_RETURN_STATUS(
             convertContainer<std::vector<media::audio::common::AudioFormatDescription>>(
                     mEncodedFormats, legacy2aidl_audio_format_t_AudioFormatDescription));
+    deviceExt.encapsulationModes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
+    deviceExt.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
     UNION_SET(parcelable->hal.ext, device, deviceExt);
     media::AudioPortDeviceExtSys deviceSys;
-    deviceSys.encapsulationModes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
-    deviceSys.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
     UNION_SET(parcelable->sys.ext, device, deviceSys);
     return OK;
 }
 
-status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
+status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPortFw& parcelable) {
     if (parcelable.sys.type != media::AudioPortType::DEVICE) {
         return BAD_VALUE;
     }
@@ -214,12 +221,12 @@
     mEncodedFormats = VALUE_OR_RETURN_STATUS(
             convertContainer<FormatVector>(deviceExt.encodedFormats,
                     aidl2legacy_AudioFormatDescription_audio_format_t));
+    mEncapsulationModes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMode_mask(deviceExt.encapsulationModes));
+    mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioEncapsulationMetadataType_mask(deviceExt.encapsulationMetadataTypes));
     media::AudioPortDeviceExtSys deviceSys = VALUE_OR_RETURN_STATUS(
             UNION_GET(parcelable.sys.ext, device));
-    mEncapsulationModes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMode_mask(deviceSys.encapsulationModes));
-    mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(deviceSys.encapsulationMetadataTypes));
     return OK;
 }
 
@@ -245,7 +252,7 @@
 }
 
 ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl) {
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl) {
     sp<DeviceDescriptorBase> result = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
     status_t status = result->readFromParcelable(aidl);
     if (status != OK) {
@@ -254,9 +261,9 @@
     return result;
 }
 
-ConversionResult<media::AudioPort>
+ConversionResult<media::AudioPortFw>
 legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy) {
-    media::AudioPort aidl;
+    media::AudioPortFw aidl;
     status_t status = legacy->writeToParcelable(&aidl);
     if (status != OK) {
         return base::unexpected(status);
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index efe8437..a4e271e 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -7,7 +7,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index b6e6c84..88dcee9 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -31,6 +31,7 @@
 using DeviceTypeSet = std::set<audio_devices_t>;
 using FormatSet = std::set<audio_format_t>;
 using SampleRateSet = std::set<uint32_t>;
+using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
 
 using FormatVector = std::vector<audio_format_t>;
 
@@ -42,6 +43,7 @@
 const DeviceTypeSet& getAudioDeviceInAllUsbSet();
 const DeviceTypeSet& getAudioDeviceOutAllBleSet();
 const DeviceTypeSet& getAudioDeviceOutLeAudioUnicastSet();
+const DeviceTypeSet& getAudioDeviceOutLeAudioBroadcastSet();
 
 template<typename T>
 static std::vector<T> Intersection(const std::set<T>& a, const std::set<T>& b) {
@@ -66,6 +68,9 @@
     for (const auto &channel : channelMasks) {
         if (audio_channel_mask_out_to_in(channel) != AUDIO_CHANNEL_INVALID) {
             inMaskSet.insert(audio_channel_mask_out_to_in(channel));
+        } else if (audio_channel_mask_get_representation(channel)
+                    == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+            inMaskSet.insert(channel);
         }
     }
     return inMaskSet;
@@ -76,6 +81,9 @@
     for (const auto &channel : channelMasks) {
         if (audio_channel_mask_in_to_out(channel) != AUDIO_CHANNEL_INVALID) {
             outMaskSet.insert(audio_channel_mask_in_to_out(channel));
+        } else if (audio_channel_mask_get_representation(channel)
+                    == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+            outMaskSet.insert(channel);
         }
     }
     return outMaskSet;
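
The two hunks above extend the in/out channel-mask conversions so that index-representation masks, which carry no input/output direction, pass through unchanged instead of being dropped. A small sketch of the expected behavior, assuming <system/audio.h> and illustrative masks:

    const audio_channel_mask_t stereoOut = AUDIO_CHANNEL_OUT_STEREO;
    const audio_channel_mask_t index2 = audio_channel_index_mask_from_count(2);
    // Positional output masks still convert to their input counterparts...
    assert(audio_channel_mask_out_to_in(stereoOut) == AUDIO_CHANNEL_IN_STEREO);
    // ...while index masks are recognized by representation and kept as-is.
    assert(audio_channel_mask_get_representation(index2) == AUDIO_CHANNEL_REPRESENTATION_INDEX);
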
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index 11aa222..b2f2bd0 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -84,6 +84,14 @@
         const AudioDeviceTypeAddrVector& devices,
         const AudioDeviceTypeAddrVector& devicesToExclude);
 
+/**
+ * Return a collection of AudioDeviceTypeAddrs that is the union of `devices` and
+ * `devicesToJoin`.
+ */
+AudioDeviceTypeAddrVector joinDeviceTypeAddrs(
+        const AudioDeviceTypeAddrVector& devices,
+        const AudioDeviceTypeAddrVector& devicesToJoin);
+
 std::string dumpAudioDeviceTypeAddrVector(const AudioDeviceTypeAddrVector& deviceTypeAddrs,
                                           bool includeSensitiveInfo=false);
 
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index d6a098f..77e58ed 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -19,8 +19,8 @@
 #include <string>
 #include <type_traits>
 
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
 #include <android/media/audio/common/ExtraAudioDescriptor.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
@@ -72,7 +72,7 @@
     AudioProfileVector &getAudioProfiles() { return mProfiles; }
 
     void setExtraAudioDescriptors(
-            const std::vector<media::audio::common::ExtraAudioDescriptor> extraAudioDescriptors) {
+            const std::vector<media::audio::common::ExtraAudioDescriptor>& extraAudioDescriptors) {
         mExtraAudioDescriptors = extraAudioDescriptors;
     }
     std::vector<media::audio::common::ExtraAudioDescriptor> &getExtraAudioDescriptors() {
@@ -118,8 +118,8 @@
 
     bool equals(const sp<AudioPort>& other) const;
 
-    status_t writeToParcelable(media::AudioPort* parcelable) const;
-    status_t readFromParcelable(const media::AudioPort& parcelable);
+    status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
 
     AudioGains mGains; // gain controllers
     // Maximum number of input or output streams that can be simultaneously
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 79dfd12..a668afe 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -139,6 +139,9 @@
     bool hasDynamicProfile() const;
     bool hasDynamicRateFor(audio_format_t format) const;
 
+    const SampleRateSet getSampleRatesFor(audio_format_t format) const;
+    const ChannelMaskSet getChannelMasksFor(audio_format_t format) const;
+
     bool contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags = false) const;
 
     virtual void dump(std::string *dst, int spaces) const;
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index 1f0c768..501831d 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -18,7 +18,7 @@
 
 #include <vector>
 
-#include <android/media/AudioPort.h>
+#include <android/media/AudioPortFw.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioContainers.h>
@@ -53,7 +53,7 @@
 
     // AudioPortConfig
     virtual sp<AudioPort> getAudioPort() const {
-        return static_cast<AudioPort*>(const_cast<DeviceDescriptorBase*>(this));
+        return sp<AudioPort>::fromExisting(const_cast<DeviceDescriptorBase*>(this));
     }
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
             const struct audio_port_config *srcConfig = NULL) const;
@@ -79,8 +79,8 @@
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
-    status_t writeToParcelable(media::AudioPort* parcelable) const;
-    status_t readFromParcelable(const media::AudioPort& parcelable);
+    status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+    status_t readFromParcelable(const media::AudioPortFw& parcelable);
 
 protected:
     AudioDeviceTypeAddr mDeviceTypeAddr;
@@ -116,8 +116,8 @@
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
 legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy);
 
 } // namespace android
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 3f1fbea..2f4aee0 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -10,6 +10,9 @@
 cc_test {
     name: "audiofoundation_parcelable_test",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_static",
+    ],
     shared_libs: [
         "libbase",
         "libbinder",
@@ -18,9 +21,9 @@
     ],
 
     static_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
         "libstagefright_foundation",
     ],
diff --git a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
index 50d8dc8..e315858 100644
--- a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
+++ b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
@@ -117,7 +117,7 @@
     audioPort->setGains(getAudioGainsForTest());
     audioPort->setAudioProfiles(getAudioProfileVectorForTest());
 
-    media::AudioPort parcelable;
+    media::AudioPortFw parcelable;
     ASSERT_EQ(NO_ERROR, audioPort->writeToParcelable(&parcelable));
     sp<AudioPort> audioPortFromParcel = new AudioPort(
             "", AUDIO_PORT_TYPE_NONE, AUDIO_PORT_ROLE_NONE);
@@ -152,7 +152,7 @@
     ASSERT_EQ(desc->setEncapsulationMetadataTypes(
             AUDIO_ENCAPSULATION_METADATA_TYPE_ALL_POSITION_BITS), NO_ERROR);
 
-    media::AudioPort parcelable;
+    media::AudioPortFw parcelable;
     ASSERT_EQ(NO_ERROR, desc->writeToParcelable(&parcelable));
     sp<DeviceDescriptorBase> descFromParcel = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
     ASSERT_EQ(NO_ERROR, descFromParcel->readFromParcelable(parcelable));
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 5f63e8d..3c05b0b 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -13,7 +13,7 @@
     srcs: [
         "DevicesFactoryHalInterface.cpp",
         "EffectsFactoryHalInterface.cpp",
-        "FactoryHalHidl.cpp",
+        "FactoryHal.cpp",
     ],
 
     cflags: [
@@ -28,10 +28,12 @@
         "libaudiohal@6.0",
         "libaudiohal@7.0",
         "libaudiohal@7.1",
+        "libaudiohal@aidl",
     ],
 
     shared_libs: [
         "audioclient-types-aidl-cpp",
+        "libbinder_ndk",
         "libdl",
         "libhidlbase",
         "liblog",
@@ -41,6 +43,7 @@
     header_libs: [
         "libaudiohal_headers",
         "libbase_headers",
+        "liberror_headers",
         "libmediautils_headers",
     ]
 }
@@ -65,15 +68,25 @@
 
     header_libs: [
         "libaudiohal_headers"
-    ]
+    ],
 }
 
 cc_library_headers {
     name: "libaudiohal_headers",
 
-    export_include_dirs: ["include"],
+    header_libs: [
+        "libeffectsconfig_headers",
+    ],
 
-    // This is needed because the stream interface includes media/MicrophoneInfo.h
-    header_libs: ["av-headers"],
-    export_header_lib_headers: ["av-headers"],
+    export_header_lib_headers: ["libeffectsconfig_headers"],
+
+    export_include_dirs: ["include"],
+}
+
+cc_library_headers {
+    name: "libaudiohalimpl_headers",
+
+    header_libs: ["libaudiohal_headers"],
+    export_header_lib_headers: ["libaudiohal_headers"],
+    export_include_dirs: ["impl"],
 }
diff --git a/media/libaudiohal/DevicesFactoryHalInterface.cpp b/media/libaudiohal/DevicesFactoryHalInterface.cpp
index 5ad26fc..adce681 100644
--- a/media/libaudiohal/DevicesFactoryHalInterface.cpp
+++ b/media/libaudiohal/DevicesFactoryHalInterface.cpp
@@ -14,19 +14,14 @@
  * limitations under the License.
  */
 
-#include <string>
-
 #include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <media/audiohal/FactoryHalHidl.h>
+#include <media/audiohal/FactoryHal.h>
 
 namespace android {
 
 // static
 sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
-    using namespace std::string_literals;
-    return createPreferredImpl<DevicesFactoryHalInterface>(
-            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s),
-            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s));
+    return createPreferredImpl<DevicesFactoryHalInterface>(true /* isCore */);
 }
 
 } // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalInterface.cpp b/media/libaudiohal/EffectsFactoryHalInterface.cpp
index 8a28f64..3a05f21 100644
--- a/media/libaudiohal/EffectsFactoryHalInterface.cpp
+++ b/media/libaudiohal/EffectsFactoryHalInterface.cpp
@@ -14,19 +14,14 @@
  * limitations under the License.
  */
 
-#include <string>
-
 #include <media/audiohal/EffectsFactoryHalInterface.h>
-#include <media/audiohal/FactoryHalHidl.h>
+#include <media/audiohal/FactoryHal.h>
 
 namespace android {
 
 // static
 sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
-    using namespace std::string_literals;
-    return createPreferredImpl<EffectsFactoryHalInterface>(
-            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s),
-            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s));
+    return createPreferredImpl<EffectsFactoryHalInterface>(false /* isCore */);
 }
 
 // static
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
new file mode 100644
index 0000000..f88915d
--- /dev/null
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <memory>
+#define LOG_TAG "FactoryHal"
+
+#include <algorithm>
+#include <array>
+#include <cstddef>
+#include <dlfcn.h>
+#include <utility>
+
+#include <android/binder_manager.h>
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <hidl/ServiceManagement.h>
+#include <hidl/Status.h>
+#include <utils/Log.h>
+
+#include "include/media/audiohal/AudioHalVersionInfo.h"
+#include "include/media/audiohal/FactoryHal.h"
+
+namespace android::detail {
+
+namespace {
+
+using ::android::detail::AudioHalVersionInfo;
+
+// The pair of the interface's package name and the interface name,
+// e.g. <"android.hardware.audio", "IDevicesFactory"> for HIDL, <"android.hardware.audio.core",
+// "IModule"> for AIDL.
+// Splitting is used for easier construction of versioned names (FQNs).
+using InterfaceName = std::pair<std::string, std::string>;
+
+/**
+ * Supported HAL versions, from most recent to least recent.
+ * This list needs to be kept in sync with AudioHalVersionInfo.VERSIONS in
+ * media/java/android/media/AudioHalVersionInfo.java.
+ */
+static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
+    // TODO: remove this comment to get AIDL
+    // AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
+    AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 4, 0),
+};
+
+static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
+        {AudioHalVersionInfo::Type::AIDL, std::make_pair("android.hardware.audio.core", "IModule")},
+        {AudioHalVersionInfo::Type::HIDL,
+         std::make_pair("android.hardware.audio", "IDevicesFactory")},
+};
+
+static const std::map<AudioHalVersionInfo::Type, InterfaceName> sEffectsHALInterfaces = {
+        {AudioHalVersionInfo::Type::AIDL,
+         std::make_pair("android.hardware.audio.effect", "IFactory")},
+        {AudioHalVersionInfo::Type::HIDL,
+         std::make_pair("android.hardware.audio.effect", "IEffectsFactory")},
+};
+
+bool createHalService(const AudioHalVersionInfo& version, bool isDevice, void** rawInterface) {
+    const std::string libName = "libaudiohal@" + version.toVersionString() + ".so";
+    const std::string factoryFunctionName =
+            isDevice ? "createIDevicesFactory" : "createIEffectsFactory";
+    constexpr int dlMode = RTLD_LAZY;
+    void* handle = nullptr;
+    dlerror(); // clear
+    handle = dlopen(libName.c_str(), dlMode);
+    if (handle == nullptr) {
+        const char* error = dlerror();
+        ALOGE("Failed to dlopen %s: %s", libName.c_str(),
+                error != nullptr ? error : "unknown error");
+        return false;
+    }
+    void* (*factoryFunction)();
+    *(void **)(&factoryFunction) = dlsym(handle, factoryFunctionName.c_str());
+    if (!factoryFunction) {
+        const char* error = dlerror();
+        ALOGE("Factory function %s not found in library %s: %s",
+                factoryFunctionName.c_str(), libName.c_str(),
+                error != nullptr ? error : "unknown error");
+        dlclose(handle);
+        return false;
+    }
+    *rawInterface = (*factoryFunction)();
+    ALOGW_IF(!*rawInterface, "Factory function %s from %s returned nullptr",
+            factoryFunctionName.c_str(), libName.c_str());
+    return true;
+}
+
+bool hasAidlHalService(const InterfaceName& interface, const AudioHalVersionInfo& version) {
+    const std::string name = interface.first + "." + interface.second + "/default";
+    const bool isDeclared = AServiceManager_isDeclared(name.c_str());
+    if (!isDeclared) {
+        ALOGW("%s %s: false", __func__, name.c_str());
+        return false;
+    }
+    ALOGI("%s %s: true, version %s", __func__, name.c_str(), version.toString().c_str());
+    return true;
+}
+
+bool hasHidlHalService(const InterfaceName& interface, const AudioHalVersionInfo& version) {
+    using ::android::hidl::manager::V1_0::IServiceManager;
+    sp<IServiceManager> sm = ::android::hardware::defaultServiceManager();
+    if (!sm) {
+        ALOGW("Failed to obtain HIDL ServiceManager");
+        return false;
+    }
+    // Since audio HAL doesn't support multiple clients, avoid instantiating
+    // the interface right away. Instead, query the transport type for it.
+    using ::android::hardware::Return;
+    using Transport = IServiceManager::Transport;
+    const std::string fqName =
+            interface.first + "@" + version.toVersionString() + "::" + interface.second;
+    const std::string instance = "default";
+    Return<Transport> transport = sm->getTransport(fqName, instance);
+    if (!transport.isOk()) {
+        ALOGW("Failed to obtain transport type for %s/%s: %s",
+              fqName.c_str(), instance.c_str(), transport.description().c_str());
+        return false;
+    }
+    return transport != Transport::EMPTY;
+}
+
+bool hasHalService(const InterfaceName& interface, const AudioHalVersionInfo& version) {
+    auto halType = version.getType();
+    if (halType == AudioHalVersionInfo::Type::AIDL) {
+        return hasAidlHalService(interface, version);
+    } else if (halType == AudioHalVersionInfo::Type::HIDL) {
+        return hasHidlHalService(interface, version);
+    } else {
+        ALOGE("HalType not supported %s", version.toString().c_str());
+        return false;
+    }
+}
+
+}  // namespace
+
+void *createPreferredImpl(bool isDevice) {
+    auto findMostRecentVersion = [](const auto& iMap) {
+        return std::find_if(sAudioHALVersions.begin(), sAudioHALVersions.end(),
+                            [iMap](const auto& v) {
+                                auto iface = iMap.find(v.getType());
+                                return hasHalService(iface->second, v);
+                            });
+    };
+
+    auto interfaceMap = isDevice ? sDevicesHALInterfaces : sEffectsHALInterfaces;
+    auto siblingInterfaceMap = isDevice ? sEffectsHALInterfaces : sDevicesHALInterfaces;
+    auto ifaceVersionIt = findMostRecentVersion(interfaceMap);
+    auto siblingVersionIt = findMostRecentVersion(siblingInterfaceMap);
+    if (ifaceVersionIt != sAudioHALVersions.end() && siblingVersionIt != sAudioHALVersions.end() &&
+        // same HAL type (HIDL/AIDL) and same major version
+        ifaceVersionIt->getType() == siblingVersionIt->getType() &&
+        ifaceVersionIt->getMajorVersion() == siblingVersionIt->getMajorVersion()) {
+        void* rawInterface;
+        if (createHalService(std::max(*ifaceVersionIt, *siblingVersionIt), isDevice,
+                             &rawInterface)) {
+            return rawInterface;
+        } else {
+            ALOGE("Failed to create HAL services with major %s, sibling %s!",
+                  ifaceVersionIt->toString().c_str(), siblingVersionIt->toString().c_str());
+        }
+    } else {
+        ALOGE("Found no HAL version, main(%s) %s %s!", isDevice ? "Device" : "Effect",
+              (ifaceVersionIt == sAudioHALVersions.end()) ? "null"
+                                                          : ifaceVersionIt->toString().c_str(),
+              (siblingVersionIt == sAudioHALVersions.end()) ? "null"
+                                                            : siblingVersionIt->toString().c_str());
+    }
+    return nullptr;
+}
+
+}  // namespace android::detail
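
To make the InterfaceName comment above concrete, here is a minimal sketch of how the service names are assembled by hasAidlHalService() and hasHidlHalService(); the version and interface values are examples only:

    const InterfaceName aidlIface{"android.hardware.audio.core", "IModule"};
    // AIDL: "<package>.<interface>/default", checked with AServiceManager_isDeclared().
    const std::string aidlName = aidlIface.first + "." + aidlIface.second + "/default";
    const InterfaceName hidlIface{"android.hardware.audio", "IDevicesFactory"};
    // HIDL: "<package>@<version>::<interface>", probed via IServiceManager::getTransport().
    const std::string hidlFqName = hidlIface.first + "@7.1::" + hidlIface.second;
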
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
deleted file mode 100644
index 590fec5..0000000
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "FactoryHalHidl"
-
-#include <algorithm>
-#include <array>
-#include <utility>
-
-#include <media/audiohal/FactoryHalHidl.h>
-
-#include <dlfcn.h>
-
-#include <android/hidl/manager/1.0/IServiceManager.h>
-#include <hidl/ServiceManagement.h>
-#include <hidl/Status.h>
-#include <utils/Log.h>
-
-namespace android::detail {
-
-namespace {
-/** Supported HAL versions, from most recent to least recent.
- */
-#define CONC_VERSION(maj, min) #maj "." #min
-#define DECLARE_VERSION(maj, min) std::make_pair(std::make_pair(maj, min), CONC_VERSION(maj, min))
-static constexpr std::array<std::pair<std::pair<int, int>, const char*>, 5> sAudioHALVersions = {
-    DECLARE_VERSION(7, 1),
-    DECLARE_VERSION(7, 0),
-    DECLARE_VERSION(6, 0),
-    DECLARE_VERSION(5, 0),
-    DECLARE_VERSION(4, 0)
-};
-
-bool createHalService(const std::string& version, const std::string& interface,
-        void** rawInterface) {
-    const std::string libName = "libaudiohal@" + version + ".so";
-    const std::string factoryFunctionName = "create" + interface;
-    constexpr int dlMode = RTLD_LAZY;
-    void* handle = nullptr;
-    dlerror(); // clear
-    handle = dlopen(libName.c_str(), dlMode);
-    if (handle == nullptr) {
-        const char* error = dlerror();
-        ALOGE("Failed to dlopen %s: %s", libName.c_str(),
-                error != nullptr ? error : "unknown error");
-        return false;
-    }
-    void* (*factoryFunction)();
-    *(void **)(&factoryFunction) = dlsym(handle, factoryFunctionName.c_str());
-    if (!factoryFunction) {
-        const char* error = dlerror();
-        ALOGE("Factory function %s not found in library %s: %s",
-                factoryFunctionName.c_str(), libName.c_str(),
-                error != nullptr ? error : "unknown error");
-        dlclose(handle);
-        return false;
-    }
-    *rawInterface = (*factoryFunction)();
-    ALOGW_IF(!*rawInterface, "Factory function %s from %s returned nullptr",
-            factoryFunctionName.c_str(), libName.c_str());
-    return true;
-}
-
-bool hasHalService(const std::string& package, const std::string& version,
-        const std::string& interface) {
-    using ::android::hidl::manager::V1_0::IServiceManager;
-    sp<IServiceManager> sm = ::android::hardware::defaultServiceManager();
-    if (!sm) {
-        ALOGE("Failed to obtain HIDL ServiceManager");
-        return false;
-    }
-    // Since audio HAL doesn't support multiple clients, avoid instantiating
-    // the interface right away. Instead, query the transport type for it.
-    using ::android::hardware::Return;
-    using Transport = IServiceManager::Transport;
-    const std::string fqName = package + "@" + version + "::" + interface;
-    const std::string instance = "default";
-    Return<Transport> transport = sm->getTransport(fqName, instance);
-    if (!transport.isOk()) {
-        ALOGE("Failed to obtain transport type for %s/%s: %s",
-                fqName.c_str(), instance.c_str(), transport.description().c_str());
-        return false;
-    }
-    return transport != Transport::EMPTY;
-}
-
-}  // namespace
-
-void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface) {
-    auto findMostRecentVersion = [](const InterfaceName& iface) {
-        return std::find_if(detail::sAudioHALVersions.begin(), detail::sAudioHALVersions.end(),
-                [&](const auto& v) { return hasHalService(iface.first, v.second, iface.second); });
-    };
-    auto ifaceVersionIt = findMostRecentVersion(iface);
-    auto siblingVersionIt = findMostRecentVersion(siblingIface);
-    if (ifaceVersionIt != detail::sAudioHALVersions.end() &&
-            siblingVersionIt != detail::sAudioHALVersions.end() &&
-            // same major version
-            ifaceVersionIt->first.first == siblingVersionIt->first.first) {
-        std::string libraryVersion =
-                ifaceVersionIt->first >= siblingVersionIt->first ?
-                ifaceVersionIt->second : siblingVersionIt->second;
-        void* rawInterface;
-        if (createHalService(libraryVersion, iface.second, &rawInterface)) {
-            return rawInterface;
-        }
-    }
-    return nullptr;
-}
-
-}  // namespace android::detail
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaudiohal/TEST_MAPPING
+++ b/media/libaudiohal/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index d30883a..09e70eb 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -12,13 +12,14 @@
     srcs: [
         "CoreConversionHelperHidl.cpp",
         "DeviceHalHidl.cpp",
+        "DevicesFactoryHalEntry.cpp",
         "DevicesFactoryHalHidl.cpp",
         "StreamHalHidl.cpp",
     ],
 }
 
 filegroup {
-    name: "audio_effect_hal_client_sources",
+    name: "audio_effect_hidl_hal_client_sources",
     srcs: [
         "EffectBufferHalHidl.cpp",
         "EffectConversionHelperHidl.cpp",
@@ -28,6 +29,22 @@
 }
 
 cc_defaults {
+    name: "libaudiohal_hidl_default",
+    shared_libs: [
+        "android.hardware.audio.common-util",
+        "android.hidl.allocator@1.0",
+        "android.hidl.memory@1.0",
+        "libaudiohal_deathhandler",
+        "libeffectsconfig",
+        "libhidlbase",
+        "libhidlmemory",
+    ],
+    header_libs: [
+        "android.hardware.audio.common.util@all-versions",
+    ]
+}
+
+cc_defaults {
     name: "libaudiohal_default",
 
     cflags: [
@@ -37,31 +54,28 @@
         "-fvisibility=hidden",
     ],
     shared_libs: [
-        "android.hardware.audio.common-util",
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
+        "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
         "libaudiofoundation",
-        "libaudiohal_deathhandler",
         "libaudioutils",
         "libbase",
         "libbinder",
         "libcutils",
         "libfmq",
         "libhardware",
-        "libhidlbase",
-        "libhidlmemory",
         "liblog",
         "libmedia_helper",
         "libmediautils",
         "libutils",
-        "audioclient-types-aidl-cpp",
     ],
     header_libs: [
-        "android.hardware.audio.common.util@all-versions",
         "libaudioclient_headers",
         "libaudiohal_headers"
     ],
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_export_shared",
+    ],
 
     export_shared_lib_headers: [
         "libfmq",
@@ -70,11 +84,14 @@
 
 cc_library_shared {
     name: "libaudiohal@4.0",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
         ":audio_core_hal_client_sources",
-        ":audio_effect_hal_client_sources",
-        "EffectsFactoryHalHidlEntry.cpp",
+        ":audio_effect_hidl_hal_client_sources",
+        "EffectsFactoryHalEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@4.0",
@@ -93,11 +110,14 @@
 
 cc_library_shared {
     name: "libaudiohal@5.0",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
         ":audio_core_hal_client_sources",
-        ":audio_effect_hal_client_sources",
-        "EffectsFactoryHalHidlEntry.cpp",
+        ":audio_effect_hidl_hal_client_sources",
+        "EffectsFactoryHalEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@5.0",
@@ -116,11 +136,14 @@
 
 cc_library_shared {
     name: "libaudiohal@6.0",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
         ":audio_core_hal_client_sources",
-        ":audio_effect_hal_client_sources",
-        "EffectsFactoryHalHidlEntry.cpp",
+        ":audio_effect_hidl_hal_client_sources",
+        "EffectsFactoryHalEntry.cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common@6.0",
@@ -139,9 +162,12 @@
 
 cc_library_static {
     name: "libaudiohal.effect@7.0",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
-        ":audio_effect_hal_client_sources",
+        ":audio_effect_hidl_hal_client_sources",
     ],
     static_libs: [
         "android.hardware.audio.common@7.0",
@@ -158,10 +184,13 @@
 
 cc_library_shared {
     name: "libaudiohal@7.0",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
         ":audio_core_hal_client_sources",
-        "EffectsFactoryHalHidlEntry.cpp",
+        "EffectsFactoryHalEntry.cpp",
     ],
     static_libs: [
         "android.hardware.audio.common@7.0",
@@ -182,10 +211,15 @@
 
 cc_library_shared {
     name: "libaudiohal@7.1",
-    defaults: ["libaudiohal_default"],
+    defaults: [
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_sounddose_ndk_shared",
+        "libaudiohal_default",
+        "libaudiohal_hidl_default"
+    ],
     srcs: [
         ":audio_core_hal_client_sources",
-        "EffectsFactoryHalHidlEntry.cpp",
+        "EffectsFactoryHalEntry.cpp",
     ],
     static_libs: [
         "android.hardware.audio.common@7.0",
@@ -199,6 +233,9 @@
         "android.hardware.audio@7.1-util",
         "libaudiohal.effect@7.0",
     ],
+    shared_libs: [
+        "libbinder_ndk",
+    ],
     cflags: [
         "-DMAJOR_VERSION=7",
         "-DMINOR_VERSION=1",
@@ -207,3 +244,72 @@
         "-include common/all-versions/VersionMacro.h",
     ]
 }
+
+cc_library_shared {
+    name: "libaudiohal@aidl",
+    defaults: [
+        "libaudiohal_default",
+        "latest_android_hardware_audio_common_ndk_shared",
+        "latest_android_hardware_audio_core_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    srcs: [
+        "DeviceHalAidl.cpp",
+        "DevicesFactoryHalEntry.cpp",
+        "DevicesFactoryHalAidl.cpp",
+        "EffectConversionHelperAidl.cpp",
+        "EffectBufferHalAidl.cpp",
+        "EffectHalAidl.cpp",
+        "effectsAidlConversion/AidlConversionAec.cpp",
+        "effectsAidlConversion/AidlConversionAgc1.cpp",
+        "effectsAidlConversion/AidlConversionAgc2.cpp",
+        "effectsAidlConversion/AidlConversionBassBoost.cpp",
+        "effectsAidlConversion/AidlConversionDownmix.cpp",
+        "effectsAidlConversion/AidlConversionDynamicsProcessing.cpp",
+        "effectsAidlConversion/AidlConversionEnvReverb.cpp",
+        "effectsAidlConversion/AidlConversionEq.cpp",
+        "effectsAidlConversion/AidlConversionHapticGenerator.cpp",
+        "effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp",
+        "effectsAidlConversion/AidlConversionNoiseSuppression.cpp",
+        "effectsAidlConversion/AidlConversionPresetReverb.cpp",
+        "effectsAidlConversion/AidlConversionSpatializer.cpp",
+        "effectsAidlConversion/AidlConversionVendorExtension.cpp",
+        "effectsAidlConversion/AidlConversionVirtualizer.cpp",
+        "effectsAidlConversion/AidlConversionVisualizer.cpp",
+        "EffectsFactoryHalAidl.cpp",
+        "EffectsFactoryHalEntry.cpp",
+        "StreamHalAidl.cpp",
+        ":audio_effectproxy_src_files"
+    ],
+    static_libs: [
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudio_aidl_conversion_common_ndk_cpp",
+        "libaudio_aidl_conversion_core_ndk",
+        "libaudio_aidl_conversion_effect_ndk",
+        "libaudioaidlcommon",
+        "libbinder_ndk",
+    ],
+    header_libs: [
+        "libaudio_system_headers",
+        "libeffectsconfig_headers",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wthread-safety",
+        "-DBACKEND_CPP_NDK",
+    ],
+}
+
+filegroup {
+    name: "audio_effectproxy_src_files",
+    srcs: ["EffectProxy.cpp"],
+}
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
new file mode 100644
index 0000000..5534d13
--- /dev/null
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include <android-base/expected.h>
+#include <error/Result.h>
+#include <media/AudioParameter.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+class Args {
+  public:
+    explicit Args(const Vector<String16>& args)
+            : mValues(args.size()), mPtrs(args.size()) {
+        for (size_t i = 0; i < args.size(); ++i) {
+            mValues[i] = std::string(String8(args[i]));
+            mPtrs[i] = mValues[i].c_str();
+        }
+    }
+    const char** args() { return mPtrs.data(); }
+  private:
+    std::vector<std::string> mValues;
+    std::vector<const char*> mPtrs;
+};
+
+class ConversionHelperAidl {
+  protected:
+    ConversionHelperAidl(std::string_view className) : mClassName(className) {}
+
+    const std::string& getClassName() const {
+        return mClassName;
+    }
+
+    const std::string mClassName;
+};
+
+// 'action' must accept a value of type 'T' and return 'status_t'.
+// The function returns 'true' if the parameter was found and the action succeeded.
+// The function returns 'false' if the parameter was not found.
+// Any errors are propagated; if an error is returned, it means the parameter was found.
+template<typename T, typename F>
+error::Result<bool> filterOutAndProcessParameter(
+        AudioParameter& parameters, const String8& key, const F& action) {
+    if (parameters.containsKey(key)) {
+        T value;
+        status_t status = parameters.get(key, value);
+        if (status == OK) {
+            parameters.remove(key);
+            status = action(value);
+            if (status == OK) return true;
+        }
+        return base::unexpected(status);
+    }
+    return false;
+}
+
+}  // namespace android
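
A hedged usage sketch for filterOutAndProcessParameter() (the key string and the lambda body are illustrative only): the helper removes the key from the bundle once its value has been read and forwards that value to the supplied action, leaving all other parameters untouched.

    AudioParameter params(String8("screen_state=on;other_key=1"));
    auto handled = filterOutAndProcessParameter<String8>(
            params, String8("screen_state"), [](const String8& value) -> status_t {
                // Forward 'value' to the HAL here; reject anything unexpected.
                return (value == "on" || value == "off") ? OK : BAD_VALUE;
            });
    // 'handled' is true (found and processed), false (key absent), or the error from the action.
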
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
new file mode 100644
index 0000000..3125e311
--- /dev/null
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -0,0 +1,1642 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DeviceHalAidl"
+// #define LOG_NDEBUG 0
+
+#include <algorithm>
+#include <forward_list>
+
+#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
+#include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
+#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
+#include <mediautils/TimeCheck.h>
+#include <Utils.h>
+#include <utils/Log.h>
+
+#include "DeviceHalAidl.h"
+#include "StreamHalAidl.h"
+
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::Boolean;
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
+using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
+using aidl::android::media::audio::common::AudioDeviceType;
+using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::AudioInputFlags;
+using aidl::android::media::audio::common::AudioIoFlags;
+using aidl::android::media::audio::common::AudioLatencyMode;
+using aidl::android::media::audio::common::AudioMMapPolicy;
+using aidl::android::media::audio::common::AudioMMapPolicyInfo;
+using aidl::android::media::audio::common::AudioMMapPolicyType;
+using aidl::android::media::audio::common::AudioMode;
+using aidl::android::media::audio::common::AudioOutputFlags;
+using aidl::android::media::audio::common::AudioPort;
+using aidl::android::media::audio::common::AudioPortConfig;
+using aidl::android::media::audio::common::AudioPortDeviceExt;
+using aidl::android::media::audio::common::AudioPortExt;
+using aidl::android::media::audio::common::AudioPortMixExt;
+using aidl::android::media::audio::common::AudioPortMixExtUseCase;
+using aidl::android::media::audio::common::AudioProfile;
+using aidl::android::media::audio::common::AudioSource;
+using aidl::android::media::audio::common::Float;
+using aidl::android::media::audio::common::Int;
+using aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using aidl::android::media::audio::common::MicrophoneInfo;
+using aidl::android::hardware::audio::common::getFrameSizeInBytes;
+using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::isDefaultAudioFormat;
+using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
+using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::core::AudioPatch;
+using aidl::android::hardware::audio::core::AudioRoute;
+using aidl::android::hardware::audio::core::IBluetooth;
+using aidl::android::hardware::audio::core::IBluetoothA2dp;
+using aidl::android::hardware::audio::core::IBluetoothLe;
+using aidl::android::hardware::audio::core::IModule;
+using aidl::android::hardware::audio::core::ITelephony;
+using aidl::android::hardware::audio::core::ModuleDebug;
+using aidl::android::hardware::audio::core::StreamDescriptor;
+
+namespace android {
+
+namespace {
+
+bool isConfigEqualToPortConfig(const AudioConfig& config, const AudioPortConfig& portConfig) {
+    return portConfig.sampleRate.value().value == config.base.sampleRate &&
+            portConfig.channelMask.value() == config.base.channelMask &&
+            portConfig.format.value() == config.base.format;
+}
+
+void setConfigFromPortConfig(AudioConfig* config, const AudioPortConfig& portConfig) {
+    config->base.sampleRate = portConfig.sampleRate.value().value;
+    config->base.channelMask = portConfig.channelMask.value();
+    config->base.format = portConfig.format.value();
+}
+
+void setPortConfigFromConfig(AudioPortConfig* portConfig, const AudioConfig& config) {
+    portConfig->sampleRate = Int{ .value = config.base.sampleRate };
+    portConfig->channelMask = config.base.channelMask;
+    portConfig->format = config.base.format;
+}
+
+// Note: these converters are for types defined in different AIDL files. Although these
+// AIDL files are copies of each other, formally they are different types,
+// thus we do not use a conversion via a parcelable.
+ConversionResult<media::AudioRoute> ndk2cpp_AudioRoute(const AudioRoute& ndk) {
+    media::AudioRoute cpp;
+    cpp.sourcePortIds.insert(
+            cpp.sourcePortIds.end(), ndk.sourcePortIds.begin(), ndk.sourcePortIds.end());
+    cpp.sinkPortId = ndk.sinkPortId;
+    cpp.isExclusive = ndk.isExclusive;
+    return cpp;
+}
+
+template<typename T>
+std::shared_ptr<T> retrieveSubInterface(const std::shared_ptr<IModule>& module,
+        ::ndk::ScopedAStatus (IModule::*getT)(std::shared_ptr<T>*)) {
+    if (module != nullptr) {
+        std::shared_ptr<T> instance;
+        if (auto status = (module.get()->*getT)(&instance); status.isOk()) {
+            return instance;
+        }
+    }
+    return nullptr;
+}
+
+}  // namespace
+
+DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module)
+        : ConversionHelperAidl("DeviceHalAidl"),
+          mInstance(instance), mModule(module),
+          mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+          mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+          mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)) {
+}
+
+status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
+    auto convertAudioPortFromMap = [](const Ports::value_type& pair) {
+        return ndk2cpp_AudioPort(pair.second);
+    };
+    *ports = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<media::audio::common::AudioPort>>(
+                    mPorts, convertAudioPortFromMap));
+    return OK;
+}
+
+status_t DeviceHalAidl::getAudioRoutes(std::vector<media::AudioRoute> *routes) {
+    *routes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<media::AudioRoute>>(
+                    mRoutes, ndk2cpp_AudioRoute));
+    return OK;
+}
+
+status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
+    TIME_CHECK();
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+    if (mModule == nullptr) return NO_INIT;
+    if (mTelephony == nullptr) return INVALID_OPERATION;
+    std::vector<AudioMode> aidlModes;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mTelephony->getSupportedAudioModes(&aidlModes)));
+    *modes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<media::audio::common::AudioMode>>(
+                    aidlModes, ndk2cpp_AudioMode));
+    return OK;
+}
+
+status_t DeviceHalAidl::getSupportedDevices(uint32_t*) {
+    // Obsolete.
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalAidl::initCheck() {
+    TIME_CHECK();
+    if (mModule == nullptr) return NO_INIT;
+    std::vector<AudioPort> ports;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
+    ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
+            __func__, mInstance.c_str());
+    std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    mDefaultInputPortId = mDefaultOutputPortId = -1;
+    const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
+    for (const auto& pair : mPorts) {
+        const auto& p = pair.second;
+        if (p.ext.getTag() == AudioPortExt::Tag::device &&
+                (p.ext.get<AudioPortExt::Tag::device>().flags & defaultDeviceFlag) != 0) {
+            if (p.flags.getTag() == AudioIoFlags::Tag::input) {
+                mDefaultInputPortId = p.id;
+            } else if (p.flags.getTag() == AudioIoFlags::Tag::output) {
+                mDefaultOutputPortId = p.id;
+            }
+        }
+    }
+    ALOGI("%s: module %s default port ids: input %d, output %d",
+            __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+    RETURN_STATUS_IF_ERROR(updateRoutes());
+    std::vector<AudioPortConfig> portConfigs;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioPortConfigs(&portConfigs)));  // OK if empty
+    std::transform(portConfigs.begin(), portConfigs.end(),
+            std::inserter(mPortConfigs, mPortConfigs.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(mInitialPortConfigIds, mInitialPortConfigIds.end()),
+            [](const auto& pcPair) { return pcPair.first; });
+    std::vector<AudioPatch> patches;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioPatches(&patches)));  // OK if empty
+    std::transform(patches.begin(), patches.end(),
+            std::inserter(mPatches, mPatches.end()),
+            [](const auto& p) { return std::make_pair(p.id, p); });
+    return OK;
+}
+
+status_t DeviceHalAidl::setVoiceVolume(float volume) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (mTelephony == nullptr) return INVALID_OPERATION;
+    ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
+    ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+            "%s: the resulting voice volume %f is not the same as requested %f",
+            __func__, outConfig.voiceVolume.value().value, volume);
+    return OK;
+}
+
+status_t DeviceHalAidl::setMasterVolume(float volume) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->setMasterVolume(volume));
+}
+
+status_t DeviceHalAidl::getMasterVolume(float *volume) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->getMasterVolume(volume));
+}
+
+status_t DeviceHalAidl::setMode(audio_mode_t mode) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
+    if (mTelephony != nullptr) {
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
+    }
+    return statusTFromBinderStatus(mModule->updateAudioMode(audioMode));
+}
+
+status_t DeviceHalAidl::setMicMute(bool state) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->setMicMute(state));
+}
+
+status_t DeviceHalAidl::getMicMute(bool *state) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->getMicMute(state));
+}
+
+status_t DeviceHalAidl::setMasterMute(bool state) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->setMasterMute(state));
+}
+
+status_t DeviceHalAidl::getMasterMute(bool *state) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return statusTFromBinderStatus(mModule->getMasterMute(state));
+}
+
+status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
+    if (!mModule) return NO_INIT;
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+
+    if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+    }
+    if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+    }
+
+    ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: \"%s\"",
+            __func__, parameters.toString().c_str());
+    return OK;
+}
+
+status_t DeviceHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+    TIME_CHECK();
+    // FIXME(b/278976019): Support keyReconfigA2dpSupported via vendor plugin
+    values->clear();
+    if (!mModule) return NO_INIT;
+    ALOGE("%s not implemented yet", __func__);
+    return OK;
+}
+
+namespace {
+
+class Cleanup {
+  public:
+    typedef void (DeviceHalAidl::*Cleaner)(int32_t);
+
+    Cleanup(DeviceHalAidl* device, Cleaner cleaner, int32_t id) :
+            mDevice(device), mCleaner(cleaner), mId(id) {}
+    ~Cleanup() { clean(); }
+    void clean() {
+        if (mDevice != nullptr) (mDevice->*mCleaner)(mId);
+        disarm();
+    }
+    void disarm() { mDevice = nullptr; }
+
+  private:
+    DeviceHalAidl* mDevice;
+    const Cleaner mCleaner;
+    const int32_t mId;
+};
+
+}  // namespace
+
+// Since the order of destruction of container elements is unspecified,
+// ensure that cleanups are performed starting from the most recent one.
+// This is the same as if there were individual Cleanup instances on the stack,
+// with the bonus that we can disarm all of them with a single statement.
+class DeviceHalAidl::Cleanups : public std::forward_list<Cleanup> {
+  public:
+    ~Cleanups() { for (auto& c : *this) c.clean(); }
+    void disarmAll() { for (auto& c : *this) c.disarm(); }
+};
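+
+// A minimal usage sketch of 'Cleanups' (illustrative only; 'doNextStep' is a hypothetical
+// helper): cleanups are registered right after each resource gets created; on an early
+// error return they run in reverse registration order, and on success 'disarmAll'
+// cancels them so the created objects are kept:
+//
+//   Cleanups cleanups;
+//   cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, portConfig.id);
+//   RETURN_STATUS_IF_ERROR(doNextStep());  // on error, resetPortConfig runs automatically
+//   cleanups.disarmAll();                  // success: keep the created port config
+//   return OK;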
+
+status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (size == nullptr) return BAD_VALUE;
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+    AudioDevice aidlDevice;
+    aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioSource aidlSource = AudioSource::DEFAULT;
+    AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
+    AudioPortConfig mixPortConfig;
+    Cleanups cleanups;
+    audio_config writableConfig = *config;
+    AudioPatch aidlPatch;
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
+                    &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    *size = aidlConfig.frameCount *
+            getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
+    // Do not disarm the cleanups, so that the temporary port configs get released.
+    return OK;
+}
+
+status_t DeviceHalAidl::prepareToOpenStream(
+        int32_t aidlHandle, const AudioDevice& aidlDevice, const AudioIoFlags& aidlFlags,
+        AudioSource aidlSource, struct audio_config* config,
+        Cleanups* cleanups, AudioConfig* aidlConfig, AudioPortConfig* mixPortConfig,
+        AudioPatch* aidlPatch) {
+    ALOGD("%p %s::%s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
+            this, getClassName().c_str(), __func__, aidlHandle, aidlDevice.toString().c_str(),
+            aidlFlags.toString().c_str(), toString(aidlSource).c_str(),
+            aidlConfig->toString().c_str(), mixPortConfig->toString().c_str());
+    resetUnusedPatchesAndPortConfigs();
+    const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
+    // Find / create AudioPortConfigs for the device port and the mix port,
+    // then find / create a patch between them, and open a stream on the mix port.
+    AudioPortConfig devicePortConfig;
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(aidlDevice, aidlConfig,
+                                                  &devicePortConfig, &created));
+    if (created) {
+        cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
+    }
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle, aidlSource,
+                    std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
+    if (created) {
+        cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, mixPortConfig->id);
+    }
+    setConfigFromPortConfig(aidlConfig, *mixPortConfig);
+    if (isInput) {
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
+                        {devicePortConfig.id}, {mixPortConfig->id}, aidlPatch, &created));
+    } else {
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(
+                        {mixPortConfig->id}, {devicePortConfig.id}, aidlPatch, &created));
+    }
+    if (created) {
+        cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, aidlPatch->id);
+    }
+    if (aidlConfig->frameCount <= 0) {
+        aidlConfig->frameCount = aidlPatch->minimumStreamBufferSizeFrames;
+    }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(*aidlConfig, isInput));
+    return OK;
+}
+
+namespace {
+
+class StreamCallbackBase {
+  protected:
+    explicit StreamCallbackBase(const sp<CallbackBroker>& broker) : mBroker(broker) {}
+  public:
+    void* getCookie() const { return mCookie; }
+    void setCookie(void* cookie) { mCookie = cookie; }
+    sp<CallbackBroker> getBroker() const {
+        if (void* cookie = mCookie; cookie != nullptr) return mBroker.promote();
+        return nullptr;
+    }
+  private:
+    const wp<CallbackBroker> mBroker;
+    std::atomic<void*> mCookie;
+};
+
+template<class C>
+class StreamCallbackBaseHelper {
+  protected:
+    explicit StreamCallbackBaseHelper(const StreamCallbackBase& base) : mBase(base) {}
+    sp<C> getCb(const sp<CallbackBroker>& broker, void* cookie);
+    using CbRef = const sp<C>&;
+    ndk::ScopedAStatus runCb(const std::function<void(CbRef cb)>& f) {
+        if (auto cb = getCb(mBase.getBroker(), mBase.getCookie()); cb != nullptr) f(cb);
+        return ndk::ScopedAStatus::ok();
+    }
+  private:
+    const StreamCallbackBase& mBase;
+};
+
+template<>
+sp<StreamOutHalInterfaceCallback> StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutCallback(cookie);
+    return nullptr;
+}
+
+template<>
+sp<StreamOutHalInterfaceEventCallback>
+StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutEventCallback(cookie);
+    return nullptr;
+}
+
+template<>
+sp<StreamOutHalInterfaceLatencyModeCallback>
+StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>::getCb(
+        const sp<CallbackBroker>& broker, void* cookie) {
+    if (broker != nullptr) return broker->getStreamOutLatencyModeCallback(cookie);
+    return nullptr;
+}
+
+/*
+Note on the callback ownership.
+
+In the Binder ownership model, the server implementation is kept alive
+as long as there is any client (proxy object) alive. This is done by
+incrementing the refcount of the server-side object by the Binder framework.
+When it detects that the last client is gone, it decrements the refcount back.
+
+Thus, we do not need to keep any references to StreamCallback on our
+side (after we have sent an instance to the client), because we are
+the server side. The callback object will be kept alive as long as the HAL server
+holds a strong ref to the IStreamCallback proxy.
+*/
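+
+// A minimal sketch of the pattern described above (illustrative only; 'broker' and 'args'
+// stand in for the callback broker and the open-stream arguments used further below):
+// the callback is created, handed over to the HAL, and no strong reference is retained
+// on this side:
+//
+//   auto cb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(broker);
+//   args.callback = cb;  // the HAL server keeps the Binder proxy, and thus 'cb', alive
+//   // ... no member variable stores 'cb' here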
+
+class OutputStreamCallbackAidl : public StreamCallbackBase,
+                             public StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>,
+                             public ::aidl::android::hardware::audio::core::BnStreamCallback {
+  public:
+    explicit OutputStreamCallbackAidl(const sp<CallbackBroker>& broker)
+            : StreamCallbackBase(broker),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceCallback>(
+                      *static_cast<StreamCallbackBase*>(this)) {}
+    ndk::ScopedAStatus onTransferReady() override {
+        return runCb([](CbRef cb) { cb->onWriteReady(); });
+    }
+    ndk::ScopedAStatus onError() override {
+        return runCb([](CbRef cb) { cb->onError(); });
+    }
+    ndk::ScopedAStatus onDrainReady() override {
+        return runCb([](CbRef cb) { cb->onDrainReady(); });
+    }
+};
+
+class OutputStreamEventCallbackAidl :
+            public StreamCallbackBase,
+            public StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>,
+            public StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>,
+            public ::aidl::android::hardware::audio::core::BnStreamOutEventCallback {
+  public:
+    explicit OutputStreamEventCallbackAidl(const sp<CallbackBroker>& broker)
+            : StreamCallbackBase(broker),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>(
+                      *static_cast<StreamCallbackBase*>(this)),
+              StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>(
+                      *static_cast<StreamCallbackBase*>(this)) {}
+    ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& in_audioMetadata) override {
+        std::basic_string<uint8_t> halMetadata(in_audioMetadata.begin(), in_audioMetadata.end());
+        return StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::runCb(
+                [&halMetadata](auto cb) { cb->onCodecFormatChanged(halMetadata); });
+    }
+    ndk::ScopedAStatus onRecommendedLatencyModeChanged(
+            const std::vector<AudioLatencyMode>& in_modes) override {
+        auto halModes = VALUE_OR_FATAL(
+                ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
+                        in_modes,
+                        ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
+        return StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>::runCb(
+                [&halModes](auto cb) { cb->onRecommendedLatencyModeChanged(halModes); });
+    }
+};
+
+}  // namespace
+
+status_t DeviceHalAidl::openOutputStream(
+        audio_io_handle_t handle, audio_devices_t devices,
+        audio_output_flags_t flags, struct audio_config* config,
+        const char* address,
+        sp<StreamOutHalInterface>* outStream) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!outStream || !config) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(handle));
+    AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
+    AudioDevice aidlDevice = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
+    int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
+    AudioPortConfig mixPortConfig;
+    Cleanups cleanups;
+    AudioPatch aidlPatch;
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
+                    AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
+    args.portConfigId = mixPortConfig.id;
+    const bool isOffload = isBitPositionFlagSet(
+            aidlOutputFlags, AudioOutputFlags::COMPRESS_OFFLOAD);
+    std::shared_ptr<OutputStreamCallbackAidl> streamCb;
+    if (isOffload) {
+        streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
+    }
+    auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
+    if (isOffload) {
+        args.offloadInfo = aidlConfig.offloadInfo;
+        args.callback = streamCb;
+    }
+    args.bufferSizeFrames = aidlConfig.frameCount;
+    args.eventCallback = eventCb;
+    ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
+    StreamContextAidl context(ret.desc, isOffload);
+    if (!context.isValid()) {
+        ALOGE("%s: Failed to create a valid stream context from the descriptor: %s",
+                __func__, ret.desc.toString().c_str());
+        return NO_INIT;
+    }
+    *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+            std::move(ret.stream), this /*callbackBroker*/);
+    mStreams.insert(std::pair(*outStream, aidlPatch.id));
+    void* cbCookie = (*outStream).get();
+    {
+        std::lock_guard l(mLock);
+        mCallbacks.emplace(cbCookie, Callbacks{});
+    }
+    if (streamCb) streamCb->setCookie(cbCookie);
+    eventCb->setCookie(cbCookie);
+    cleanups.disarmAll();
+    return OK;
+}
+
+status_t DeviceHalAidl::openInputStream(
+        audio_io_handle_t handle, audio_devices_t devices,
+        struct audio_config* config, audio_input_flags_t flags,
+        const char* address, audio_source_t source,
+        audio_devices_t outputDevice, const char* outputDeviceAddress,
+        sp<StreamInHalInterface>* inStream) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!inStream || !config) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_io_handle_t_int32_t(handle));
+    AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+    AudioDevice aidlDevice = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
+    int32_t aidlInputFlags = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
+    AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::input>(aidlInputFlags);
+    AudioSource aidlSource = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_source_t_AudioSource(source));
+    AudioPortConfig mixPortConfig;
+    Cleanups cleanups;
+    AudioPatch aidlPatch;
+    RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, aidlSource,
+                    config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
+    ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments args;
+    args.portConfigId = mixPortConfig.id;
+    RecordTrackMetadata aidlTrackMetadata{
+        .source = aidlSource, .gain = 1, .channelMask = aidlConfig.base.channelMask };
+    if (outputDevice != AUDIO_DEVICE_NONE) {
+        aidlTrackMetadata.destinationDevice = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_device_AudioDevice(
+                    outputDevice, outputDeviceAddress));
+    }
+    args.sinkMetadata.tracks.push_back(std::move(aidlTrackMetadata));
+    args.bufferSizeFrames = aidlConfig.frameCount;
+    ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
+    StreamContextAidl context(ret.desc, false /*isAsynchronous*/);
+    if (!context.isValid()) {
+        ALOGE("%s: Failed to create a valid stream context from the descriptor: %s",
+                __func__, ret.desc.toString().c_str());
+        return NO_INIT;
+    }
+    *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+            std::move(ret.stream), this /*micInfoProvider*/);
+    mStreams.insert(std::pair(*inStream, aidlPatch.id));
+    cleanups.disarmAll();
+    return OK;
+}
+
+status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
+    *supportsPatches = true;
+    return OK;
+}
+
+status_t DeviceHalAidl::createAudioPatch(unsigned int num_sources,
+                                         const struct audio_port_config* sources,
+                                         unsigned int num_sinks,
+                                         const struct audio_port_config* sinks,
+                                         audio_patch_handle_t* patch) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX ||
+        sources == nullptr || sinks == nullptr || patch == nullptr) {
+        return BAD_VALUE;
+    }
+    // When the patch handle (*patch) is AUDIO_PATCH_HANDLE_NONE, it means
+    // the framework wants to create a new patch. The handle has to be generated
+    // by the HAL. Since handles generated this way can only be unique within
+    // a HAL module, the framework generates a globally unique handle, and maps
+    // it onto the <HAL module, patch handle> pair.
+    // When the patch handle is set, it means the framework intends to update
+    // an existing patch.
+    //
+    // This behavior corresponds to HAL module behavior, with the only difference
+    // that the HAL module uses `int32_t` for patch IDs. The following assert ensures
+    // that both the framework and the HAL use the same value for "no ID":
+    static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
+    int32_t halPatchId = static_cast<int32_t>(*patch);
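+    // A simplified sketch of the framework-side bookkeeping described above (illustrative
+    // only, the map name is hypothetical): the framework keeps something like
+    //
+    //   std::map<audio_patch_handle_t, std::pair<audio_module_handle_t, int32_t>> patchMap;
+    //
+    // so that a globally unique handle can be resolved back to the owning HAL module and
+    // its module-local patch id.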
+
+    // Upon conversion, mix port configs contain audio configuration, while
+    // device port configs contain device address. This data is used to find
+    // or create HAL configs.
+    std::vector<AudioPortConfig> aidlSources, aidlSinks;
+    for (unsigned int i = 0; i < num_sources; ++i) {
+        bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                        sources[i].role, sources[i].type)) ==
+                ::aidl::android::AudioPortDirection::INPUT;
+        aidlSources.push_back(VALUE_OR_RETURN_STATUS(
+                        ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                                sources[i], isInput, 0)));
+    }
+    for (unsigned int i = 0; i < num_sinks; ++i) {
+        bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                        sinks[i].role, sinks[i].type)) ==
+                ::aidl::android::AudioPortDirection::INPUT;
+        aidlSinks.push_back(VALUE_OR_RETURN_STATUS(
+                        ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                                sinks[i], isInput, 0)));
+    }
+    Cleanups cleanups;
+    auto existingPatchIt = halPatchId != 0 ? mPatches.find(halPatchId): mPatches.end();
+    AudioPatch aidlPatch;
+    if (existingPatchIt != mPatches.end()) {
+        aidlPatch = existingPatchIt->second;
+        aidlPatch.sourcePortConfigIds.clear();
+        aidlPatch.sinkPortConfigIds.clear();
+    }
+    ALOGD("%s: sources: %s, sinks: %s",
+            __func__, ::android::internal::ToString(aidlSources).c_str(),
+            ::android::internal::ToString(aidlSinks).c_str());
+    auto fillPortConfigs = [&](
+            const std::vector<AudioPortConfig>& configs,
+            const std::set<int32_t>& destinationPortIds,
+            std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
+        for (const auto& s : configs) {
+            AudioPortConfig portConfig;
+            bool created = false;
+            RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                            s, destinationPortIds, &portConfig, &created));
+            if (created) {
+                cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, portConfig.id);
+            }
+            ids->push_back(portConfig.id);
+            if (portIds != nullptr) {
+                portIds->insert(portConfig.portId);
+            }
+        }
+        return OK;
+    };
+    // When looking up port configs, the destinationPortId is only used for mix ports.
+    // Thus, we process device port configs first, and look up the destination port ID from them.
+    bool sourceIsDevice = std::any_of(aidlSources.begin(), aidlSources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const std::vector<AudioPortConfig>& devicePortConfigs =
+            sourceIsDevice ? aidlSources : aidlSinks;
+    std::vector<int32_t>* devicePortConfigIds =
+            sourceIsDevice ? &aidlPatch.sourcePortConfigIds : &aidlPatch.sinkPortConfigIds;
+    const std::vector<AudioPortConfig>& mixPortConfigs =
+            sourceIsDevice ? aidlSinks : aidlSources;
+    std::vector<int32_t>* mixPortConfigIds =
+            sourceIsDevice ? &aidlPatch.sinkPortConfigIds : &aidlPatch.sourcePortConfigIds;
+    std::set<int32_t> devicePortIds;
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    devicePortConfigs, std::set<int32_t>(), devicePortConfigIds, &devicePortIds));
+    RETURN_STATUS_IF_ERROR(fillPortConfigs(
+                    mixPortConfigs, devicePortIds, mixPortConfigIds, nullptr));
+    if (existingPatchIt != mPatches.end()) {
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mModule->setAudioPatch(aidlPatch, &aidlPatch)));
+        existingPatchIt->second = aidlPatch;
+    } else {
+        bool created = false;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(aidlPatch, &aidlPatch, &created));
+        // Since no cleanup of the patch is needed, 'created' is ignored.
+        halPatchId = aidlPatch.id;
+        *patch = static_cast<audio_patch_handle_t>(halPatchId);
+    }
+    cleanups.disarmAll();
+    return OK;
+}
+
+status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
+    if (patch == AUDIO_PATCH_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    int32_t halPatchId = static_cast<int32_t>(patch);
+    auto patchIt = mPatches.find(halPatchId);
+    if (patchIt == mPatches.end()) {
+        ALOGE("%s: patch with id %d not found", __func__, halPatchId);
+        return BAD_VALUE;
+    }
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->resetAudioPatch(halPatchId)));
+    mPatches.erase(patchIt);
+    return OK;
+}
+
+status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    audio_port_v7 portV7;
+    audio_populate_audio_port_v7(port, &portV7);
+    RETURN_STATUS_IF_ERROR(getAudioPort(&portV7));
+    return audio_populate_audio_port(&portV7, port) ? OK : BAD_VALUE;
+}
+
+status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
+    auto aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+    // It seems that we don't have to call the HAL since all valid ports have been added either
+    // during initialization or while handling the connection of an external device.
+    auto portsIt = findPort(matchDevice);
+    if (portsIt == mPorts.end()) {
+        ALOGE("%s: device port for device %s is not found in the module %s",
+                __func__, matchDevice.toString().c_str(), mInstance.c_str());
+        return BAD_VALUE;
+    }
+    const int32_t fwkId = aidlPort.id;
+    aidlPort = portsIt->second;
+    aidlPort.id = fwkId;
+    *port = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
+                    aidlPort, isInput));
+    return OK;
+}
+
+status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+                    config->role, config->type)) == ::aidl::android::AudioPortDirection::INPUT;
+    AudioPortConfig requestedPortConfig = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+                    *config, isInput, 0 /*portId*/));
+    AudioPortConfig portConfig;
+    bool created = false;
+    RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+                    requestedPortConfig, std::set<int32_t>(), &portConfig, &created));
+    return OK;
+}
+
+MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
+    if (mMicrophones.status == Microphones::Status::UNKNOWN) {
+        TIME_CHECK();
+        std::vector<MicrophoneInfo> aidlInfo;
+        status_t status = statusTFromBinderStatus(mModule->getMicrophones(&aidlInfo));
+        if (status == OK) {
+            mMicrophones.status = Microphones::Status::QUERIED;
+            mMicrophones.info = std::move(aidlInfo);
+        } else if (status == INVALID_OPERATION) {
+            mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
+        } else {
+            ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+            return {};
+        }
+    }
+    if (mMicrophones.status == Microphones::Status::QUERIED) {
+        return &mMicrophones.info;
+    }
+    return {};  // NOT_SUPPORTED
+}
+
+status_t DeviceHalAidl::getMicrophones(
+        std::vector<audio_microphone_characteristic_t>* microphones) {
+    if (!microphones) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    auto staticInfo = getMicrophoneInfo();
+    if (!staticInfo) return INVALID_OPERATION;
+    std::vector<MicrophoneDynamicInfo> emptyDynamicInfo;
+    emptyDynamicInfo.reserve(staticInfo->size());
+    std::transform(staticInfo->begin(), staticInfo->end(), std::back_inserter(emptyDynamicInfo),
+            [](const auto& info) { return MicrophoneDynamicInfo{ .id = info.id }; });
+    *microphones = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainers<std::vector<audio_microphone_characteristic_t>>(
+                    *staticInfo, emptyDynamicInfo,
+                    ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t)
+    );
+    return OK;
+}
+
+status_t DeviceHalAidl::addDeviceEffect(audio_port_handle_t device __unused,
+        sp<EffectHalInterface> effect) {
+    if (!effect) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    ALOGE("%s not implemented yet", __func__);
+    return OK;
+}
+
+status_t DeviceHalAidl::removeDeviceEffect(audio_port_handle_t device __unused,
+                            sp<EffectHalInterface> effect) {
+    if (!effect) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    ALOGE("%s not implemented yet", __func__);
+    return OK;
+}
+
+status_t DeviceHalAidl::getMmapPolicyInfos(
+        media::audio::common::AudioMMapPolicyType policyType,
+        std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
+            cpp2ndk_AudioMMapPolicyType(policyType));
+
+    std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
+
+    if (status_t status = statusTFromBinderStatus(
+            mModule->getMmapPolicyInfos(mmapPolicyType, &mmapPolicyInfos)); status != OK) {
+        return status;
+    }
+
+    *policyInfos = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::audio::common::AudioMMapPolicyInfo>>(
+                mmapPolicyInfos, ndk2cpp_AudioMMapPolicyInfo));
+    return OK;
+}
+
+int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
+    TIME_CHECK();
+    if (!mModule) return 0;
+    int32_t mixerBurstCount = 0;
+    if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
+        return mixerBurstCount;
+    }
+    return 0;
+}
+
+int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
+    TIME_CHECK();
+    if (!mModule) return 0;
+    int32_t hardwareBurstMinUsec = 0;
+    if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
+        return hardwareBurstMinUsec;
+    }
+    return 0;
+}
+
+error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    int32_t aidlHwAvSync;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_int32_t_audio_hw_sync_t(aidlHwAvSync));
+}
+
+status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    return mModule->dump(fd, Args(args).args(), args.size());
+}
+
+int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (supports == nullptr) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
+}
+
+status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
+                                              ::ndk::SpAIBinder* soundDoseBinder)  {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (mSoundDose == nullptr) {
+        ndk::ScopedAStatus status = mModule->getSoundDose(&mSoundDose);
+        if (!status.isOk()) {
+            ALOGE("%s failed to return the sound dose interface for module %s: %s",
+                  __func__,
+                  module.c_str(),
+                  status.getDescription().c_str());
+            return BAD_VALUE;
+        }
+    }
+    *soundDoseBinder = mSoundDose->asBinder();
+    ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
+
+    return OK;
+}
+
+status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
+    // There is no AIDL API defined for `prepareToDisconnectExternalDevice`.
+    // Call `setConnectedState` instead.
+    // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
+    const status_t status = setConnectedState(port, false /*connected*/);
+    if (status == NO_ERROR) {
+        mDeviceDisconnectionNotified.insert(port->id);
+    }
+    return status;
+}
+
+status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    if (port == nullptr) {
+        return BAD_VALUE;
+    }
+    if (!connected && mDeviceDisconnectionNotified.erase(port->id) > 0) {
+        // For device disconnection, APM first calls `prepareToDisconnectExternalDevice`
+        // and then calls `setConnectedState`. However, since there is no HAL API for
+        // `prepareToDisconnectExternalDevice` yet, `setConnectedState` has already been
+        // invoked from `prepareToDisconnectExternalDevice`. Do not call the HAL again if
+        // that previous call was successful. Also remove the entry here so that the set
+        // does not grow unboundedly over a long run.
+        return NO_ERROR;
+    }
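+    // Expected call sequence for a disconnection, per the comment above (illustrative):
+    //
+    //   prepareToDisconnectExternalDevice(port);  // internally calls setConnectedState(port, false)
+    //   setConnectedState(port, false);           // early-returns here thanks to the check above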
+    bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
+    AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+    if (aidlPort.ext.getTag() != AudioPortExt::device) {
+        ALOGE("%s: provided port is not a device port (module %s): %s",
+                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        return BAD_VALUE;
+    }
+    if (connected) {
+        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+        // Reset the device address to find the "template" port.
+        matchDevice.address = AudioDeviceAddress::make<AudioDeviceAddress::id>();
+        auto portsIt = findPort(matchDevice);
+        if (portsIt == mPorts.end()) {
+            ALOGW("%s: device port for device %s is not found in the module %s",
+                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        // Use the ID of the "template" port, but take all the other information from the
+        // provided port.
+        aidlPort.id = portsIt->first;
+        AudioPort connectedPort;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+                                aidlPort, &connectedPort)));
+        const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
+        LOG_ALWAYS_FATAL_IF(!inserted,
+                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
+                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
+                it->second.toString().c_str());
+    } else {  // !connected
+        AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+        auto portsIt = findPort(matchDevice);
+        if (portsIt == mPorts.end()) {
+            ALOGW("%s: device port for device %s is not found in the module %s",
+                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        // Any streams opened on the external device must be closed by this time,
+        // thus we can clean up patches and port configs that were created for them.
+        resetUnusedPatchesAndPortConfigs();
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
+                                portsIt->second.id)));
+        mPorts.erase(portsIt);
+    }
+    return updateRoutes();
+}
+
+status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+    TIME_CHECK();
+    if (!mModule) return NO_INIT;
+    ModuleDebug debug{ .simulateDeviceConnections = enabled };
+    status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
+    // This is important to log as it affects HAL behavior.
+    if (status == OK) {
+        ALOGI("%s: set enabled: %d", __func__, enabled);
+    } else {
+        ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+    }
+    return status;
+}
+
+bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
+    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
+    return p.ext.get<AudioPortExt::Tag::device>().device == device;
+}
+
+bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPortConfig& p) {
+    if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
+    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
+        return p.portId == mDefaultInputPortId;
+    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
+        return p.portId == mDefaultOutputPortId;
+    }
+    return p.ext.get<AudioPortExt::Tag::device>().device == device;
+}
+
+status_t DeviceHalAidl::createOrUpdatePortConfig(
+        const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result, bool* created) {
+    TIME_CHECK();
+    AudioPortConfig appliedPortConfig;
+    bool applied = false;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
+                            requestedPortConfig, &appliedPortConfig, &applied)));
+    if (!applied) {
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPortConfig(
+                                appliedPortConfig, &appliedPortConfig, &applied)));
+        if (!applied) {
+            ALOGE("%s: module %s did not apply suggested config %s",
+                    __func__, mInstance.c_str(), appliedPortConfig.toString().c_str());
+            return NO_INIT;
+        }
+    }
+
+    int32_t id = appliedPortConfig.id;
+    if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
+        LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
+                requestedPortConfig.id, id);
+    }
+
+    auto [it, inserted] = mPortConfigs.insert_or_assign(std::move(id),
+            std::move(appliedPortConfig));
+    *result = it;
+    *created = inserted;
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtA2dpParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    std::optional<bool> a2dpEnabled;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtA2dpSuspended),
+                    [&a2dpEnabled](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            a2dpEnabled = false;  // 'suspended' == true
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            a2dpEnabled = true;  // 'suspended' == false
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    // FIXME(b/278976019): Support keyReconfigA2dp via vendor plugin
+    if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
+        return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    IBluetooth::HfpConfig hfpConfig;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtHfpEnable),
+                    [&hfpConfig](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            hfpConfig.isEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            hfpConfig.isEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+                    [&hfpConfig](int sampleRate) {
+                        if (sampleRate > 0) {
+                            hfpConfig.sampleRate = Int{ .value = sampleRate };
+                            return OK;
+                        }
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyBtHfpVolume),
+                    [&hfpConfig](int volume0to15) {
+                        if (volume0to15 >= 0 && volume0to15 <= 15) {
+                            hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
+                            return OK;
+                        }
+                        return BAD_VALUE;
+                    }));
+    if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
+        IBluetooth::HfpConfig newHfpConfig;
+        return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtLeParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    std::optional<bool> leEnabled;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtLeSuspended),
+                    [&leEnabled](const String8& trueOrFalse) {
+                        if (trueOrFalse == AudioParameter::valueTrue) {
+                            leEnabled = false;  // 'suspended' == true
+                            return OK;
+                        } else if (trueOrFalse == AudioParameter::valueFalse) {
+                            leEnabled = true;  // 'suspended' == false
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+                        return BAD_VALUE;
+                    }));
+    if (mBluetoothLe != nullptr && leEnabled.has_value()) {
+        return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    IBluetooth::ScoConfig scoConfig;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtSco),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.isEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.isEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtSco, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtScoHeadsetName),
+                    [&scoConfig](const String8& name) {
+                        scoConfig.debugName = name;
+                        return OK;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtNrec),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.isNrecEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.isNrecEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtNrec, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyBtScoWb),
+                    [&scoConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueOn) {
+                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueOff) {
+                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyBtScoWb, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
+        IBluetooth::ScoConfig newScoConfig;
+        return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
+    }
+    return OK;
+}
+
+status_t DeviceHalAidl::findOrCreatePatch(
+        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+    std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
+            requestedPatch.sourcePortConfigIds.end());
+    std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
+            requestedPatch.sinkPortConfigIds.end());
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+}
+
+status_t DeviceHalAidl::findOrCreatePatch(
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+    if (patchIt == mPatches.end()) {
+        TIME_CHECK();
+        AudioPatch requestedPatch, appliedPatch;
+        requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
+                sourcePortConfigIds.begin(), sourcePortConfigIds.end());
+        requestedPatch.sinkPortConfigIds.insert(requestedPatch.sinkPortConfigIds.end(),
+                sinkPortConfigIds.begin(), sinkPortConfigIds.end());
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->setAudioPatch(
+                                requestedPatch, &appliedPatch)));
+        patchIt = mPatches.insert(mPatches.end(), std::make_pair(appliedPatch.id, appliedPatch));
+        *created = true;
+    } else {
+        *created = false;
+    }
+    *patch = patchIt->second;
+    return OK;
+}
+
+status_t DeviceHalAidl::findOrCreatePortConfig(const AudioDevice& device, const AudioConfig* config,
+        AudioPortConfig* portConfig, bool* created) {
+    auto portConfigIt = findPortConfig(device);
+    if (portConfigIt == mPortConfigs.end()) {
+        auto portsIt = findPort(device);
+        if (portsIt == mPorts.end()) {
+            ALOGE("%s: device port for device %s is not found in the module %s",
+                    __func__, device.toString().c_str(), mInstance.c_str());
+            return BAD_VALUE;
+        }
+        AudioPortConfig requestedPortConfig;
+        requestedPortConfig.portId = portsIt->first;
+        if (config != nullptr) {
+            setPortConfigFromConfig(&requestedPortConfig, *config);
+        }
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                created));
+    } else {
+        *created = false;
+    }
+    *portConfig = portConfigIt->second;
+    return OK;
+}
+
+status_t DeviceHalAidl::findOrCreatePortConfig(
+        const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
+        AudioSource source, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
+    // These flags get removed one by one in this order when retrying port finding.
+    static const std::vector<AudioInputFlags> kOptionalInputFlags{
+        AudioInputFlags::FAST, AudioInputFlags::RAW };
+    auto portConfigIt = findPortConfig(config, flags, ioHandle);
+    if (portConfigIt == mPortConfigs.end() && flags.has_value()) {
+        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
+        AudioIoFlags matchFlags = flags.value();
+        auto portsIt = findPort(config, matchFlags, destinationPortIds);
+        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
+                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
+            if (!isBitPositionFlagSet(
+                            matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
+                ++optionalInputFlagsIt;
+                continue;
+            }
+            matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
+                    ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
+            portsIt = findPort(config, matchFlags, destinationPortIds);
+            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
+                    "retried with flags %s", __func__, config.toString().c_str(),
+                    flags.value().toString().c_str(), mInstance.c_str(),
+                    matchFlags.toString().c_str());
+        }
+        if (portsIt == mPorts.end()) {
+            ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
+                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
+                    mInstance.c_str());
+            return BAD_VALUE;
+        }
+        AudioPortConfig requestedPortConfig;
+        requestedPortConfig.portId = portsIt->first;
+        setPortConfigFromConfig(&requestedPortConfig, config);
+        requestedPortConfig.ext = AudioPortMixExt{ .handle = ioHandle };
+        if (matchFlags.getTag() == AudioIoFlags::Tag::input
+                && source != AudioSource::SYS_RESERVED_INVALID) {
+            requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
+                    AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
+        }
+        RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                created));
+    } else if (!flags.has_value()) {
+        ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
+                "and was not created as flags are not specified",
+                __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
+        return BAD_VALUE;
+    } else {
+        AudioPortConfig requestedPortConfig = portConfigIt->second;
+        if (requestedPortConfig.ext.getTag() == AudioPortExt::Tag::mix) {
+            AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
+            if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
+                    source != AudioSource::SYS_RESERVED_INVALID) {
+                mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
+            }
+        }
+
+        if (requestedPortConfig != portConfigIt->second) {
+            RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+                    created));
+        } else {
+            *created = false;
+        }
+    }
+    *portConfig = portConfigIt->second;
+    return OK;
+}
+
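+// Worked example (hypothetical request): if an input request carries FAST | RAW and no mix
+// port matches, the lookup is retried with FAST cleared first, then with RAW cleared as well,
+// following the order of kOptionalInputFlags; only when every relaxation step fails is
+// BAD_VALUE returned.
+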
+status_t DeviceHalAidl::findOrCreatePortConfig(
+        const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
+        AudioPortConfig* portConfig, bool* created) {
+    using Tag = AudioPortExt::Tag;
+    if (requestedPortConfig.ext.getTag() == Tag::mix) {
+        if (const auto& p = requestedPortConfig;
+                !p.sampleRate.has_value() || !p.channelMask.has_value() ||
+                !p.format.has_value()) {
+            ALOGW("%s: provided mix port config is not fully specified: %s",
+                    __func__, p.toString().c_str());
+            return BAD_VALUE;
+        }
+        AudioConfig config;
+        setConfigFromPortConfig(&config, requestedPortConfig);
+        AudioSource source = requestedPortConfig.ext.get<Tag::mix>().usecase.getTag() ==
+                AudioPortMixExtUseCase::Tag::source ?
+                requestedPortConfig.ext.get<Tag::mix>().usecase.
+                get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
+        return findOrCreatePortConfig(config, requestedPortConfig.flags,
+                requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
+                portConfig, created);
+    } else if (requestedPortConfig.ext.getTag() == Tag::device) {
+        return findOrCreatePortConfig(
+                requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
+                portConfig, created);
+    }
+    ALOGW("%s: unsupported audio port config: %s",
+            __func__, requestedPortConfig.toString().c_str());
+    return BAD_VALUE;
+}
+
+DeviceHalAidl::Patches::iterator DeviceHalAidl::findPatch(
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+    return std::find_if(mPatches.begin(), mPatches.end(),
+            [&](const auto& pair) {
+                const auto& p = pair.second;
+                std::set<int32_t> patchSrcs(
+                        p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
+                std::set<int32_t> patchSinks(
+                        p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
+                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+}
+
+DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(const AudioDevice& device) {
+    if (device.type.type == AudioDeviceType::IN_DEFAULT) {
+        return mPorts.find(mDefaultInputPortId);
+    } else if (device.type.type == AudioDeviceType::OUT_DEFAULT) {
+        return mPorts.find(mDefaultOutputPortId);
+    }
+    return std::find_if(mPorts.begin(), mPorts.end(),
+            [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
+}
+
+DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
+            const AudioConfig& config, const AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds) {
+    auto belongsToProfile = [&config](const AudioProfile& prof) {
+        return (isDefaultAudioFormat(config.base.format) || prof.format == config.base.format) &&
+                (config.base.channelMask.getTag() == AudioChannelLayout::none ||
+                        std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
+                                config.base.channelMask) != prof.channelMasks.end()) &&
+                (config.base.sampleRate == 0 ||
+                        std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
+                                config.base.sampleRate) != prof.sampleRates.end());
+    };
+    static const std::vector<AudioOutputFlags> kOptionalOutputFlags{AudioOutputFlags::BIT_PERFECT};
+    int optionalFlags = 0;
+    auto flagMatches = [&flags, &optionalFlags](const AudioIoFlags& portFlags) {
+        // Ports should be able to match if the optional flags are not requested.
+        return portFlags == flags ||
+               (portFlags.getTag() == AudioIoFlags::Tag::output &&
+                        AudioIoFlags::make<AudioIoFlags::Tag::output>(
+                                portFlags.get<AudioIoFlags::Tag::output>() &
+                                        ~optionalFlags) == flags);
+    };
+    auto matcher = [&](const auto& pair) {
+        const auto& p = pair.second;
+        return p.ext.getTag() == AudioPortExt::Tag::mix &&
+                flagMatches(p.flags) &&
+                (destinationPortIds.empty() ||
+                        std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
+                                [&](const int32_t destId) { return mRoutingMatrix.count(
+                                            std::make_pair(p.id, destId)) != 0; })) &&
+                (p.profiles.empty() ||
+                        std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
+                        p.profiles.end()); };
+    auto result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+    if (result == mPorts.end() && flags.getTag() == AudioIoFlags::Tag::output) {
+        auto optionalOutputFlagsIt = kOptionalOutputFlags.begin();
+        while (result == mPorts.end() && optionalOutputFlagsIt != kOptionalOutputFlags.end()) {
+            if (isBitPositionFlagSet(
+                        flags.get<AudioIoFlags::Tag::output>(), *optionalOutputFlagsIt)) {
+                // If the flag is set by the request, it must be matched.
+                ++optionalOutputFlagsIt;
+                continue;
+            }
+            optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
+            result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
+            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
+                  "retried excluding optional flags %#x", __func__, config.toString().c_str(),
+                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+        }
+    }
+    return result;
+}
+
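+// Worked example: an output request that does not ask for BIT_PERFECT can still match a port
+// that declares it, because the retry masks BIT_PERFECT out of the port's flags before the
+// comparison; a request that does ask for BIT_PERFECT is never relaxed and must match exactly.
+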
+DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(const AudioDevice& device) {
+    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
+            [&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
+}
+
+DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(
+            const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle) {
+    using Tag = AudioPortExt::Tag;
+    return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
+            [&](const auto& pair) {
+                const auto& p = pair.second;
+                LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
+                        (!p.sampleRate.has_value() || !p.channelMask.has_value() ||
+                         !p.format.has_value() || !p.flags.has_value()),
+                        "%s: stored mix port config is not fully specified: %s",
+                        __func__, p.toString().c_str());
+                return p.ext.getTag() == Tag::mix &&
+                        isConfigEqualToPortConfig(config, p) &&
+                        (!flags.has_value() || p.flags.value() == flags.value()) &&
+                        p.ext.template get<Tag::mix>().handle == ioHandle; });
+}
+
+void DeviceHalAidl::resetPatch(int32_t patchId) {
+    if (auto it = mPatches.find(patchId); it != mPatches.end()) {
+        mPatches.erase(it);
+        TIME_CHECK();
+        if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
+            ALOGE("%s: error while resetting patch %d: %s",
+                    __func__, patchId, status.getDescription().c_str());
+        }
+        return;
+    }
+    ALOGE("%s: patch id %d not found", __func__, patchId);
+}
+
+void DeviceHalAidl::resetPortConfig(int32_t portConfigId) {
+    if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
+        mPortConfigs.erase(it);
+        TIME_CHECK();
+        if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
+                !status.isOk()) {
+            ALOGE("%s: error while resetting port config %d: %s",
+                    __func__, portConfigId, status.getDescription().c_str());
+        }
+        return;
+    }
+    ALOGE("%s: port config id %d not found", __func__, portConfigId);
+}
+
+void DeviceHalAidl::resetUnusedPatches() {
+    // Since patches can be created independently of streams via 'createAudioPatch',
+    // here we only clean up patches for released streams.
+    for (auto it = mStreams.begin(); it != mStreams.end(); ) {
+        if (auto streamSp = it->first.promote(); streamSp) {
+            ++it;
+        } else {
+            resetPatch(it->second);
+            it = mStreams.erase(it);
+        }
+    }
+}
+
+void DeviceHalAidl::resetUnusedPatchesAndPortConfigs() {
+    resetUnusedPatches();
+    resetUnusedPortConfigs();
+}
+
+void DeviceHalAidl::resetUnusedPortConfigs() {
+    // The assumption is that port configs are used to create patches
+    // (or to open streams, but that involves creation of patches, too). Thus,
+    // orphaned port configs can and should be reset.
+    std::set<int32_t> portConfigIds;
+    std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+            std::inserter(portConfigIds, portConfigIds.end()),
+            [](const auto& pcPair) { return pcPair.first; });
+    for (const auto& p : mPatches) {
+        for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
+        for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+    }
+    for (int32_t id : mInitialPortConfigIds) {
+        portConfigIds.erase(id);
+    }
+    for (int32_t id : portConfigIds) resetPortConfig(id);
+}
+
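+// In other words: take the set of all known port config IDs, subtract every ID referenced by a
+// patch as well as the IDs recorded in mInitialPortConfigIds, and reset whatever remains.
+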
+status_t DeviceHalAidl::updateRoutes() {
+    TIME_CHECK();
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
+    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
+            __func__, mInstance.c_str());
+    mRoutingMatrix.clear();
+    for (const auto& r : mRoutes) {
+        for (auto portId : r.sourcePortIds) {
+            mRoutingMatrix.emplace(r.sinkPortId, portId);
+            mRoutingMatrix.emplace(portId, r.sinkPortId);
+        }
+    }
+    return OK;
+}
+
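+// Worked example (hypothetical port IDs): a route with sourcePortIds {1, 2} and sinkPortId 3
+// produces the pairs (3,1), (1,3), (3,2), (2,3), so reachability between a mix port and a
+// destination port can be checked in either direction via mRoutingMatrix.count(), which is
+// what findPort() relies on when destinationPortIds are provided.
+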
+void DeviceHalAidl::clearCallbacks(void* cookie) {
+    std::lock_guard l(mLock);
+    mCallbacks.erase(cookie);
+}
+
+sp<StreamOutHalInterfaceCallback> DeviceHalAidl::getStreamOutCallback(void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::out);
+}
+
+void DeviceHalAidl::setStreamOutCallback(
+        void* cookie, const sp<StreamOutHalInterfaceCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::out, cb);
+}
+
+sp<StreamOutHalInterfaceEventCallback> DeviceHalAidl::getStreamOutEventCallback(
+        void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::event);
+}
+
+void DeviceHalAidl::setStreamOutEventCallback(
+        void* cookie, const sp<StreamOutHalInterfaceEventCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::event, cb);
+}
+
+sp<StreamOutHalInterfaceLatencyModeCallback> DeviceHalAidl::getStreamOutLatencyModeCallback(
+        void* cookie) {
+    return getCallbackImpl(cookie, &Callbacks::latency);
+}
+
+void DeviceHalAidl::setStreamOutLatencyModeCallback(
+        void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>& cb) {
+    setCallbackImpl(cookie, &Callbacks::latency, cb);
+}
+
+template<class C>
+sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
+    std::lock_guard l(mLock);
+    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+        return ((it->second).*field).promote();
+    }
+    return nullptr;
+}
+template<class C>
+void DeviceHalAidl::setCallbackImpl(
+        void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
+    std::lock_guard l(mLock);
+    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+        (it->second).*field = cb;
+    }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
new file mode 100644
index 0000000..37d800b
--- /dev/null
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include <aidl/android/hardware/audio/core/BpModule.h>
+#include <aidl/android/hardware/audio/core/sounddose/BpSoundDose.h>
+#include <android-base/thread_annotations.h>
+#include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+
+#include "ConversionHelperAidl.h"
+
+namespace android {
+
+class StreamOutHalInterfaceCallback;
+class StreamOutHalInterfaceEventCallback;
+class StreamOutHalInterfaceLatencyModeCallback;
+
+// The role of the broker is to connect AIDL callback interface implementations
+// with StreamOut callback implementations. Since AIDL requires all callbacks
+// to be provided upfront, while libaudiohal interfaces allow late registration,
+// there is a need to coordinate the matching process.
+class CallbackBroker : public virtual RefBase {
+  public:
+    virtual ~CallbackBroker() = default;
+    // The cookie is always the stream instance pointer. Weak pointers are not used in order to
+    // avoid extra reference counting costs. The stream cleans up its entries on destruction. Since
+    // access to the callbacks map is synchronized, pointer aliasing caused by allocating a new
+    // stream at the address of a previously deleted stream is not a concern.
+    virtual void clearCallbacks(void* cookie) = 0;
+    virtual sp<StreamOutHalInterfaceCallback> getStreamOutCallback(void* cookie) = 0;
+    virtual void setStreamOutCallback(void* cookie, const sp<StreamOutHalInterfaceCallback>&) = 0;
+    virtual sp<StreamOutHalInterfaceEventCallback> getStreamOutEventCallback(void* cookie) = 0;
+    virtual void setStreamOutEventCallback(void* cookie,
+            const sp<StreamOutHalInterfaceEventCallback>&) = 0;
+    virtual sp<StreamOutHalInterfaceLatencyModeCallback> getStreamOutLatencyModeCallback(
+            void* cookie) = 0;
+    virtual void setStreamOutLatencyModeCallback(
+            void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>&) = 0;
+};
+
+class MicrophoneInfoProvider : public virtual RefBase {
+  public:
+    using Info = std::vector<::aidl::android::media::audio::common::MicrophoneInfo>;
+    virtual ~MicrophoneInfoProvider() = default;
+    // Returns nullptr if the HAL does not support microphone info retrieval.
+    virtual Info const* getMicrophoneInfo() = 0;
+};
+
+class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
+                      public CallbackBroker, public MicrophoneInfoProvider {
+  public:
+    status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+    status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
+    status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) override;
+
+    // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
+    status_t getSupportedDevices(uint32_t *devices) override;
+
+    // Check to see if the audio hardware interface has been initialized.
+    status_t initCheck() override;
+
+    // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
+    status_t setVoiceVolume(float volume) override;
+
+    // Set the audio volume for all audio activities other than voice call.
+    status_t setMasterVolume(float volume) override;
+
+    // Get the current master volume value for the HAL.
+    status_t getMasterVolume(float *volume) override;
+
+    // Called when the audio mode changes.
+    status_t setMode(audio_mode_t mode) override;
+
+    // Muting control.
+    status_t setMicMute(bool state) override;
+
+    status_t getMicMute(bool* state) override;
+
+    status_t setMasterMute(bool state) override;
+
+    status_t getMasterMute(bool *state) override;
+
+    // Set global audio parameters.
+    status_t setParameters(const String8& kvPairs) override;
+
+    // Get global audio parameters.
+    status_t getParameters(const String8& keys, String8 *values) override;
+
+    // Returns audio input buffer size according to parameters passed.
+    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+
+    // Creates and opens the audio hardware output stream. The stream is closed
+    // by releasing all references to the returned object.
+    status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
+                              audio_output_flags_t flags, struct audio_config* config,
+                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+
+    // Creates and opens the audio hardware input stream. The stream is closed
+    // by releasing all references to the returned object.
+    status_t openInputStream(audio_io_handle_t handle, audio_devices_t devices,
+                             struct audio_config* config, audio_input_flags_t flags,
+                             const char* address, audio_source_t source,
+                             audio_devices_t outputDevice, const char* outputDeviceAddress,
+                             sp<StreamInHalInterface>* inStream) override;
+
+    // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
+    status_t supportsAudioPatches(bool* supportsPatches) override;
+
+    // Creates an audio patch between several source and sink ports.
+    status_t createAudioPatch(unsigned int num_sources, const struct audio_port_config* sources,
+                              unsigned int num_sinks, const struct audio_port_config* sinks,
+                              audio_patch_handle_t* patch) override;
+
+    // Releases an audio patch.
+    status_t releaseAudioPatch(audio_patch_handle_t patch) override;
+
+    // Fills the list of supported attributes for a given audio port.
+    status_t getAudioPort(struct audio_port* port) override;
+
+    // Fills the list of supported attributes for a given audio port.
+    status_t getAudioPort(struct audio_port_v7 *port) override;
+
+    // Set audio port configuration.
+    status_t setAudioPortConfig(const struct audio_port_config* config) override;
+
+    // List microphones
+    status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones) override;
+
+    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+
+    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
+
+    status_t getMmapPolicyInfos(media::audio::common::AudioMMapPolicyType policyType __unused,
+                                std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos
+                                        __unused) override;
+
+    int32_t getAAudioMixerBurstCount() override;
+
+    int32_t getAAudioHardwareBurstMinUsec() override;
+
+    error::Result<audio_hw_sync_t> getHwAvSync() override;
+
+    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
+
+    status_t getSoundDoseInterface(const std::string& module,
+                                   ::ndk::SpAIBinder* soundDoseBinder) override;
+
+    status_t prepareToDisconnectExternalDevice(const struct audio_port_v7 *port) override;
+
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
+    status_t setSimulateDeviceConnections(bool enabled) override;
+
+    status_t dump(int __unused, const Vector<String16>& __unused) override;
+
+  private:
+    friend class sp<DeviceHalAidl>;
+
+    struct Callbacks {  // No need to use `atomic_wp` because access is serialized.
+        wp<StreamOutHalInterfaceCallback> out;
+        wp<StreamOutHalInterfaceEventCallback> event;
+        wp<StreamOutHalInterfaceLatencyModeCallback> latency;
+    };
+    struct Microphones {
+        enum Status { UNKNOWN, NOT_SUPPORTED, QUERIED };
+        Status status = Status::UNKNOWN;
+        MicrophoneInfoProvider::Info info;
+    };
+    using Patches = std::map<int32_t /*patch ID*/,
+            ::aidl::android::hardware::audio::core::AudioPatch>;
+    using PortConfigs = std::map<int32_t /*port config ID*/,
+            ::aidl::android::media::audio::common::AudioPortConfig>;
+    using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
+    using Routes = std::vector<::aidl::android::hardware::audio::core::AudioRoute>;
+    // Answers the question: is port ID 'first' reachable from port ID 'second'?
+    // It's a set, not a map, because both port IDs are known. The matrix is symmetric.
+    using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
+    using Streams = std::map<wp<StreamHalInterface>, int32_t /*patch ID*/>;
+    class Cleanups;
+
+    // Must not be constructed directly by clients.
+    DeviceHalAidl(
+            const std::string& instance,
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module);
+
+    ~DeviceHalAidl() override = default;
+
+    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioPort& p);
+    bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioPortConfig& p);
+    status_t createOrUpdatePortConfig(
+            const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+            PortConfigs::iterator* result, bool *created);
+    status_t filterAndUpdateBtA2dpParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtHfpParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtLeParameters(AudioParameter &parameters);
+    status_t filterAndUpdateBtScoParameters(AudioParameter &parameters);
+    status_t findOrCreatePatch(
+        const std::set<int32_t>& sourcePortConfigIds,
+        const std::set<int32_t>& sinkPortConfigIds,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
+    status_t findOrCreatePatch(
+        const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
+    status_t findOrCreatePortConfig(
+            const ::aidl::android::media::audio::common::AudioDevice& device,
+            const ::aidl::android::media::audio::common::AudioConfig* config,
+            ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
+            bool* created);
+    status_t findOrCreatePortConfig(
+            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
+            int32_t ioHandle,
+            ::aidl::android::media::audio::common::AudioSource aidlSource,
+            const std::set<int32_t>& destinationPortIds,
+            ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
+    status_t findOrCreatePortConfig(
+        const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+        const std::set<int32_t>& destinationPortIds,
+        ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
+    Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds);
+    Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
+    Ports::iterator findPort(
+            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+            const std::set<int32_t>& destinationPortIds);
+    PortConfigs::iterator findPortConfig(
+            const ::aidl::android::media::audio::common::AudioDevice& device);
+    PortConfigs::iterator findPortConfig(
+            const ::aidl::android::media::audio::common::AudioConfig& config,
+            const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
+            int32_t ioHandle);
+    status_t prepareToOpenStream(
+        int32_t aidlHandle,
+        const ::aidl::android::media::audio::common::AudioDevice& aidlDevice,
+        const ::aidl::android::media::audio::common::AudioIoFlags& aidlFlags,
+        ::aidl::android::media::audio::common::AudioSource aidlSource,
+        struct audio_config* config,
+        Cleanups* cleanups,
+        ::aidl::android::media::audio::common::AudioConfig* aidlConfig,
+        ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
+        ::aidl::android::hardware::audio::core::AudioPatch* aidlPatch);
+    void resetPatch(int32_t patchId);
+    void resetPortConfig(int32_t portConfigId);
+    void resetUnusedPatches();
+    void resetUnusedPatchesAndPortConfigs();
+    void resetUnusedPortConfigs();
+    status_t updateRoutes();
+
+    // CallbackBroker implementation
+    void clearCallbacks(void* cookie) override;
+    sp<StreamOutHalInterfaceCallback> getStreamOutCallback(void* cookie) override;
+    void setStreamOutCallback(void* cookie, const sp<StreamOutHalInterfaceCallback>& cb) override;
+    sp<StreamOutHalInterfaceEventCallback> getStreamOutEventCallback(void* cookie) override;
+    void setStreamOutEventCallback(void* cookie,
+            const sp<StreamOutHalInterfaceEventCallback>& cb) override;
+    sp<StreamOutHalInterfaceLatencyModeCallback> getStreamOutLatencyModeCallback(
+            void* cookie) override;
+    void setStreamOutLatencyModeCallback(
+            void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>& cb) override;
+    // Implementation helpers.
+    template<class C> sp<C> getCallbackImpl(void* cookie, wp<C> Callbacks::* field);
+    template<class C> void setCallbackImpl(void* cookie, wp<C> Callbacks::* field, const sp<C>& cb);
+
+    // MicrophoneInfoProvider implementation
+    MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
+
+    const std::string mInstance;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
+    std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>
+        mSoundDose = nullptr;
+    Ports mPorts;
+    int32_t mDefaultInputPortId = -1;
+    int32_t mDefaultOutputPortId = -1;
+    PortConfigs mPortConfigs;
+    std::set<int32_t> mInitialPortConfigIds;
+    Patches mPatches;
+    Routes mRoutes;
+    RoutingMatrix mRoutingMatrix;
+    Streams mStreams;
+    Microphones mMicrophones;
+    std::mutex mLock;
+    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
+    std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
+};
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 0cdf621..826461f 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -17,7 +17,7 @@
 #include <stdio.h>
 
 #define LOG_TAG "DeviceHalHidl"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 
 #include <cutils/native_handle.h>
 #include <cutils/properties.h>
@@ -35,6 +35,17 @@
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+#include <aidl/android/hardware/audio/core/sounddose/BpSoundDose.h>
+#include <aidl/android/hardware/audio/sounddose/BpSoundDoseFactory.h>
+#include <android/binder_manager.h>
+
+constexpr std::string_view kSoundDoseInterfaceModule = "/default";
+
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
+using aidl::android::hardware::audio::sounddose::ISoundDoseFactory;
+#endif
+
 using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
 using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
@@ -46,11 +57,21 @@
 using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-#define TIME_CHECK() auto timeCheck = \
-        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+class DeviceHalHidl::SoundDoseWrapper {
+public:
+    SoundDoseWrapper() = default;
+    ~SoundDoseWrapper() = default;
+
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    std::shared_ptr<ISoundDoseFactory> mSoundDoseFactory;
+    std::shared_ptr<ISoundDose> mSoundDose;
+#endif
+};
 
 DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
-        : CoreConversionHelperHidl("DeviceHalHidl"), mDevice(device) {
+        : CoreConversionHelperHidl("DeviceHalHidl"),
+          mDevice(device),
+          mSoundDoseWrapper(std::make_unique<DeviceHalHidl::SoundDoseWrapper>()) {
 }
 
 DeviceHalHidl::DeviceHalHidl(
@@ -59,7 +80,8 @@
 #if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
           mDevice(device),
 #endif
-          mPrimaryDevice(device) {
+          mPrimaryDevice(device),
+          mSoundDoseWrapper(std::make_unique<DeviceHalHidl::SoundDoseWrapper>()) {
 #if MAJOR_VERSION == 7 && MINOR_VERSION == 1
     auto getDeviceRet = mPrimaryDevice->getDevice();
     if (getDeviceRet.isOk()) {
@@ -82,6 +104,20 @@
     }
 }
 
+status_t DeviceHalHidl::getAudioPorts(
+        std::vector<media::audio::common::AudioPort> *ports __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalHidl::getAudioRoutes(std::vector<media::AudioRoute> *routes __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DeviceHalHidl::getSupportedModes(
+        std::vector<media::audio::common::AudioMode> *modes __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t DeviceHalHidl::getSupportedDevices(uint32_t*) {
     // Obsolete.
     return INVALID_OPERATION;
@@ -408,6 +444,7 @@
 
 template <typename HalPort>
 status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
     if (mDevice == 0) return NO_INIT;
     AudioPort hidlPort;
     HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -450,6 +487,7 @@
 }
 
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPortConfig;
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
@@ -459,12 +497,13 @@
 
 #if MAJOR_VERSION == 2
 status_t DeviceHalHidl::getMicrophones(
-        std::vector<media::MicrophoneInfo> *microphonesInfo __unused) {
+        std::vector<audio_microphone_characteristic_t> *microphonesInfo __unused) {
     if (mDevice == 0) return NO_INIT;
     return INVALID_OPERATION;
 }
 #elif MAJOR_VERSION >= 4
-status_t DeviceHalHidl::getMicrophones(std::vector<media::MicrophoneInfo> *microphonesInfo) {
+status_t DeviceHalHidl::getMicrophones(
+        std::vector<audio_microphone_characteristic_t> *microphonesInfo) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
@@ -475,8 +514,7 @@
             audio_microphone_characteristic_t dst;
             //convert
             (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
-            media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
-            microphonesInfo->push_back(microphone);
+            microphonesInfo->push_back(dst);
         }
     });
     return processReturn("getMicrophones", ret, retval);
@@ -513,9 +551,29 @@
 }
 #endif
 
+status_t DeviceHalHidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
+    // The HIDL HAL has no API for notifying it to prepare for a device connection state
+    // change. Call `setConnectedState` directly.
+    const status_t status = setConnectedState(port, false /*connected*/);
+    if (status == NO_ERROR) {
+        // Cache the port id so that the device won't be disconnected twice.
+        mDeviceDisconnectionNotified.insert(port->id);
+    }
+    return status;
+}
+
 status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::AudioPort;
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
+    if (!connected && mDeviceDisconnectionNotified.erase(port->id) > 0) {
+        // For device disconnection, APM first calls `prepareToDisconnectExternalDevice` and then
+        // calls `setConnectedState`. The HIDL HAL has no API for `prepareToDisconnectExternalDevice`,
+        // so this class already called `setConnectedState` from there. Do not call into the HAL
+        // again if that earlier call succeeded. Also erase the cached entry here so the cache
+        // does not grow unbounded over a long run.
+        return NO_ERROR;
+    }
 #if MAJOR_VERSION == 7 && MINOR_VERSION == 1
     if (supportsSetConnectedState7_1) {
         AudioPort hidlPort;
@@ -577,4 +635,50 @@
     return processReturn("dump", ret);
 }
 
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+status_t DeviceHalHidl::getSoundDoseInterface(const std::string& module,
+                                              ::ndk::SpAIBinder* soundDoseBinder) {
+    if (mSoundDoseWrapper->mSoundDose != nullptr) {
+        *soundDoseBinder = mSoundDoseWrapper->mSoundDose->asBinder();
+        return OK;
+    }
+
+    if (mSoundDoseWrapper->mSoundDoseFactory == nullptr) {
+        std::string interface =
+            std::string(ISoundDoseFactory::descriptor) + kSoundDoseInterfaceModule.data();
+        AIBinder* binder = AServiceManager_checkService(interface.c_str());
+        if (binder == nullptr) {
+            ALOGW("%s service %s doesn't exist", __func__, interface.c_str());
+            return NO_INIT;
+        }
+        mSoundDoseWrapper->mSoundDoseFactory =
+                ISoundDoseFactory::fromBinder(ndk::SpAIBinder(binder));
+    }
+
+    auto result = mSoundDoseWrapper->mSoundDoseFactory->getSoundDose(
+                        module, &mSoundDoseWrapper->mSoundDose);
+    if (!result.isOk()) {
+        ALOGW("%s could not get sound dose interface: %s", __func__, result.getMessage());
+        return BAD_VALUE;
+    }
+
+    if (mSoundDoseWrapper->mSoundDose == nullptr) {
+        ALOGW("%s standalone sound dose interface is not implemented", __func__);
+        *soundDoseBinder = nullptr;
+        return OK;
+    }
+
+    *soundDoseBinder = mSoundDoseWrapper->mSoundDose->asBinder();
+    ALOGI("%s using standalone sound dose interface", __func__);
+    return OK;
+}
+#else
+status_t DeviceHalHidl::getSoundDoseInterface(const std::string& module,
+                                              ::ndk::SpAIBinder* soundDoseBinder) {
+    (void)module;  // avoid unused param
+    (void)soundDoseBinder;  // avoid unused param
+    return INVALID_OPERATION;
+}
+#endif
+
 } // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index f6519b6..c5addde 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -29,88 +29,81 @@
 class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
 {
   public:
+    status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) override;
+
+    status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) override;
+
+    status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) override;
+
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
-    virtual status_t getSupportedDevices(uint32_t *devices);
+    status_t getSupportedDevices(uint32_t *devices) override;
 
     // Check to see if the audio hardware interface has been initialized.
-    virtual status_t initCheck();
+    status_t initCheck() override;
 
     // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
-    virtual status_t setVoiceVolume(float volume);
+    status_t setVoiceVolume(float volume) override;
 
     // Set the audio volume for all audio activities other than voice call.
-    virtual status_t setMasterVolume(float volume);
+    status_t setMasterVolume(float volume) override;
 
     // Get the current master volume value for the HAL.
-    virtual status_t getMasterVolume(float *volume);
+    status_t getMasterVolume(float *volume) override;
 
     // Called when the audio mode changes.
-    virtual status_t setMode(audio_mode_t mode);
+    status_t setMode(audio_mode_t mode) override;
 
     // Muting control.
-    virtual status_t setMicMute(bool state);
-    virtual status_t getMicMute(bool *state);
-    virtual status_t setMasterMute(bool state);
-    virtual status_t getMasterMute(bool *state);
+    status_t setMicMute(bool state) override;
+    status_t getMicMute(bool *state) override;
+    status_t setMasterMute(bool state) override;
+    status_t getMasterMute(bool *state) override;
 
     // Set global audio parameters.
-    virtual status_t setParameters(const String8& kvPairs);
+    status_t setParameters(const String8& kvPairs) override;
 
     // Get global audio parameters.
-    virtual status_t getParameters(const String8& keys, String8 *values);
+    status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    virtual status_t getInputBufferSize(const struct audio_config *config,
-            size_t *size);
+    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
-    virtual status_t openOutputStream(
-            audio_io_handle_t handle,
-            audio_devices_t devices,
-            audio_output_flags_t flags,
-            struct audio_config *config,
-            const char *address,
-            sp<StreamOutHalInterface> *outStream);
+    status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
+                              audio_output_flags_t flags, struct audio_config* config,
+                              const char* address, sp<StreamOutHalInterface>* outStream) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
-    virtual status_t openInputStream(
-            audio_io_handle_t handle,
-            audio_devices_t devices,
-            struct audio_config *config,
-            audio_input_flags_t flags,
-            const char *address,
-            audio_source_t source,
-            audio_devices_t outputDevice,
-            const char *outputDeviceAddress,
-            sp<StreamInHalInterface> *inStream);
+    status_t openInputStream(audio_io_handle_t handle, audio_devices_t devices,
+                             struct audio_config* config, audio_input_flags_t flags,
+                             const char* address, audio_source_t source,
+                             audio_devices_t outputDevice, const char* outputDeviceAddress,
+                             sp<StreamInHalInterface>* inStream) override;
 
     // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
-    virtual status_t supportsAudioPatches(bool *supportsPatches);
+    status_t supportsAudioPatches(bool* supportsPatches) override;
 
     // Creates an audio patch between several source and sink ports.
-    virtual status_t createAudioPatch(
-            unsigned int num_sources,
-            const struct audio_port_config *sources,
-            unsigned int num_sinks,
-            const struct audio_port_config *sinks,
-            audio_patch_handle_t *patch);
+    status_t createAudioPatch(unsigned int num_sources, const struct audio_port_config* sources,
+                              unsigned int num_sinks, const struct audio_port_config* sinks,
+                              audio_patch_handle_t* patch) override;
 
     // Releases an audio patch.
-    virtual status_t releaseAudioPatch(audio_patch_handle_t patch);
+    status_t releaseAudioPatch(audio_patch_handle_t patch) override;
 
     // Fills the list of supported attributes for a given audio port.
-    virtual status_t getAudioPort(struct audio_port *port);
+    status_t getAudioPort(struct audio_port *port) override;
 
     // Fills the list of supported attributes for a given audio port.
-    virtual status_t getAudioPort(struct audio_port_v7 *port);
+    status_t getAudioPort(struct audio_port_v7 *port) override;
 
     // Set audio port configuration.
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+    status_t setAudioPortConfig(const struct audio_port_config *config) override;
 
     // List microphones
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+    status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones) override;
 
     status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
     status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
@@ -132,18 +125,36 @@
         return INVALID_OPERATION;
     }
 
+    int32_t supportsBluetoothVariableLatency(bool* supports __unused) override {
+        // TODO: Implement the HAL query when moving to AIDL HAL.
+        return INVALID_OPERATION;
+    }
+
     status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
+    status_t setSimulateDeviceConnections(bool enabled __unused) override {
+        // Only supported by AIDL HALs.
+        return INVALID_OPERATION;
+    }
+
     error::Result<audio_hw_sync_t> getHwAvSync() override;
 
     status_t dump(int fd, const Vector<String16>& args) override;
 
+    status_t getSoundDoseInterface(const std::string& module,
+                                   ::ndk::SpAIBinder* soundDoseBinder) override;
+
+    status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) override;
+
   private:
     friend class DevicesFactoryHalHidl;
     sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
     // Null if it's not a primary device.
     sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
     bool supportsSetConnectedState7_1 = true;
+    class SoundDoseWrapper;
+    const std::unique_ptr<SoundDoseWrapper> mSoundDoseWrapper;
+    std::set<audio_port_handle_t> mDeviceDisconnectionNotified;
 
     // Can not be constructed directly by clients.
     explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
new file mode 100644
index 0000000..8345cd2
--- /dev/null
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+
+#define LOG_TAG "DevicesFactoryHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/audio/core/IModule.h>
+#include <android/binder_manager.h>
+#include <binder/IServiceManager.h>
+#include <media/AidlConversionNdkCpp.h>
+#include <media/AidlConversionUtil.h>
+#include <utils/Log.h>
+
+#include "DeviceHalAidl.h"
+#include "DevicesFactoryHalAidl.h"
+
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::core::IConfig;
+using aidl::android::hardware::audio::core::IModule;
+using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::media::audio::common::AudioHalEngineConfig;
+using ::android::detail::AudioHalVersionInfo;
+
+namespace android {
+
+namespace {
+
+ConversionResult<media::SurroundSoundConfig::SurroundFormatFamily>
+ndk2cpp_SurroundSoundConfigFormatFamily(const SurroundSoundConfig::SurroundFormatFamily& ndk) {
+    media::SurroundSoundConfig::SurroundFormatFamily cpp;
+    cpp.primaryFormat = VALUE_OR_RETURN(ndk2cpp_AudioFormatDescription(ndk.primaryFormat));
+    cpp.subFormats = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+            media::audio::common::AudioFormatDescription>>(ndk.subFormats,
+                    ndk2cpp_AudioFormatDescription));
+    return cpp;
+}
+
+ConversionResult<media::SurroundSoundConfig>
+ndk2cpp_SurroundSoundConfig(const SurroundSoundConfig& ndk) {
+    media::SurroundSoundConfig cpp;
+    cpp.formatFamilies = VALUE_OR_RETURN(::aidl::android::convertContainer<std::vector<
+            media::SurroundSoundConfig::SurroundFormatFamily>>(ndk.formatFamilies,
+                    ndk2cpp_SurroundSoundConfigFormatFamily));
+    return cpp;
+}
+
+}  // namespace
+
+DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
+    : mConfig(std::move(config)) {
+}
+
+status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
+    if (names == nullptr) {
+        return BAD_VALUE;
+    }
+    AServiceManager_forEachDeclaredInstance(IModule::descriptor, static_cast<void*>(names),
+            [](const char* instance, void* context) {
+                if (strcmp(instance, "default") == 0) instance = "primary";
+                static_cast<decltype(names)>(context)->push_back(instance);
+            });
+    return OK;
+}
+
+// Opens a device with the specified name. To close the device, it is
+// necessary to release references to the returned object.
+status_t DevicesFactoryHalAidl::openDevice(const char *name, sp<DeviceHalInterface> *device) {
+    if (name == nullptr || device == nullptr) {
+        return BAD_VALUE;
+    }
+
+    // FIXME: Remove this call and the check for the supported module names
+    // after implementing retrieval of module names on the framework side.
+    // Currently the framework still uses the legacy XML config.
+    std::vector<std::string> deviceNames;
+    if (status_t status = getDeviceNames(&deviceNames); status != OK) {
+        return status;
+    }
+    std::shared_ptr<IModule> service;
+    if (std::find(deviceNames.begin(), deviceNames.end(), name) != deviceNames.end()) {
+        if (strcmp(name, "primary") == 0) name = "default";
+        auto serviceName = std::string(IModule::descriptor) + "/" + name;
+        service = IModule::fromBinder(
+                ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
+        ALOGE_IF(service == nullptr, "%s fromBinder %s failed", __func__, serviceName.c_str());
+    }
+    // If the service is null, the device object will not be functional,
+    // but it will not crash either.
+    *device = sp<DeviceHalAidl>::make(name, service);
+    return OK;
+}
+
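+// For illustration (assuming a HAL that declares the "default" IModule instance): opening
+// "primary" first passes the getDeviceNames() check, where "default" is reported as "primary";
+// the name is then mapped back to "default" and the binder service named
+// "<IModule descriptor>/default" is obtained via AServiceManager_waitForService().
+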
+status_t DevicesFactoryHalAidl::getHalPids(std::vector<pid_t> *pids) {
+    if (pids == nullptr) {
+        return BAD_VALUE;
+    }
+    // The functionality for retrieving debug info of services is not exposed via the NDK.
+    sp<IServiceManager> sm = defaultServiceManager();
+    if (sm == nullptr) {
+        return NO_INIT;
+    }
+    std::set<pid_t> pidsSet;
+    const auto moduleServiceName = std::string(IModule::descriptor) + "/";
+    auto debugInfos = sm->getServiceDebugInfo();
+    for (const auto& info : debugInfos) {
+        if (info.pid > 0 &&
+                info.name.size() > moduleServiceName.size() && // '>' because an instance name must follow
+                info.name.substr(0, moduleServiceName.size()) == moduleServiceName) {
+            pidsSet.insert(info.pid);
+        }
+    }
+    *pids = {pidsSet.begin(), pidsSet.end()};
+    return NO_ERROR;
+}
+
+status_t DevicesFactoryHalAidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
+    // Dynamic registration of module instances is not supported. The functionality
+    // in the audio server related to this callback can be removed together
+    // with HIDL support.
+    ALOG_ASSERT(callback != nullptr);
+    if (callback != nullptr) {
+        callback->onNewDevicesAvailable();
+    }
+    return NO_ERROR;
+}
+
+AudioHalVersionInfo DevicesFactoryHalAidl::getHalVersion() const {
+    int32_t versionNumber = 0;
+    if (ndk::ScopedAStatus status = mConfig->getInterfaceVersion(&versionNumber); !status.isOk()) {
+        ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
+    }
+    // AIDL interfaces do not have a minor version; use 0 for all versions.
+    return AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, versionNumber);
+}
+
+status_t DevicesFactoryHalAidl::getSurroundSoundConfig(media::SurroundSoundConfig *config) {
+    SurroundSoundConfig ndkConfig;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getSurroundSoundConfig(&ndkConfig)));
+    *config = VALUE_OR_RETURN_STATUS(ndk2cpp_SurroundSoundConfig(ndkConfig));
+    return OK;
+}
+
+status_t DevicesFactoryHalAidl::getEngineConfig(
+        media::audio::common::AudioHalEngineConfig *config) {
+    AudioHalEngineConfig ndkConfig;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mConfig->getEngineConfig(&ndkConfig)));
+    *config = VALUE_OR_RETURN_STATUS(ndk2cpp_AudioHalEngineConfig(ndkConfig));
+    return OK;
+}
+
+// Main entry-point to the shared library.
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
+    auto serviceName = std::string(IConfig::descriptor) + "/default";
+    auto service = IConfig::fromBinder(
+            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
+    if (!service) {
+        ALOGE("%s binder service %s does not exist", __func__, serviceName.c_str());
+        return nullptr;
+    }
+    return new DevicesFactoryHalAidl(service);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
new file mode 100644
index 0000000..21957bc
--- /dev/null
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/core/IConfig.h>
+#include <media/audiohal/DevicesFactoryHalInterface.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class DevicesFactoryHalAidl : public DevicesFactoryHalInterface
+{
+  public:
+    explicit DevicesFactoryHalAidl(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> config);
+
+    status_t getDeviceNames(std::vector<std::string> *names) override;
+
+    // Opens a device with the specified name. To close the device, it is
+    // necessary to release references to the returned object.
+    status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
+
+    status_t getHalPids(std::vector<pid_t> *pids) override;
+
+    status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
+
+    android::detail::AudioHalVersionInfo getHalVersion() const override;
+
+    status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+    status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
+
+  private:
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
+    ~DevicesFactoryHalAidl() = default;
+};
+
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp b/media/libaudiohal/impl/DevicesFactoryHalEntry.cpp
similarity index 74%
copy from media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
copy to media/libaudiohal/impl/DevicesFactoryHalEntry.cpp
index 2c6f2c6..5bf6d89 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalEntry.cpp
@@ -14,8 +14,10 @@
  * limitations under the License.
  */
 
-extern "C" void* createIEffectsFactoryImpl();
+#include "android/media/AudioHalVersion.h"
 
-extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
-    return createIEffectsFactoryImpl();
+extern "C" void* createIDevicesFactoryImpl();
+
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
+    return createIDevicesFactoryImpl();
 }
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 4069a6b..eef60b5 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -29,6 +29,7 @@
 #include "DeviceHalHidl.h"
 #include "DevicesFactoryHalHidl.h"
 
+using ::android::detail::AudioHalVersionInfo;
 using ::android::hardware::audio::CPP_VERSION::IDevice;
 using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 using ::android::hardware::Return;
@@ -105,6 +106,10 @@
 }
 #endif
 
+status_t DevicesFactoryHalHidl::getDeviceNames(std::vector<std::string> *names __unused) {
+    return INVALID_OPERATION;
+}
+
 status_t DevicesFactoryHalHidl::openDevice(const char *name, sp<DeviceHalInterface> *device) {
     auto factories = copyDeviceFactories();
     if (factories.empty()) return NO_INIT;
@@ -226,8 +231,23 @@
     return mDeviceFactories;
 }
 
+
+AudioHalVersionInfo DevicesFactoryHalHidl::getHalVersion() const {
+    return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
+}
+
+status_t DevicesFactoryHalHidl::getSurroundSoundConfig(
+        media::SurroundSoundConfig *config __unused) {
+    return INVALID_OPERATION;
+}
+
+status_t DevicesFactoryHalHidl::getEngineConfig(
+        media::audio::common::AudioHalEngineConfig *config __unused) {
+    return INVALID_OPERATION;
+}
+
 // Main entry-point to the shared library.
-extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
     auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
     return service ? new DevicesFactoryHalHidl(service) : nullptr;
 }
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index ffd229d..3285af7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -37,6 +37,8 @@
     explicit DevicesFactoryHalHidl(sp<IDevicesFactory> devicesFactory);
     void onFirstRef() override;
 
+    status_t getDeviceNames(std::vector<std::string> *names) override;
+
     // Opens a device with the specified name. To close the device, it is
     // necessary to release references to the returned object.
     status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
@@ -45,7 +47,11 @@
 
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
-    float getHalVersion() const override { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+    android::detail::AudioHalVersionInfo getHalVersion() const override;
+
+    status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) override;
+
+    status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) override;
 
   private:
     friend class ServiceNotificationListener;
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
new file mode 100644
index 0000000..a701852
--- /dev/null
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <cstdint>
+#include <cstring>
+#include <sys/mman.h>
+#define LOG_TAG "EffectBufferHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <cutils/ashmem.h>
+#include <utils/Log.h>
+
+#include "EffectBufferHalAidl.h"
+
+using ndk::ScopedFileDescriptor;
+
+namespace android {
+namespace effect {
+
+// static
+status_t EffectBufferHalAidl::allocate(size_t size, sp<EffectBufferHalInterface>* buffer) {
+    return mirror(nullptr, size, buffer);
+}
+
+status_t EffectBufferHalAidl::mirror(void* external, size_t size,
+                                     sp<EffectBufferHalInterface>* buffer) {
+    sp<EffectBufferHalAidl> tempBuffer = new EffectBufferHalAidl(size);
+    status_t status = tempBuffer.get()->init();
+    if (status != OK) {
+        ALOGE("%s init failed %d", __func__, status);
+        return status;
+    }
+
+    tempBuffer->setExternalData(external);
+    *buffer = tempBuffer;
+    return OK;
+}
+
+EffectBufferHalAidl::EffectBufferHalAidl(size_t size)
+    : mBufferSize(size),
+      mFrameCountChanged(false),
+      mExternalData(nullptr),
+      mAudioBuffer{0, {nullptr}} {
+}
+
+EffectBufferHalAidl::~EffectBufferHalAidl() {
+}
+
+status_t EffectBufferHalAidl::init() {
+    int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
+    if (fd < 0) {
+        ALOGE("%s create ashmem failed %d", __func__, fd);
+        return fd;
+    }
+
+    ScopedFileDescriptor tempFd(fd);
+    mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
+                            MAP_SHARED, fd, 0 /* offset */);
+    if (mAudioBuffer.raw == MAP_FAILED) {
+        ALOGE("mmap failed for fd %d", fd);
+        mAudioBuffer.raw = nullptr;
+        return INVALID_OPERATION;
+    }
+
+    mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
+    return OK;
+}
+
+audio_buffer_t* EffectBufferHalAidl::audioBuffer() {
+    return &mAudioBuffer;
+}
+
+void* EffectBufferHalAidl::externalData() const {
+    return mExternalData;
+}
+
+void EffectBufferHalAidl::setFrameCount(size_t frameCount) {
+    mAudioBuffer.frameCount = frameCount;
+    mFrameCountChanged = true;
+}
+
+bool EffectBufferHalAidl::checkFrameCountChange() {
+    bool result = mFrameCountChanged;
+    mFrameCountChanged = false;
+    return result;
+}
+
+void EffectBufferHalAidl::setExternalData(void* external) {
+    mExternalData = external;
+}
+
+void EffectBufferHalAidl::update() {
+    update(mBufferSize);
+}
+
+void EffectBufferHalAidl::commit() {
+    commit(mBufferSize);
+}
+
+void EffectBufferHalAidl::copy(void* dst, const void* src, size_t n) const {
+    if (!dst || !src) {
+        return;
+    }
+    std::memcpy(dst, src, std::min(n, mBufferSize));
+}
+
+void EffectBufferHalAidl::update(size_t n) {
+    copy(mAudioBuffer.raw, mExternalData, n);
+}
+
+void EffectBufferHalAidl::commit(size_t n) {
+    copy(mExternalData, mAudioBuffer.raw, n);
+}
+
+} // namespace effect
+} // namespace android
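A short usage sketch for the mirrored buffer above (variable names are illustrative): mirror()
wraps client-owned memory, update() pushes it into the shared ashmem region before processing,
and commit() copies the processed data back out:

    float externalData[256] = {};  // client-owned audio samples
    sp<EffectBufferHalInterface> halBuffer;
    if (EffectBufferHalAidl::mirror(externalData, sizeof(externalData), &halBuffer) == OK) {
        halBuffer->update();   // copy externalData into the mmap'ed ashmem region
        // ... run the effect against halBuffer->audioBuffer() ...
        halBuffer->commit();   // copy the processed ashmem contents back into externalData
    }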
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
new file mode 100644
index 0000000..035314b
--- /dev/null
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/common/Ashmem.h>
+
+#include <media/audiohal/EffectBufferHalInterface.h>
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+
+class EffectBufferHalAidl : public EffectBufferHalInterface {
+  public:
+    static status_t allocate(size_t size, sp<EffectBufferHalInterface>* buffer);
+    static status_t mirror(void* external, size_t size, sp<EffectBufferHalInterface>* buffer);
+
+    audio_buffer_t* audioBuffer() override;
+    void* externalData() const override;
+
+    size_t getSize() const override { return mBufferSize; }
+
+    void setExternalData(void* external) override;
+    void setFrameCount(size_t frameCount) override;
+    bool checkFrameCountChange() override;
+
+    void update() override;
+    void commit() override;
+    void update(size_t size) override;
+    void commit(size_t size) override;
+
+  private:
+    friend class EffectBufferHalInterface;
+
+    // buffer size in bytes
+    const size_t mBufferSize;
+    bool mFrameCountChanged;
+    void* mExternalData;
+    aidl::android::hardware::common::Ashmem mMemory;
+    audio_buffer_t mAudioBuffer;
+
+    // Can not be constructed directly by clients.
+    explicit EffectBufferHalAidl(size_t size);
+
+    ~EffectBufferHalAidl();
+    void copy(void* dst, const void* src, size_t n) const;
+    status_t init();
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
new file mode 100644
index 0000000..52fed91
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -0,0 +1,417 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "EffectConversionHelperAidl"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_visualizer.h>
+
+#include <utils/Log.h>
+
+#include "EffectConversionHelperAidl.h"
+#include "EffectProxy.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioMode;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::android::hardware::EventFlag;
+using android::effect::utils::EffectParamReader;
+using android::effect::utils::EffectParamWriter;
+
+using ::android::status_t;
+
+const std::map<uint32_t /* effect_command_e */, EffectConversionHelperAidl::CommandHandler>
+        EffectConversionHelperAidl::mCommandHandlerMap = {
+                {EFFECT_CMD_INIT, &EffectConversionHelperAidl::handleInit},
+                {EFFECT_CMD_SET_PARAM, &EffectConversionHelperAidl::handleSetParameter},
+                {EFFECT_CMD_GET_PARAM, &EffectConversionHelperAidl::handleGetParameter},
+                {EFFECT_CMD_SET_CONFIG, &EffectConversionHelperAidl::handleSetConfig},
+                {EFFECT_CMD_GET_CONFIG, &EffectConversionHelperAidl::handleGetConfig},
+                {EFFECT_CMD_RESET, &EffectConversionHelperAidl::handleReset},
+                {EFFECT_CMD_ENABLE, &EffectConversionHelperAidl::handleEnable},
+                {EFFECT_CMD_DISABLE, &EffectConversionHelperAidl::handleDisable},
+                {EFFECT_CMD_SET_AUDIO_SOURCE, &EffectConversionHelperAidl::handleSetAudioSource},
+                {EFFECT_CMD_SET_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
+                {EFFECT_CMD_SET_INPUT_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
+                {EFFECT_CMD_SET_VOLUME, &EffectConversionHelperAidl::handleSetVolume},
+                {EFFECT_CMD_OFFLOAD, &EffectConversionHelperAidl::handleSetOffload},
+                // Only the visualizer supports these commands (a reuse of EFFECT_CMD_FIRST_PROPRIETARY)
+                {VISUALIZER_CMD_CAPTURE, &EffectConversionHelperAidl::handleVisualizerCapture},
+                {VISUALIZER_CMD_MEASURE, &EffectConversionHelperAidl::handleVisualizerMeasure}};
+
+EffectConversionHelperAidl::EffectConversionHelperAidl(
+        std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+        int32_t sessionId, int32_t ioId, const Descriptor& desc)
+    : mSessionId(sessionId),
+      mIoId(ioId),
+      mDesc(desc),
+      mEffect(std::move(effect)),
+      mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC),
+      mIsProxyEffect(mDesc.common.id.proxy.has_value() &&
+                     mDesc.common.id.proxy.value() == mDesc.common.id.uuid) {
+    mCommon.session = sessionId;
+    mCommon.ioHandle = ioId;
+    mCommon.input = mCommon.output = kDefaultAudioConfig;
+}
+
+status_t EffectConversionHelperAidl::handleCommand(uint32_t cmdCode, uint32_t cmdSize,
+                                                   void* pCmdData, uint32_t* replySize,
+                                                   void* pReplyData) {
+    const auto& handler = mCommandHandlerMap.find(cmdCode);
+    if (handler == mCommandHandlerMap.end() || !handler->second) {
+        ALOGE("%s handler for command %u doesn't exist", __func__, cmdCode);
+        return BAD_VALUE;
+    }
+    return (this->*handler->second)(cmdSize, pCmdData, replySize, pReplyData);
+}
+
+status_t EffectConversionHelperAidl::handleInit(uint32_t cmdSize __unused,
+                                                const void* pCmdData __unused, uint32_t* replySize,
+                                                void* pReplyData) {
+    if (!replySize || *replySize < sizeof(int) || !pReplyData) {
+        return BAD_VALUE;
+    }
+
+    // Do nothing for EFFECT_CMD_INIT; IEffect.open() is called when handling EFFECT_CMD_SET_CONFIG
+    return *(status_t*)pReplyData = OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetParameter(uint32_t cmdSize, const void* pCmdData,
+                                                        uint32_t* replySize, void* pReplyData) {
+    if (cmdSize < sizeof(effect_param_t) || !pCmdData || !replySize ||
+        *replySize < sizeof(int) || !pReplyData) {
+        return BAD_VALUE;
+    }
+
+    auto reader = EffectParamReader(*(effect_param_t*)pCmdData);
+    if (!reader.validateCmdSize(cmdSize)) {
+        ALOGE("%s illegal param %s size %u", __func__, reader.toString().c_str(), cmdSize);
+        return BAD_VALUE;
+    }
+
+    status_t ret = setParameter(reader);
+    EffectParamWriter writer(*(effect_param_t*)pReplyData);
+    writer.setStatus(ret);
+    return *(status_t*)pReplyData = ret;
+}
+
+status_t EffectConversionHelperAidl::handleGetParameter(uint32_t cmdSize, const void* pCmdData,
+                                                        uint32_t* replySize, void* pReplyData) {
+    if (cmdSize < sizeof(effect_param_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s illegal cmdSize %u pCmdData %p replySize %p replyData %p", __func__, cmdSize,
+              pCmdData, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    const auto reader = EffectParamReader(*(effect_param_t*)pCmdData);
+    if (*replySize < sizeof(effect_param_t) + reader.getParameterSize()) {
+        ALOGE("%s illegal param %s, replySize %u", __func__, reader.toString().c_str(), *replySize);
+        return BAD_VALUE;
+    }
+
+    // copy effect_param_t and parameters
+    memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + reader.getParameterSize());
+    auto writer = EffectParamWriter(*(effect_param_t*)pReplyData);
+    status_t ret = getParameter(writer);
+    writer.finishValueWrite();
+    writer.setStatus(ret);
+    *replySize = writer.getTotalSize();
+    if (ret != OK) {
+        ALOGE("%s error ret %d, %s", __func__, ret, writer.toString().c_str());
+    }
+    return ret;
+}
+
+status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize, const void* pCmdData,
+                                                     uint32_t* replySize, void* pReplyData) {
+    if (!replySize || *replySize != sizeof(int) || !pReplyData ||
+        cmdSize != sizeof(effect_config_t)) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
+        return BAD_VALUE;
+    }
+
+    effect_config_t* config = (effect_config_t*)pCmdData;
+    Parameter::Common common = {
+            .input =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->inputCfg, mIsInputStream)),
+            .output =
+                    VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+                            config->outputCfg, mIsInputStream)),
+            .session = mCommon.session,
+            .ioHandle = mCommon.ioHandle};
+
+    State state;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+    // if the buffer/ioHandle configuration changes for an already-opened effect, close and re-open it
+    if (state != State::INIT && mCommon != common) {
+        ALOGI("%s at state %s, closing effect", __func__,
+              android::internal::ToString(state).c_str());
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+        mStatusQ.reset();
+        mInputQ.reset();
+        mOutputQ.reset();
+    }
+
+    if (state == State::INIT) {
+        ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
+              android::internal::ToString(state).c_str(), common.input.toString().c_str(),
+              common.output.toString().c_str());
+        IEffect::OpenEffectReturn openReturn;
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
+
+        if (mIsProxyEffect) {
+            const auto& ret =
+                    std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+            mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+            mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+            mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
+        } else {
+            mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+            mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+            mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+        }
+
+        if (status_t status = updateEventFlags(); status != OK) {
+            mEffect->close();
+            return status;
+        }
+        mCommon = common;
+    } else if (mCommon != common) {
+        ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+        Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+    }
+
+    return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
+                                                     const void* pCmdData __unused,
+                                                     uint32_t* replySize, void* pReplyData) {
+    if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    Parameter param;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(
+            Parameter::Id::make<Parameter::Id::commonTag>(Parameter::common), &param)));
+
+    const auto& common = param.get<Parameter::common>();
+    effect_config_t* pConfig = (effect_config_t*)pReplyData;
+    pConfig->inputCfg = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.input, true));
+    pConfig->outputCfg = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.output, false));
+    return OK;
+}
+
+status_t EffectConversionHelperAidl::handleReset(uint32_t cmdSize __unused,
+                                                 const void* pCmdData __unused, uint32_t* replySize,
+                                                 void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    return statusTFromBinderStatus(mEffect->command(CommandId::RESET));
+}
+
+status_t EffectConversionHelperAidl::handleEnable(uint32_t cmdSize __unused,
+                                                  const void* pCmdData __unused,
+                                                  uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    return statusTFromBinderStatus(mEffect->command(CommandId::START));
+}
+
+status_t EffectConversionHelperAidl::handleDisable(uint32_t cmdSize __unused,
+                                                   const void* pCmdData __unused,
+                                                   uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    return statusTFromBinderStatus(mEffect->command(CommandId::STOP));
+}
+
+status_t EffectConversionHelperAidl::handleSetAudioSource(uint32_t cmdSize, const void* pCmdData,
+                                                          uint32_t* replySize, void* pReplyData) {
+    if (cmdSize != sizeof(uint32_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
+        return BAD_VALUE;
+    }
+
+    audio_source_t source = *(audio_source_t*)pCmdData;
+    AudioSource aidlSource =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_source_t_AudioSource(source));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mEffect->setParameter(Parameter::make<Parameter::source>(aidlSource))));
+    return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetAudioMode(uint32_t cmdSize, const void* pCmdData,
+                                                        uint32_t* replySize, void* pReplyData) {
+    if (cmdSize != sizeof(uint32_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
+        return BAD_VALUE;
+    }
+    audio_mode_t mode = *(audio_mode_t *)pCmdData;
+    AudioMode aidlMode =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mEffect->setParameter(Parameter::make<Parameter::mode>(aidlMode))));
+    return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetDevice(uint32_t cmdSize, const void* pCmdData,
+                                                     uint32_t* replySize, void* pReplyData) {
+    if (cmdSize != sizeof(uint32_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
+        return BAD_VALUE;
+    }
+    // TODO: convert from audio_devices_t to std::vector<AudioDeviceDescription>
+    // const auto& legacyDevice = *(uint32_t*)(pCmdData);
+    std::vector<AudioDeviceDescription> aidlDevices;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mEffect->setParameter(Parameter::make<Parameter::deviceDescription>(aidlDevices))));
+    return *static_cast<int32_t*>(pReplyData) = OK;
+}
+status_t EffectConversionHelperAidl::handleSetVolume(uint32_t cmdSize, const void* pCmdData,
+                                                     uint32_t* replySize __unused,
+                                                     void* pReplyData __unused) {
+    if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData) {
+        ALOGE("%s parameter invalid %u %p", __func__, cmdSize, pCmdData);
+        return BAD_VALUE;
+    }
+    Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / (1 << 24),
+                                      .right = (float)(*((uint32_t*)pCmdData + 1)) / (1 << 24)};
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            mEffect->setParameter(Parameter::make<Parameter::volumeStereo>(volume))));
+    return OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetOffload(uint32_t cmdSize, const void* pCmdData,
+                                                      uint32_t* replySize, void* pReplyData) {
+    if (cmdSize < sizeof(effect_offload_param_t) || !pCmdData || !replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+              pReplyData);
+        return BAD_VALUE;
+    }
+    effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
+    // send to proxy to update active sub-effect
+    if (mIsProxyEffect) {
+        ALOGI("%s offload param offload %s ioHandle %d", __func__,
+              offload->isOffload ? "true" : "false", offload->ioHandle);
+        mCommon.ioHandle = offload->ioHandle;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                std::static_pointer_cast<EffectProxy>(mEffect)->setOffloadParam(offload)));
+        // update FMQs
+        const auto& ret = std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+        mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+        mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+        mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
+        RETURN_STATUS_IF_ERROR(updateEventFlags());
+    }
+    return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleVisualizerCapture(uint32_t cmdSize __unused,
+                                                             const void* pCmdData __unused,
+                                                             uint32_t* replySize,
+                                                             void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    const auto& uuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(mDesc.common.id.type));
+    if (0 != memcmp(&uuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t))) {
+        ALOGE("%s visualizer command not supported by %s", __func__,
+              mDesc.common.id.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    return visualizerCapture(replySize, pReplyData);
+}
+
+status_t EffectConversionHelperAidl::handleVisualizerMeasure(uint32_t cmdSize __unused,
+                                                             const void* pCmdData __unused,
+                                                             uint32_t* replySize,
+                                                             void* pReplyData) {
+    if (!replySize || !pReplyData) {
+        ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    const auto& uuid = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(mDesc.common.id.type));
+    if (0 != memcmp(&uuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t))) {
+        ALOGE("%s visualizer command not supported by %s", __func__,
+              mDesc.common.id.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    return visualizerMeasure(replySize, pReplyData);
+}
+
+status_t EffectConversionHelperAidl::updateEventFlags() {
+    status_t status = BAD_VALUE;
+    EventFlag* efGroup = nullptr;
+    if (mStatusQ->isValid()) {
+        status = EventFlag::createEventFlag(mStatusQ->getEventFlagWord(), &efGroup);
+        if (status != OK || !efGroup) {
+            ALOGE("%s: create EventFlagGroup failed, ret %d, egGroup %p", __func__, status,
+                  efGroup);
+            status = (status == OK) ? BAD_VALUE : status;
+        }
+    }
+    mEfGroup.reset(efGroup, EventFlagDeleter());
+    return status;
+}
+
+}  // namespace effect
+}  // namespace android
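For illustration, a hypothetical caller-side view of the dispatch above: a legacy
EFFECT_CMD_SET_VOLUME payload carries two 8.24 fixed-point words (1.0f == 1 << 24 == 0x1000000),
which handleSetVolume converts to the AIDL Parameter::volumeStereo floats. Here `helper` is
assumed to point at a concrete conversion helper created elsewhere:

    uint32_t volumes[2] = {0x1000000 /* left = 1.0 */, 0x0800000 /* right = 0.5 */};
    int32_t reply = 0;
    uint32_t replySize = sizeof(reply);
    status_t status = helper->handleCommand(EFFECT_CMD_SET_VOLUME, sizeof(volumes), volumes,
                                            &replySize, &reply);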
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
new file mode 100644
index 0000000..0c682ff
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_effect.h>
+#include <system/audio_effects/audio_effects_utils.h>
+
+namespace android {
+namespace effect {
+
+class EffectConversionHelperAidl {
+  public:
+    status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
+                           void* pReplyData);
+    virtual ~EffectConversionHelperAidl() {}
+
+    using StatusMQ = ::android::AidlMessageQueue<
+            ::aidl::android::hardware::audio::effect::IEffect::Status,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    using DataMQ = ::android::AidlMessageQueue<
+            float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+    std::shared_ptr<StatusMQ> getStatusMQ() { return mStatusQ; }
+    std::shared_ptr<DataMQ> getInputMQ() { return mInputQ; }
+    std::shared_ptr<DataMQ> getOutputMQ() { return mOutputQ; }
+    std::shared_ptr<android::hardware::EventFlag> getEventFlagGroup() { return mEfGroup; }
+
+  protected:
+    const int32_t mSessionId;
+    const int32_t mIoId;
+    const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
+    // whether the effect is instantiated on an input stream
+    const bool mIsInputStream;
+    ::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
+
+    EffectConversionHelperAidl(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+
+    status_t handleSetParameter(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                void* pReplyData);
+    status_t handleGetParameter(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                void* pReplyData);
+
+  private:
+    const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
+            .type = aidl::android::media::audio::common::AudioFormatType::PCM,
+            .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
+    const bool mIsProxyEffect;
+
+    static constexpr int kDefaultframeCount = 0x100;
+
+    using AudioChannelLayout = aidl::android::media::audio::common::AudioChannelLayout;
+    const aidl::android::media::audio::common::AudioConfig kDefaultAudioConfig = {
+            .base = {.sampleRate = 44100,
+                     .channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                             AudioChannelLayout::LAYOUT_STEREO),
+                     .format = kDefaultFormatDescription},
+            .frameCount = kDefaultframeCount};
+    // command handler map
+    typedef status_t (EffectConversionHelperAidl::*CommandHandler)(uint32_t /* cmdSize */,
+                                                                   const void* /* pCmdData */,
+                                                                   uint32_t* /* replySize */,
+                                                                   void* /* pReplyData */);
+    static const std::map<uint32_t /* effect_command_e */, CommandHandler> mCommandHandlerMap;
+    // data and status FMQ
+    std::shared_ptr<StatusMQ> mStatusQ = nullptr;
+    std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
+
+
+    struct EventFlagDeleter {
+        void operator()(::android::hardware::EventFlag* flag) const {
+            if (flag) {
+                ::android::hardware::EventFlag::deleteEventFlag(&flag);
+            }
+        }
+    };
+    std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
+    status_t updateEventFlags();
+
+    status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                        void* pReplyData);
+    status_t handleSetConfig(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                             void* pReplyData);
+    status_t handleGetConfig(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                             void* pReplyData);
+    status_t handleEnable(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                          void* pReplyData);
+    status_t handleDisable(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                           void* pReplyData);
+    status_t handleReset(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                         void* pReplyData);
+    status_t handleSetAudioSource(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                  void* pReplyData);
+    status_t handleSetAudioMode(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                void* pReplyData);
+    status_t handleSetDevice(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                             void* pReplyData);
+    status_t handleSetVolume(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                             void* pReplyData);
+    status_t handleSetOffload(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                              void* pReplyData);
+    status_t handleVisualizerCapture(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                     void* pReplyData);
+    status_t handleVisualizerMeasure(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+                                     void* pReplyData);
+
+    // implemented by the conversion helper of each effect type
+    virtual status_t setParameter(utils::EffectParamReader& param) = 0;
+    virtual status_t getParameter(utils::EffectParamWriter& param) = 0;
+    virtual status_t visualizerCapture(uint32_t* replySize __unused, void* pReplyData __unused) {
+        return BAD_VALUE;
+    }
+    virtual status_t visualizerMeasure(uint32_t* replySize __unused, void* pReplyData __unused) {
+        return BAD_VALUE;
+    }
+
+};
+
+}  // namespace effect
+}  // namespace android
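A minimal sketch (not an effect shipped in this change) of what a concrete conversion helper has
to provide: only setParameter/getParameter are mandatory, while the visualizer hooks default to
BAD_VALUE. The class is assumed to live in namespace android::effect next to the base class:

    class AidlConversionExample : public EffectConversionHelperAidl {
      public:
        AidlConversionExample(
                std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
                int32_t sessionId, int32_t ioId,
                const ::aidl::android::hardware::audio::effect::Descriptor& desc)
            : EffectConversionHelperAidl(std::move(effect), sessionId, ioId, desc) {}

      private:
        status_t setParameter(utils::EffectParamReader& param) override {
            (void)param;  // translate the legacy effect_param_t into an AIDL Parameter here
            return OK;
        }
        status_t getParameter(utils::EffectParamWriter& param) override {
            (void)param;  // query the AIDL effect and write the legacy reply here
            return OK;
        }
    };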
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
new file mode 100644
index 0000000..faf5f45
--- /dev/null
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#define LOG_TAG "EffectHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <memory>
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <media/AidlConversionUtil.h>
+#include <media/EffectsFactoryApi.h>
+#include <mediautils/TimeCheck.h>
+#include <system/audio.h>
+#include <system/audio_effects/effect_uuid.h>
+#include <utils/Log.h>
+
+#include "EffectHalAidl.h"
+#include "EffectProxy.h"
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+
+#include "effectsAidlConversion/AidlConversionAec.h"
+#include "effectsAidlConversion/AidlConversionAgc1.h"
+#include "effectsAidlConversion/AidlConversionAgc2.h"
+#include "effectsAidlConversion/AidlConversionBassBoost.h"
+#include "effectsAidlConversion/AidlConversionDownmix.h"
+#include "effectsAidlConversion/AidlConversionDynamicsProcessing.h"
+#include "effectsAidlConversion/AidlConversionEnvReverb.h"
+#include "effectsAidlConversion/AidlConversionEq.h"
+#include "effectsAidlConversion/AidlConversionHapticGenerator.h"
+#include "effectsAidlConversion/AidlConversionLoudnessEnhancer.h"
+#include "effectsAidlConversion/AidlConversionNoiseSuppression.h"
+#include "effectsAidlConversion/AidlConversionPresetReverb.h"
+#include "effectsAidlConversion/AidlConversionSpatializer.h"
+#include "effectsAidlConversion/AidlConversionVendorExtension.h"
+#include "effectsAidlConversion/AidlConversionVirtualizer.h"
+#include "effectsAidlConversion/AidlConversionVisualizer.h"
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+
+namespace android {
+namespace effect {
+
+EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
+                             const std::shared_ptr<IEffect>& effect, uint64_t effectId,
+                             int32_t sessionId, int32_t ioId, const Descriptor& desc,
+                             bool isProxyEffect)
+    : mFactory(factory),
+      mEffect(effect),
+      mEffectId(effectId),
+      mSessionId(sessionId),
+      mIoId(ioId),
+      mDesc(desc),
+      mIsProxyEffect(isProxyEffect) {
+    createAidlConversion(effect, sessionId, ioId, desc);
+}
+
+EffectHalAidl::~EffectHalAidl() {
+    if (mEffect) {
+        mIsProxyEffect ? std::static_pointer_cast<EffectProxy>(mEffect)->destroy()
+                       : mFactory->destroyEffect(mEffect);
+    }
+}
+
+status_t EffectHalAidl::createAidlConversion(
+        std::shared_ptr<IEffect> effect,
+        int32_t sessionId, int32_t ioId,
+        const Descriptor& desc) {
+    const auto& typeUuid = desc.common.id.type;
+    ALOGI("%s create UUID %s", __func__, typeUuid.toString().c_str());
+    if (typeUuid ==
+        ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler()) {
+        mConversion =
+                std::make_unique<android::effect::AidlConversionAec>(effect, sessionId, ioId, desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+                                   getEffectTypeUuidAutomaticGainControlV1()) {
+        mConversion = std::make_unique<android::effect::AidlConversionAgc1>(effect, sessionId, ioId,
+                                                                            desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+                                   getEffectTypeUuidAutomaticGainControlV2()) {
+        mConversion = std::make_unique<android::effect::AidlConversionAgc2>(effect, sessionId, ioId,
+                                                                            desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost()) {
+        mConversion = std::make_unique<android::effect::AidlConversionBassBoost>(effect, sessionId,
+                                                                                 ioId, desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix()) {
+        mConversion = std::make_unique<android::effect::AidlConversionDownmix>(effect, sessionId,
+                                                                               ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing()) {
+        mConversion =
+                std::make_unique<android::effect::AidlConversionDp>(effect, sessionId, ioId, desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb()) {
+        mConversion = std::make_unique<android::effect::AidlConversionEnvReverb>(effect, sessionId,
+                                                                                 ioId, desc);
+    } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer()) {
+        mConversion =
+                std::make_unique<android::effect::AidlConversionEq>(effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
+        mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
+        mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression()) {
+        mConversion = std::make_unique<android::effect::AidlConversionNoiseSuppression>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb()) {
+        mConversion = std::make_unique<android::effect::AidlConversionPresetReverb>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer()) {
+        mConversion = std::make_unique<android::effect::AidlConversionSpatializer>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer()) {
+        mConversion = std::make_unique<android::effect::AidlConversionVirtualizer>(
+                effect, sessionId, ioId, desc);
+    } else if (typeUuid ==
+               ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer()) {
+        mConversion = std::make_unique<android::effect::AidlConversionVisualizer>(effect, sessionId,
+                                                                                  ioId, desc);
+    } else {
+        // For unknown UUID, use vendor extension implementation
+        mConversion = std::make_unique<android::effect::AidlConversionVendorExtension>(
+                effect, sessionId, ioId, desc);
+    }
+    return OK;
+}
+
+status_t EffectHalAidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    mInBuffer = buffer;
+    return OK;
+}
+
+status_t EffectHalAidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    mOutBuffer = buffer;
+    return OK;
+}
+
+// write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
+status_t EffectHalAidl::process() {
+    auto statusQ = mConversion->getStatusMQ();
+    auto inputQ = mConversion->getInputMQ();
+    auto outputQ = mConversion->getOutputMQ();
+    auto efGroup = mConversion->getEventFlagGroup();
+    if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
+        !outputQ->isValid() || !efGroup) {
+        ALOGE("%s invalid FMQ [Status %d I %d O %d] efGroup %p", __func__,
+              statusQ ? statusQ->isValid() : 0, inputQ ? inputQ->isValid() : 0,
+              outputQ ? outputQ->isValid() : 0, efGroup.get());
+        return INVALID_OPERATION;
+    }
+
+    size_t available = inputQ->availableToWrite();
+    size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
+    if (floatsToWrite == 0) {
+        ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
+              mInBuffer->getSize() / sizeof(float), available);
+        return INVALID_OPERATION;
+    }
+    if (!mInBuffer->audioBuffer() ||
+        !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
+        ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
+              floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
+        return INVALID_OPERATION;
+    }
+    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+    IEffect::Status retStatus{};
+    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
+        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
+        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+        return INVALID_OPERATION;
+    }
+
+    available = outputQ->availableToRead();
+    size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
+    if (floatsToRead == 0) {
+        ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
+              mOutBuffer->getSize() / sizeof(float), available);
+        return INVALID_OPERATION;
+    }
+    // always read floating point data for AIDL
+    if (!mOutBuffer->audioBuffer() ||
+        !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
+              mOutBuffer->audioBuffer());
+        return INVALID_OPERATION;
+    }
+
+    ALOGD("%s %s consumed %zu produced %zu", __func__, mDesc.common.name.c_str(), floatsToWrite,
+          floatsToRead);
+    return OK;
+}
+
+// TODO: no remaining callers; consider deprecating this interface
+status_t EffectHalAidl::processReverse() {
+    ALOGW("%s not implemented yet", __func__);
+    return OK;
+}
+
+status_t EffectHalAidl::command(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
+                                uint32_t* replySize, void* pReplyData) {
+    TIME_CHECK();
+    if (!mConversion) {
+        ALOGE("%s can not handle command %d when conversion not exist", __func__, cmdCode);
+        return INVALID_OPERATION;
+    }
+
+    return mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+}
+
+status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
+    TIME_CHECK();
+    if (pDescriptor == nullptr) {
+        ALOGE("%s null descriptor pointer", __func__);
+        return BAD_VALUE;
+    }
+    Descriptor aidlDesc;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getDescriptor(&aidlDesc)));
+
+    *pDescriptor = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(aidlDesc));
+    return OK;
+}
+
+status_t EffectHalAidl::close() {
+    TIME_CHECK();
+    return statusTFromBinderStatus(mEffect->close());
+}
+
+status_t EffectHalAidl::dump(int fd) {
+    TIME_CHECK();
+    return mEffect->dump(fd, nullptr, 0);
+}
+
+} // namespace effect
+} // namespace android
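A hypothetical driver loop for the class above, assuming the effect has already been created
through the AIDL effects factory and configured (EFFECT_CMD_SET_CONFIG) so that the FMQs exist;
kBufferSizeBytes and the variable names are illustrative:

    constexpr size_t kBufferSizeBytes = 1024 * sizeof(float);
    sp<EffectBufferHalInterface> in, out;
    EffectBufferHalAidl::allocate(kBufferSizeBytes, &in);
    EffectBufferHalAidl::allocate(kBufferSizeBytes, &out);
    effect->setInBuffer(in);
    effect->setOutBuffer(out);
    // fill in->audioBuffer()->f32 with one block of float samples, then:
    status_t status = effect->process();  // write input FMQ, wait on status FMQ, read output FMQ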
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
new file mode 100644
index 0000000..47049d7
--- /dev/null
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <system/audio_effect.h>
+#include <system/audio_effects/aidl_effects_utils.h>
+
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class EffectHalAidl : public EffectHalInterface {
+  public:
+
+    // Set the input buffer.
+    status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
+
+    // Set the output buffer.
+    status_t setOutBuffer(const sp<EffectBufferHalInterface>& buffer) override;
+
+    // Effect process function.
+    status_t process() override;
+
+    // Process reverse stream function. This function is used to pass
+    // a reference stream to the effect engine.
+    status_t processReverse() override;
+
+    // Send a command and receive a response to/from effect engine.
+    status_t command(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
+                     void* pReplyData) override;
+
+    // Returns the effect descriptor.
+    status_t getDescriptor(effect_descriptor_t *pDescriptor) override;
+
+    // Free resources on the remote side.
+    status_t close() override;
+
+    // Whether it's a local implementation.
+    bool isLocal() const override { return false; }
+
+    status_t dump(int fd) override;
+
+    uint64_t effectId() const override { return mEffectId; }
+
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> getIEffect() const {
+        return mEffect;
+    }
+
+    // for TIME_CHECK
+    const std::string getClassName() const { return "EffectHalAidl"; }
+
+  private:
+    friend class sp<EffectHalAidl>;
+
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
+    const uint64_t mEffectId;
+    const int32_t mSessionId;
+    const int32_t mIoId;
+    const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+    const bool mIsProxyEffect;
+
+    std::unique_ptr<EffectConversionHelperAidl> mConversion;
+
+    sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
+
+    status_t createAidlConversion(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+    // Can not be constructed directly by clients.
+    EffectHalAidl(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
+            uint64_t effectId, int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+            bool isProxyEffect);
+    bool setEffectReverse(bool reverse);
+    bool needUpdateReturnParam(uint32_t cmdCode);
+
+    // The destructor automatically releases the effect.
+    virtual ~EffectHalAidl();
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index fdfe225..7ecdbd2 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -17,11 +17,16 @@
 #define LOG_TAG "EffectHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android-base/stringprintf.h>
 #include <common/all-versions/VersionUtils.h>
 #include <cutils/native_handle.h>
+#include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/EffectsFactoryApi.h>
+#include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio_effects/effect_spatializer.h>
 #include <utils/Log.h>
 
 #include <util/EffectUtils.h>
@@ -41,15 +46,24 @@
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-#define TIME_CHECK() auto timeCheck = \
-        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
-
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
         : EffectConversionHelperHidl("EffectHalHidl"),
           mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
     effect_descriptor_t halDescriptor{};
     if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
         mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+        const bool isSpatializer =
+                memcmp(&halDescriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+        if (isSpatializer) {
+            constexpr int32_t kRTPriorityMin = 1;
+            constexpr int32_t kRTPriorityMax = 3;
+            const int32_t priorityBoost = property_get_int32("audio.spatializer.priority", 1);
+            if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+                ALOGD("%s: audio.spatializer.priority %d on effect %lld",
+                         __func__, priorityBoost, (long long)effectId);
+                mHalThreadPriority = priorityBoost;
+            }
+        }
     }
 }
 
@@ -93,13 +107,13 @@
 }
 
 status_t EffectHalHidl::process() {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
 
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));
 }
 
 status_t EffectHalHidl::processReverse() {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
 
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE));
 }
@@ -127,6 +141,8 @@
         ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
         return NO_INIT;
     }
+
+    (void)checkHalThreadPriority();
     mStatusMQ = std::move(tempStatusMQ);
     return OK;
 }
@@ -317,5 +333,67 @@
     return result;
 }
 
+status_t EffectHalHidl::getHalPid(pid_t *pid) const {
+    using ::android::hidl::base::V1_0::DebugInfo;
+    using ::android::hidl::manager::V1_0::IServiceManager;
+    DebugInfo debugInfo;
+    const auto ret = mEffect->getDebugInfo([&] (const auto &info) {
+        debugInfo = info;
+    });
+    if (!ret.isOk()) {
+        ALOGW("%s: cannot get effect debug info", __func__);
+        return INVALID_OPERATION;
+    }
+    if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+        *pid = debugInfo.pid;
+        return NO_ERROR;
+    }
+    ALOGW("%s: effect debug info does not contain pid", __func__);
+    return NAME_NOT_FOUND;
+}
+
+status_t EffectHalHidl::getHalWorkerTid(pid_t *tid) {
+    int32_t reply = -1;
+    uint32_t replySize = sizeof(reply);
+    const status_t status =
+            command('gtid', 0 /* cmdSize */, nullptr /* pCmdData */, &replySize, &reply);
+    if (status == OK) {
+        *tid = (pid_t)reply;
+    } else {
+        ALOGW("%s: failed with status:%d", __func__, status);
+    }
+    return status;
+}
+
+bool EffectHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
+    if (mHalThreadPriority == kRTPriorityDisabled) {
+        return true;
+    }
+    const int err = requestPriority(
+            threadPid, threadId,
+            mHalThreadPriority, false /*isForApp*/, true /*asynchronous*/);
+    ALOGW_IF(err, "%s: failed to set RT priority %d for pid %d tid %d; error %d",
+            __func__, mHalThreadPriority, threadPid, threadId, err);
+    // Audio will still work, but may be more susceptible to glitches.
+    return err == 0;
+}
+
+status_t EffectHalHidl::checkHalThreadPriority() {
+    if (mHalThreadPriority == kRTPriorityDisabled) return OK;
+    if (mHalThreadPriority < kRTPriorityMin
+            || mHalThreadPriority > kRTPriorityMax) return BAD_VALUE;
+
+    pid_t halPid, halWorkerTid;
+    const status_t status = getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid);
+    const bool success = status == OK && requestHalThreadPriority(halPid, halWorkerTid);
+    ALOGD("%s: effectId %lld RT priority(%d) request %s%s",
+            __func__, (long long)mEffectId, mHalThreadPriority,
+            success ? "succeeded" : "failed",
+            status == OK
+                    ? base::StringPrintf(" for pid:%d tid:%d", halPid, halWorkerTid).c_str()
+                    : " (pid / tid cannot be read)");
+    return success ? OK : status != OK ? status : INVALID_OPERATION /* request failed */;
+}
+
 } // namespace effect
 } // namespace android
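
For clarity, a minimal standalone sketch of the priority-clamping rule applied to the spatializer boost above; the property read and the SchedulingPolicyService call are replaced by plain parameters, and the names below (sanitizePriorityBoost and the main() driver) are illustrative only, not part of the patch.

// Standalone sketch of the priority-validation logic used for the spatializer
// boost above. The property read and requestPriority() call are replaced with
// plain parameters so the snippet compiles outside the Android tree.
#include <cstdio>

namespace {
constexpr int kRTPriorityDisabled = 0;
constexpr int kRTPriorityMin = 1;
constexpr int kRTPriorityMax = 3;

// Returns the priority to apply, or kRTPriorityDisabled when the configured
// value is outside the accepted real-time range.
int sanitizePriorityBoost(int configured) {
    if (configured >= kRTPriorityMin && configured <= kRTPriorityMax) {
        return configured;
    }
    return kRTPriorityDisabled;
}
}  // namespace

int main() {
    // 1 is the default used above; 7 is rejected and leaves the boost
    // disabled, matching checkHalThreadPriority()'s BAD_VALUE path.
    std::printf("boost(1)=%d boost(7)=%d\n",
                sanitizePriorityBoost(1), sanitizePriorityBoost(7));
    return 0;
}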
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index e139768..94dcd7e 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -78,6 +78,11 @@
     std::unique_ptr<StatusMQ> mStatusMQ;
     EventFlag* mEfGroup;
     bool mIsInput = false;
+    static constexpr int32_t kRTPriorityMin = 1;
+    static constexpr int32_t kRTPriorityMax = 3;
+    static constexpr int kRTPriorityDisabled = 0;
+    // Typical RealTime mHalThreadPriority ranges from 1 (low) to 3 (high).
+    int mHalThreadPriority = kRTPriorityDisabled;
 
     // Can not be constructed directly by clients.
     EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
@@ -93,6 +98,10 @@
             uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
             uint32_t *replySize, void *pReplyData);
     status_t setProcessBuffers();
+    status_t getHalPid(pid_t *pid) const;
+    status_t getHalWorkerTid(pid_t *tid);
+    bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+    status_t checkHalThreadPriority();
 };
 
 } // namespace effect
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
new file mode 100644
index 0000000..b61532d
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -0,0 +1,291 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <memory>
+#define LOG_TAG "EffectProxy"
+//#define LOG_NDEBUG 0
+
+#include <fmq/AidlMessageQueue.h>
+#include <utils/Log.h>
+
+#include "EffectProxy.h"
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioUuid;
+
+namespace android {
+namespace effect {
+
+EffectProxy::EffectProxy(const Descriptor::Identity& id, const std::shared_ptr<IFactory>& factory)
+    : mIdentity([](const Descriptor::Identity& subId) {
+          // update EffectProxy implementation UUID to the sub-effect proxy UUID
+          ALOG_ASSERT(subId.proxy.has_value(), "Sub-effect Identity must have valid proxy UUID");
+          Descriptor::Identity tempId = subId;
+          tempId.uuid = subId.proxy.value();
+          return tempId;
+      }(id)),
+      mFactory(factory) {}
+
+EffectProxy::~EffectProxy() {
+    close();
+    destroy();
+    mSubEffects.clear();
+}
+
+// A sub-effect must have the same proxy UUID as the EffectProxy, and the type UUID must match.
+ndk::ScopedAStatus EffectProxy::addSubEffect(const Descriptor& sub) {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    if (0 != mSubEffects.count(sub.common.id) || !sub.common.id.proxy.has_value() ||
+        sub.common.id.proxy.value() != mIdentity.uuid) {
+        ALOGE("%s sub effect already exist or mismatch %s", __func__, sub.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                "illegalSubEffect");
+    }
+
+    // sub-effect not created yet
+    std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[sub.common.id]) = nullptr;
+    std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[sub.common.id]) = sub;
+    // set the last added sub-effect to active before setOffloadParam()
+    mActiveSub = sub.common.id;
+    ALOGI("%s add %s to proxy %s flag %s", __func__, mActiveSub.toString().c_str(),
+          mIdentity.toString().c_str(), sub.common.flags.toString().c_str());
+
+    if (sub.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
+        mSubFlags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+    }
+
+    // initial flag values before we know which sub-effect to activate (via setOffloadParam),
+    // same as the HIDL EffectProxy flags
+    mSubFlags.type = Flags::Type::INSERT;
+    mSubFlags.insert = Flags::Insert::LAST;
+    mSubFlags.volume = Flags::Volume::CTRL;
+
+    // set indication if any sub-effect indication was set
+    mSubFlags.offloadIndication |= sub.common.flags.offloadIndication;
+    mSubFlags.deviceIndication |= sub.common.flags.deviceIndication;
+    mSubFlags.audioModeIndication |= sub.common.flags.audioModeIndication;
+    mSubFlags.audioSourceIndication |= sub.common.flags.audioSourceIndication;
+
+    // set bypass when all sub-effects are bypassing
+    mSubFlags.bypass &= sub.common.flags.bypass;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectProxy::create() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+
+    for (auto& sub : mSubEffects) {
+        auto& effectHandle = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        ALOGI("%s sub-effect %s", __func__, sub.first.uuid.toString().c_str());
+        status = mFactory->createEffect(sub.first.uuid, &effectHandle);
+        if (!status.isOk() || !effectHandle) {
+            ALOGE("%s sub-effect failed %s", __func__, sub.first.uuid.toString().c_str());
+            break;
+        }
+    }
+
+    // destroy all created effects on failure
+    if (!status.isOk()) {
+        destroy();
+    }
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::destroy() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        ndk::ScopedAStatus status = mFactory->destroyEffect(effect);
+        if (status.isOk()) {
+            effect.reset();
+        }
+        return status;
+    });
+}
+
+const IEffect::OpenEffectReturn* EffectProxy::getEffectReturnParam() {
+    return &std::get<SubEffectTupleIndex::RETURN>(mSubEffects[mActiveSub]);
+}
+
+ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
+    const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
+        const auto& desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(sub.second);
+        ALOGI("%s: isOffload %d sub-effect: %s, flags %s", __func__, offload->isOffload,
+              desc.common.id.uuid.toString().c_str(), desc.common.flags.toString().c_str());
+        return offload->isOffload ==
+               (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+    });
+    if (itor == mSubEffects.end()) {
+        ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "noActiveEffectFound");
+    }
+
+    mActiveSub = itor->first;
+    ALOGI("%s: active %soffload sub-effect: %s, flags %s", __func__,
+          offload->isOffload ? "" : "non-", mActiveSub.uuid.toString().c_str(),
+          std::get<SubEffectTupleIndex::DESCRIPTOR>(itor->second).common.flags.toString().c_str());
+    return ndk::ScopedAStatus::ok();
+}
+
+// EffectProxy goes over the sub-effects and calls the IEffect interfaces
+ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
+                                     const std::optional<Parameter::Specific>& specific,
+                                     IEffect::OpenEffectReturn* ret __unused) {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+            EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+    for (auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        auto& openRet = std::get<SubEffectTupleIndex::RETURN>(sub.second);
+        if (!effect || !(status = effect->open(common, specific, &openRet)).isOk()) {
+            ALOGE("%s: failed to open UUID %s", __func__, sub.first.uuid.toString().c_str());
+            break;
+        }
+    }
+
+    // close all opened effects on failure
+    if (!status.isOk()) {
+        close();
+    }
+
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::close() {
+    ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+    return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+        return effect->close();
+    });
+}
+
+ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
+    if (!desc) {
+        ALOGE("%s: nuull descriptor pointer", __func__);
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER, "nullptr");
+    }
+
+    auto& activeSubEffect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+    // return the initial descriptor if no active sub-effect exists
+    if (!activeSubEffect) {
+        desc->common.id = mIdentity;
+        desc->common.flags = mSubFlags;
+        desc->common.name = "Proxy";
+        desc->common.implementor = "AOSP";
+    } else {
+        *desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[mActiveSub]);
+        desc->common.id = mIdentity;
+    }
+
+    ALOGI("%s with %s", __func__, desc->toString().c_str());
+    return ndk::ScopedAStatus::ok();
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::command(CommandId id) {
+    ALOGV("%s: %s, command %s", __func__, mIdentity.type.toString().c_str(),
+          android::internal::ToString(id).c_str());
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->command(id);
+            });
+}
+
+// Return the active sub-effect state
+ndk::ScopedAStatus EffectProxy::getState(State* state) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getState(state);
+            });
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::setParameter(const Parameter& param) {
+    return runWithActiveSubEffectThenOthers(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->setParameter(param);
+            });
+}
+
+// Return the active sub-effect parameter
+ndk::ScopedAStatus EffectProxy::getParameter(const Parameter::Id& id, Parameter* param) {
+    return runWithActiveSubEffect(
+            [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+                return effect->getParameter(id, param);
+            });
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffectThenOthers(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = runWithActiveSubEffect(func);
+    if (!status.isOk()) {
+        return status;
+    }
+
+    // proceed with the others only if the active sub-effect succeeded
+    for (const auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        if (sub.first != mActiveSub) {
+            if (!effect) {
+                ALOGE("%s null sub-effect interface for %s", __func__,
+                      sub.first.toString().c_str());
+                continue;
+            }
+            func(effect);
+        }
+    }
+    return status;
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffect(
+        std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+    auto& effect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+    if (!effect) {
+        ALOGE("%s null active sub-effect interface, active %s", __func__,
+              mActiveSub.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+                                                                "activeSubEffectNull");
+    }
+    return func(effect);
+}
+
+ndk::ScopedAStatus EffectProxy::runWithAllSubEffects(
+        std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func) {
+    ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+    // run the function on all sub-effects, skipping null handles
+    for (auto& sub : mSubEffects) {
+        auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+        if (!effect) {
+            ALOGW("%s null sub-effect interface for %s", __func__, sub.first.toString().c_str());
+            continue;
+        }
+        ndk::ScopedAStatus temp = func(effect);
+        if (!temp.isOk()) {
+            status = ndk::ScopedAStatus::fromStatus(temp.getStatus());
+        }
+    }
+    return status;
+}
+
+} // namespace effect
+} // namespace android
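
A minimal standalone sketch of the dispatch order implemented by runWithActiveSubEffectThenOthers(): the active sub-effect is tried first and the others are only reached on success. ProxySketch, SubEffect, and the plain bool return type are placeholders standing in for the AIDL types.

// Standalone sketch (hypothetical names) of the "active sub-effect first, then
// fan out" dispatch pattern used by EffectProxy.
#include <functional>
#include <iostream>
#include <map>
#include <string>

struct SubEffect { std::string name; };

class ProxySketch {
  public:
    void add(int id, std::string name) { mSubs[id] = SubEffect{std::move(name)}; }
    void setActive(int id) { mActive = id; }

    // Run func on the active sub-effect; only if it succeeds, best-effort run
    // it on the remaining sub-effects (their results are not propagated).
    bool runActiveThenOthers(const std::function<bool(SubEffect&)>& func) {
        auto it = mSubs.find(mActive);
        if (it == mSubs.end() || !func(it->second)) return false;
        for (auto& [id, sub] : mSubs) {
            if (id != mActive) func(sub);
        }
        return true;
    }

  private:
    std::map<int, SubEffect> mSubs;
    int mActive = -1;
};

int main() {
    ProxySketch proxy;
    proxy.add(1, "offload");
    proxy.add(2, "non-offload");
    proxy.setActive(1);
    proxy.runActiveThenOthers([](SubEffect& s) {
        std::cout << "command sent to " << s.name << '\n';
        return true;  // pretend the command succeeded
    });
    return 0;
}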
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
new file mode 100644
index 0000000..ffb8a19
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+
+/**
+ * EffectProxy is the proxy for one or more effect AIDL implementations (sub-effects) of the same
+ * type. The audio framework uses EffectProxy as a composite implementation of all sub-effect
+ * implementations.
+ *
+ * At any given time, there is only one active sub-effect consuming and producing data for each
+ * proxy. All setter commands and parameters (except the legacy EFFECT_CMD_OFFLOAD, which is
+ * handled by the audio framework directly) are passed through to all sub-effects, while getter
+ * commands and parameters are passed through only to the active sub-effect.
+ *
+ */
+class EffectProxy final : public ::aidl::android::hardware::audio::effect::BnEffect {
+  public:
+    EffectProxy(const ::aidl::android::hardware::audio::effect::Descriptor::Identity& id,
+                const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory);
+
+    /**
+     * Add a sub-effect into the proxy; the descriptor of the candidate sub-effect must have the
+     * same proxy UUID as mIdentity.uuid.
+     */
+    ndk::ScopedAStatus addSubEffect(
+            const ::aidl::android::hardware::audio::effect::Descriptor& sub);
+
+    /**
+     * Create all sub-effects via the AIDL IFactory. Always call create() after all sub-effects
+     * have been added successfully with addSubEffect().
+     */
+    ndk::ScopedAStatus create();
+
+    /**
+     * Destroy all sub-effects via the AIDL IFactory. Call destroy() only for sub-effects that
+     * were previously created with create().
+     */
+    ndk::ScopedAStatus destroy();
+
+    /**
+     * Handle offload parameter setting from framework.
+     */
+    ndk::ScopedAStatus setOffloadParam(const effect_offload_param_t* offload);
+
+    /**
+     * Get a const pointer to the active sub-effect's open return parameters.
+     * Always use this interface to get the effect open return parameters (FMQs) after a
+     * successful setOffloadParam() call.
+     */
+    const IEffect::OpenEffectReturn* getEffectReturnParam();
+
+    // IEffect interfaces override
+    ndk::ScopedAStatus open(
+            const ::aidl::android::hardware::audio::effect::Parameter::Common& common,
+            const std::optional<::aidl::android::hardware::audio::effect::Parameter::Specific>&
+                    specific,
+            ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
+    ndk::ScopedAStatus close() override;
+    ndk::ScopedAStatus getDescriptor(
+            ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
+    ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
+    ndk::ScopedAStatus getState(::aidl::android::hardware::audio::effect::State* state) override;
+    ndk::ScopedAStatus setParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter& param) override;
+    ndk::ScopedAStatus getParameter(
+            const ::aidl::android::hardware::audio::effect::Parameter::Id& id,
+            ::aidl::android::hardware::audio::effect::Parameter* param) override;
+
+  private:
+    // Proxy identity, copied from a sub-effect, with the implementation UUID set to the proxy UUID
+    const ::aidl::android::hardware::audio::effect::Descriptor::Identity mIdentity;
+    const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+
+    // A map from each sub-effect identity to its IEffect handle, descriptor, and open return (FMQ)
+    enum SubEffectTupleIndex { HANDLE, DESCRIPTOR, RETURN };
+    using EffectProxySub =
+            std::tuple<std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>,
+                       ::aidl::android::hardware::audio::effect::Descriptor,
+                       ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn>;
+    std::map<const ::aidl::android::hardware::audio::effect::Descriptor::Identity, EffectProxySub>
+            mSubEffects;
+
+    // Identity of the only active sub-effect in the mSubEffects map
+    ::aidl::android::hardware::audio::effect::Descriptor::Identity mActiveSub;
+
+    // combined flags of all sub-effects
+    ::aidl::android::hardware::audio::effect::Flags mSubFlags;
+
+    ndk::ScopedAStatus runWithActiveSubEffectThenOthers(
+            std::function<ndk::ScopedAStatus(
+                    const std::shared_ptr<
+                            ::aidl::android::hardware::audio::effect::IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithActiveSubEffect(
+            std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func);
+
+    ndk::ScopedAStatus runWithAllSubEffects(
+            std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func);
+
+    // close and release all sub-effects
+    ~EffectProxy();
+};
+
+} // namespace effect
+} // namespace android
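
A minimal standalone sketch of the enum-indexed tuple that mSubEffects stores per sub-effect; FakeEffect, FakeDescriptor, and FakeOpenReturn are placeholders for the AIDL types, kept only to show how the index enum keeps std::get<> calls self-describing.

// Standalone sketch of the enum-indexed tuple map pattern from EffectProxy.h.
// The element types are simplified placeholders, not the real AIDL types.
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <tuple>

struct FakeEffect { std::string name; };
struct FakeDescriptor { std::string uuid; };
struct FakeOpenReturn { int fmqFd = -1; };

enum SubEffectTupleIndex { HANDLE, DESCRIPTOR, RETURN };
using SubEffectEntry =
        std::tuple<std::shared_ptr<FakeEffect>, FakeDescriptor, FakeOpenReturn>;

int main() {
    std::map<std::string, SubEffectEntry> subEffects;
    // operator[] default-constructs the tuple entry, then each slot is filled
    // through its named index instead of a bare number.
    auto& entry = subEffects["proxy-uuid-0"];
    std::get<HANDLE>(entry) = std::make_shared<FakeEffect>(FakeEffect{"eq-sw"});
    std::get<DESCRIPTOR>(entry) = FakeDescriptor{"impl-uuid-1"};
    std::get<RETURN>(entry) = FakeOpenReturn{42};

    std::cout << std::get<HANDLE>(entry)->name << " fmq fd "
              << std::get<RETURN>(entry).fmqFd << '\n';
    return 0;
}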
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
new file mode 100644
index 0000000..7b9088e
--- /dev/null
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -0,0 +1,377 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#define LOG_TAG "EffectsFactoryHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <aidl/android/media/audio/common/AudioStreamType.h>
+#include <android/binder_manager.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "EffectBufferHalAidl.h"
+#include "EffectHalAidl.h"
+#include "EffectProxy.h"
+#include "EffectsFactoryHalAidl.h"
+
+using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::AudioStreamType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::android::base::unexpected;
+using ::android::detail::AudioHalVersionInfo;
+
+namespace android {
+namespace effect {
+
+EffectsFactoryHalAidl::EffectsFactoryHalAidl(std::shared_ptr<IFactory> effectsFactory)
+    : mFactory(effectsFactory),
+      mHalVersion(AudioHalVersionInfo(
+              AudioHalVersionInfo::Type::AIDL,
+              [this]() {
+                  int32_t majorVersion = 0;
+                  return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk())
+                                 ? majorVersion
+                                 : 0;
+              }())),
+      mHalDescList([this]() {
+          std::vector<Descriptor> list;
+          if (mFactory) {
+              mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+          }
+          return list;
+      }()),
+      mUuidProxyMap([this]() {
+          std::map<AudioUuid, std::shared_ptr<EffectProxy>> proxyMap;
+          for (const auto& desc : mHalDescList) {
+              // create EffectProxy
+              if (desc.common.id.proxy.has_value()) {
+                  const auto& uuid = desc.common.id.proxy.value();
+                  if (0 == proxyMap.count(uuid)) {
+                      proxyMap.insert({uuid, ndk::SharedRefBase::make<EffectProxy>(desc.common.id,
+                                                                                   mFactory)});
+                  }
+                  proxyMap[uuid]->addSubEffect(desc);
+                  ALOGI("%s addSubEffect %s", __func__, desc.common.toString().c_str());
+              }
+          }
+          return proxyMap;
+      }()),
+      mProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          for (const auto& proxy : mUuidProxyMap) {
+              if (Descriptor desc; proxy.second && proxy.second->getDescriptor(&desc).isOk()) {
+                  list.emplace_back(std::move(desc));
+              }
+          }
+          return list;
+      }()),
+      mNonProxyDescList([this]() {
+          std::vector<Descriptor> list;
+          std::copy_if(mHalDescList.begin(), mHalDescList.end(), std::back_inserter(list),
+                       [](const Descriptor& desc) { return !desc.common.id.proxy.has_value(); });
+          return list;
+      }()),
+      mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()),
+      mAidlProcessings([this]() -> std::vector<Processing> {
+          std::vector<Processing> processings;
+          if (!mFactory || !mFactory->queryProcessing(std::nullopt, &processings).isOk()) {
+              ALOGE("%s queryProcessing failed", __func__);
+          }
+          return processings;
+      }()) {
+    ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
+    ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
+          mProxyDescList.size());
+}
+
+status_t EffectsFactoryHalAidl::queryNumberEffects(uint32_t *pNumEffects) {
+    if (pNumEffects == nullptr) {
+        return BAD_VALUE;
+    }
+
+    *pNumEffects = mEffectCount;
+    ALOGI("%s %d", __func__, *pNumEffects);
+    return OK;
+}
+
+status_t EffectsFactoryHalAidl::getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) {
+    if (pDescriptor == nullptr) {
+        return BAD_VALUE;
+    }
+
+    if (index >= mEffectCount) {
+        ALOGE("%s index %d exceed max number %zu", __func__, index, mEffectCount);
+        return INVALID_OPERATION;
+    }
+
+    if (index >= mNonProxyDescList.size()) {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mProxyDescList.at(index - mNonProxyDescList.size())));
+    } else {
+        *pDescriptor =
+                VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+                        mNonProxyDescList.at(index)));
+    }
+    return OK;
+}
+
+status_t EffectsFactoryHalAidl::getDescriptor(const effect_uuid_t* halUuid,
+                                              effect_descriptor_t* pDescriptor) {
+    if (halUuid == nullptr) {
+        return BAD_VALUE;
+    }
+
+    AudioUuid uuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+    return getHalDescriptorWithImplUuid(uuid, pDescriptor);
+}
+
+status_t EffectsFactoryHalAidl::getDescriptors(const effect_uuid_t* halType,
+                                               std::vector<effect_descriptor_t>* descriptors) {
+    if (halType == nullptr) {
+        return BAD_VALUE;
+    }
+
+    AudioUuid type =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+    return getHalDescriptorWithTypeUuid(type, descriptors);
+}
+
+status_t EffectsFactoryHalAidl::createEffect(const effect_uuid_t* uuid, int32_t sessionId,
+                                             int32_t ioId, int32_t deviceId __unused,
+                                             sp<EffectHalInterface>* effect) {
+    if (uuid == nullptr || effect == nullptr) {
+        return BAD_VALUE;
+    }
+    if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
+        return INVALID_OPERATION;
+    }
+    ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
+
+    AudioUuid aidlUuid =
+            VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+    std::shared_ptr<IEffect> aidlEffect;
+    // Use EffectProxy interface instead of IFactory to create
+    const bool isProxy = isProxyEffect(aidlUuid);
+    if (isProxy) {
+        aidlEffect = mUuidProxyMap.at(aidlUuid);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mUuidProxyMap.at(aidlUuid)->create()));
+    } else {
+        RETURN_STATUS_IF_ERROR(
+                statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+    }
+    if (aidlEffect == nullptr) {
+        ALOGE("%s failed to create effect with UUID: %s", __func__, aidlUuid.toString().c_str());
+        return NAME_NOT_FOUND;
+    }
+    Descriptor desc;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aidlEffect->getDescriptor(&desc)));
+
+    uint64_t effectId;
+    {
+        std::lock_guard lg(mLock);
+        effectId = ++mEffectIdCounter;
+    }
+
+    *effect =
+            sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc, isProxy);
+    return OK;
+}
+
+status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
+    status_t ret = OK;
+    // record the error and continue dumping as many effects as possible
+    for (const auto& proxy : mUuidProxyMap) {
+        if (proxy.second) {
+            if (status_t temp = proxy.second->dump(fd, nullptr, 0); temp != OK) {
+                ret = temp;
+            }
+        }
+    }
+    RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
+    return ret;
+}
+
+status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
+    ALOGI("%s size %zu buffer %p", __func__, size, buffer);
+    return EffectBufferHalAidl::allocate(size, buffer);
+}
+
+status_t EffectsFactoryHalAidl::mirrorBuffer(void* external, size_t size,
+                                             sp<EffectBufferHalInterface>* buffer) {
+    ALOGI("%s extern %p size %zu buffer %p", __func__, external, size, buffer);
+    return EffectBufferHalAidl::mirror(external, size, buffer);
+}
+
+AudioHalVersionInfo EffectsFactoryHalAidl::getHalVersion() const {
+    return mHalVersion;
+}
+
+status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid(const AudioUuid& uuid,
+                                                             effect_descriptor_t* pDescriptor) {
+    if (pDescriptor == nullptr) {
+        return BAD_VALUE;
+    }
+
+    const auto& list = isProxyEffect(uuid) ? mProxyDescList : mNonProxyDescList;
+    auto matchIt = std::find_if(list.begin(), list.end(),
+                                [&](const auto& desc) { return desc.common.id.uuid == uuid; });
+    if (matchIt == list.end()) {
+        ALOGE("%s UUID not found in HAL and proxy list %s", __func__, uuid.toString().c_str());
+        return BAD_VALUE;
+    }
+    ALOGI("%s UUID impl found %s", __func__, uuid.toString().c_str());
+
+    *pDescriptor = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
+    return OK;
+}
+
+status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid(
+        const AudioUuid& type, std::vector<effect_descriptor_t>* descriptors) {
+    if (descriptors == nullptr) {
+        return BAD_VALUE;
+    }
+
+    std::vector<Descriptor> result;
+    std::copy_if(mNonProxyDescList.begin(), mNonProxyDescList.end(), std::back_inserter(result),
+                 [&](auto& desc) { return desc.common.id.type == type; });
+    std::copy_if(mProxyDescList.begin(), mProxyDescList.end(), std::back_inserter(result),
+                 [&](auto& desc) { return desc.common.id.type == type; });
+    if (result.empty()) {
+        ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, type.toString().c_str());
+        return BAD_VALUE;
+    }
+    ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), type.toString().c_str());
+
+    *descriptors = VALUE_OR_RETURN_STATUS(
+            aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
+                    result, ::aidl::android::aidl2legacy_Descriptor_effect_descriptor));
+    return OK;
+}
+
+bool EffectsFactoryHalAidl::isProxyEffect(const AudioUuid& uuid) const {
+    return 0 != mUuidProxyMap.count(uuid);
+}
+
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalAidl::getProcessings() const {
+
+    auto getConfigEffectWithDescriptor =
+            [](const auto& desc) -> std::shared_ptr<const effectsConfig::Effect> {
+        effectsConfig::Effect effect = {.name = desc.common.name, .isProxy = false};
+        if (const auto uuid =
+                    ::aidl::android::aidl2legacy_AudioUuid_audio_uuid_t(desc.common.id.uuid);
+            uuid.ok()) {
+            static_cast<effectsConfig::EffectImpl>(effect).uuid = uuid.value();
+            return std::make_shared<const effectsConfig::Effect>(effect);
+        } else {
+            return nullptr;
+        }
+    };
+
+    auto getConfigProcessingWithAidlProcessing =
+            [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
+                std::vector<effectsConfig::OutputStream>& postprocess) {
+                if (aidlProcess.type.getTag() == Processing::Type::streamType) {
+                    AudioStreamType aidlType =
+                            aidlProcess.type.template get<Processing::Type::streamType>();
+                    const auto type =
+                            ::aidl::android::aidl2legacy_AudioStreamType_audio_stream_type_t(
+                                    aidlType);
+                    if (!type.ok()) {
+                        return;
+                    }
+
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    effectsConfig::OutputStream stream = {.type = type.value(),
+                                                          .effects = std::move(effects)};
+                    postprocess.emplace_back(stream);
+                } else if (aidlProcess.type.getTag() == Processing::Type::source) {
+                    AudioSource aidlType =
+                            aidlProcess.type.template get<Processing::Type::source>();
+                    const auto type =
+                            ::aidl::android::aidl2legacy_AudioSource_audio_source_t(aidlType);
+                    if (!type.ok()) {
+                        return;
+                    }
+
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    effectsConfig::InputStream stream = {.type = type.value(),
+                                                         .effects = std::move(effects)};
+                    preprocess.emplace_back(stream);
+                }
+            };
+
+    static std::shared_ptr<const effectsConfig::Processings> processings(
+            [&]() -> std::shared_ptr<const effectsConfig::Processings> {
+                std::vector<effectsConfig::InputStream> preprocess;
+                std::vector<effectsConfig::OutputStream> postprocess;
+                for (const auto& processing : mAidlProcessings) {
+                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+                }
+
+                if (0 == preprocess.size() && 0 == postprocess.size()) {
+                    return nullptr;
+                }
+
+                return std::make_shared<const effectsConfig::Processings>(
+                        effectsConfig::Processings({.preprocess = std::move(preprocess),
+                                                    .postprocess = std::move(postprocess)}));
+            }());
+
+    return processings;
+}
+
+// Return 0 for AIDL, as the AIDL interface is not aware of the configuration file.
+::android::error::Result<size_t> EffectsFactoryHalAidl::getSkippedElements() const {
+    return 0;
+}
+
+} // namespace effect
+
+// When a shared library is built from a static library, even explicit
+// exports from a static library are optimized out unless actually used by
+// the shared library. See EffectsFactoryHalEntry.cpp.
+extern "C" void* createIEffectsFactoryImpl() {
+    auto serviceName = std::string(IFactory::descriptor) + "/default";
+    auto service = IFactory::fromBinder(
+            ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
+    if (!service) {
+        ALOGE("%s binder service %s not exist", __func__, serviceName.c_str());
+        return nullptr;
+    }
+    return new effect::EffectsFactoryHalAidl(service);
+}
+
+} // namespace android
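
A minimal standalone sketch of the index mapping used by getDescriptor(index, ...): indices below mNonProxyDescList.size() address the non-proxy list and the remainder addresses the proxy list. The string descriptors and the descriptorAt() helper are illustrative only.

// Standalone sketch of indexing across the non-proxy and proxy descriptor
// lists, as done by EffectsFactoryHalAidl::getDescriptor(index, ...).
#include <iostream>
#include <string>
#include <vector>

// Returns nullptr when the index is past the combined range, mirroring the
// INVALID_OPERATION branch above.
const std::string* descriptorAt(const std::vector<std::string>& nonProxy,
                                const std::vector<std::string>& proxy,
                                size_t index) {
    const size_t total = nonProxy.size() + proxy.size();
    if (index >= total) return nullptr;
    return index < nonProxy.size() ? &nonProxy[index]
                                   : &proxy[index - nonProxy.size()];
}

int main() {
    const std::vector<std::string> nonProxy = {"bassboost-sw", "visualizer-sw"};
    const std::vector<std::string> proxy = {"eq-proxy"};
    for (size_t i = 0; i < 4; ++i) {
        const std::string* d = descriptorAt(nonProxy, proxy, i);
        std::cout << i << ": " << (d ? *d : "<out of range>") << '\n';
    }
    return 0;
}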
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
new file mode 100644
index 0000000..39beea2
--- /dev/null
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstddef>
+#include <memory>
+#include <mutex>
+
+#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <aidl/android/hardware/audio/effect/Processing.h>
+#include <android-base/thread_annotations.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/thread_defs.h>
+
+#include "EffectProxy.h"
+
+namespace android {
+namespace effect {
+
+using namespace aidl::android::hardware::audio::effect;
+
+class EffectsFactoryHalAidl final : public EffectsFactoryHalInterface {
+  public:
+    explicit EffectsFactoryHalAidl(std::shared_ptr<IFactory> effectsFactory);
+
+    // Returns the number of different effects in all loaded libraries.
+    status_t queryNumberEffects(uint32_t *pNumEffects) override;
+
+    // Returns a descriptor of the next available effect.
+    status_t getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) override;
+
+    status_t getDescriptor(const effect_uuid_t* pEffectUuid,
+                           effect_descriptor_t* pDescriptor) override;
+
+    status_t getDescriptors(const effect_uuid_t* pEffectType,
+                            std::vector<effect_descriptor_t>* descriptors) override;
+
+    // Creates an effect engine of the specified type.
+    // To release the effect engine, it is necessary to release references to the returned effect
+    // object.
+    status_t createEffect(const effect_uuid_t* pEffectUuid, int32_t sessionId, int32_t ioId,
+                          int32_t deviceId, sp<EffectHalInterface>* effect) override;
+
+    status_t dumpEffects(int fd) override;
+
+    status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
+    status_t mirrorBuffer(void* external, size_t size,
+                          sp<EffectBufferHalInterface>* buffer) override;
+
+    detail::AudioHalVersionInfo getHalVersion() const override;
+
+    std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+    ::android::error::Result<size_t> getSkippedElements() const override;
+
+  private:
+    const std::shared_ptr<IFactory> mFactory;
+    const detail::AudioHalVersionInfo mHalVersion;
+    // Full list of HAL effect descriptors
+    const std::vector<Descriptor> mHalDescList;
+    // Map of proxy UUID (key) to the proxy object
+    const std::map<::aidl::android::media::audio::common::AudioUuid /* proxy impl UUID */,
+                   std::shared_ptr<EffectProxy>>
+            mUuidProxyMap;
+    // List of effect proxy descriptors, initialized after mUuidProxyMap (it needs all sub-effects)
+    const std::vector<Descriptor> mProxyDescList;
+    // List of non-proxy effects
+    const std::vector<Descriptor> mNonProxyDescList;
+    // total number of effects including proxy effects
+    const size_t mEffectCount;
+    // Query result of pre and post processing from effect factory
+    const std::vector<Processing> mAidlProcessings;
+
+    std::mutex mLock;
+    uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0;  // Align with HIDL (0 is INVALID_ID)
+
+    virtual ~EffectsFactoryHalAidl() = default;
+    status_t getHalDescriptorWithImplUuid(
+            const aidl::android::media::audio::common::AudioUuid& uuid,
+            effect_descriptor_t* pDescriptor);
+
+    status_t getHalDescriptorWithTypeUuid(
+            const aidl::android::media::audio::common::AudioUuid& type,
+            std::vector<effect_descriptor_t>* descriptors);
+
+    bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp b/media/libaudiohal/impl/EffectsFactoryHalEntry.cpp
similarity index 94%
rename from media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
rename to media/libaudiohal/impl/EffectsFactoryHalEntry.cpp
index 2c6f2c6..f6ad99a 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalEntry.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include "android/media/AudioHalVersion.h"
+
 extern "C" void* createIEffectsFactoryImpl();
 
 extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index d7217fc..210c4b5 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -17,6 +17,9 @@
 #define LOG_TAG "EffectsFactoryHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <optional>
+#include <tuple>
+
 #include <cutils/native_handle.h>
 
 #include <UuidUtils.h>
@@ -28,9 +31,13 @@
 #include "EffectHalHidl.h"
 #include "EffectsFactoryHalHidl.h"
 
+#include "android/media/AudioHalVersion.h"
+
+using ::android::base::unexpected;
+using ::android::detail::AudioHalVersionInfo;
+using ::android::hardware::Return;
 using ::android::hardware::audio::common::CPP_VERSION::implementation::UuidUtils;
 using ::android::hardware::audio::effect::CPP_VERSION::implementation::EffectUtils;
-using ::android::hardware::Return;
 
 namespace android {
 namespace effect {
@@ -38,51 +45,73 @@
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
-        : EffectConversionHelperHidl("EffectsFactory") {
-    ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
-    mEffectsFactory = effectsFactory;
-}
+class EffectDescriptorCache {
+  public:
+    using QueryResult = std::tuple<Return<void>, Result, hidl_vec<EffectDescriptor>>;
+    QueryResult queryAllDescriptors(IEffectsFactory* effectsFactory);
+  private:
+    std::mutex mLock;
+    std::optional<hidl_vec<EffectDescriptor>> mLastDescriptors;  // GUARDED_BY(mLock)
+};
 
-status_t EffectsFactoryHalHidl::queryAllDescriptors() {
-    if (mEffectsFactory == 0) return NO_INIT;
+EffectDescriptorCache::QueryResult EffectDescriptorCache::queryAllDescriptors(
+        IEffectsFactory* effectsFactory) {
+    {
+        std::lock_guard l(mLock);
+        if (mLastDescriptors.has_value()) {
+            return {::android::hardware::Void(), Result::OK, mLastDescriptors.value()};
+        }
+    }
     Result retval = Result::NOT_INITIALIZED;
-    Return<void> ret = mEffectsFactory->getAllDescriptors(
+    hidl_vec<EffectDescriptor> descriptors;
+    Return<void> ret = effectsFactory->getAllDescriptors(
             [&](Result r, const hidl_vec<EffectDescriptor>& result) {
                 retval = r;
                 if (retval == Result::OK) {
-                    mLastDescriptors = result;
+                    descriptors = result;
                 }
             });
-    if (ret.isOk()) {
-        return retval == Result::OK ? OK : NO_INIT;
+    if (ret.isOk() && retval == Result::OK) {
+        std::lock_guard l(mLock);
+        mLastDescriptors = descriptors;
     }
-    mLastDescriptors.resize(0);
-    return processReturn(__FUNCTION__, ret);
+    return {std::move(ret), retval, std::move(descriptors)};
+}
+
+EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
+    : EffectConversionHelperHidl("EffectsFactory"),
+      mEffectsFactory(std::move(effectsFactory)),
+      mCache(new EffectDescriptorCache),
+      mParsingResult(effectsConfig::parse()) {
+    ALOG_ASSERT(mEffectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
 }
 
 status_t EffectsFactoryHalHidl::queryNumberEffects(uint32_t *pNumEffects) {
-    status_t queryResult = queryAllDescriptors();
-    if (queryResult == OK) {
-        *pNumEffects = mLastDescriptors.size();
+    if (mEffectsFactory == 0) return NO_INIT;
+    auto [ret, retval, descriptors] = mCache->queryAllDescriptors(mEffectsFactory.get());
+    if (ret.isOk() && retval == Result::OK) {
+        *pNumEffects = descriptors.size();
+        return OK;
+    } else if (ret.isOk()) {
+        return NO_INIT;
     }
-    return queryResult;
+    return processReturn(__FUNCTION__, ret);
 }
 
 status_t EffectsFactoryHalHidl::getDescriptor(
         uint32_t index, effect_descriptor_t *pDescriptor) {
-    // TODO: We need somehow to track the changes on the server side
-    // or figure out how to convert everybody to query all the descriptors at once.
     if (pDescriptor == nullptr) {
         return BAD_VALUE;
     }
-    if (mLastDescriptors.size() == 0) {
-        status_t queryResult = queryAllDescriptors();
-        if (queryResult != OK) return queryResult;
+    if (mEffectsFactory == 0) return NO_INIT;
+    auto [ret, retval, descriptors] = mCache->queryAllDescriptors(mEffectsFactory.get());
+    if (ret.isOk() && retval == Result::OK) {
+        if (index >= descriptors.size()) return NAME_NOT_FOUND;
+        EffectUtils::effectDescriptorToHal(descriptors[index], pDescriptor);
+    } else if (ret.isOk()) {
+        return NO_INIT;
     }
-    if (index >= mLastDescriptors.size()) return NAME_NOT_FOUND;
-    EffectUtils::effectDescriptorToHal(mLastDescriptors[index], pDescriptor);
-    return OK;
+    return processReturn(__FUNCTION__, ret);
 }
 
 status_t EffectsFactoryHalHidl::getDescriptor(
@@ -114,21 +143,15 @@
     if (pEffectType == nullptr || descriptors == nullptr) {
         return BAD_VALUE;
     }
+    if (mEffectsFactory == 0) return NO_INIT;
 
-    uint32_t numEffects = 0;
-    status_t status = queryNumberEffects(&numEffects);
-    if (status != NO_ERROR) {
-        ALOGW("%s error %d from FactoryHal queryNumberEffects", __func__, status);
-        return status;
+    auto [ret, retval, hidlDescs] = mCache->queryAllDescriptors(mEffectsFactory.get());
+    if (!ret.isOk() || retval != Result::OK) {
+        return processReturn(__FUNCTION__, ret, retval);
     }
-
-    for (uint32_t i = 0; i < numEffects; i++) {
+    for (const auto& hidlDesc : hidlDescs) {
         effect_descriptor_t descriptor;
-        status = getDescriptor(i, &descriptor);
-        if (status != NO_ERROR) {
-            ALOGW("%s error %d from FactoryHal getDescriptor", __func__, status);
-            continue;
-        }
+        EffectUtils::effectDescriptorToHal(hidlDesc, &descriptor);
         if (memcmp(&descriptor.type, pEffectType, sizeof(effect_uuid_t)) == 0) {
             descriptors->push_back(descriptor);
         }
@@ -204,11 +227,26 @@
     return EffectBufferHalHidl::mirror(external, size, buffer);
 }
 
+AudioHalVersionInfo EffectsFactoryHalHidl::getHalVersion() const {
+    return AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, MAJOR_VERSION, MINOR_VERSION);
+}
+
+std::shared_ptr<const effectsConfig::Processings> EffectsFactoryHalHidl::getProcessings() const {
+    return mParsingResult.parsedConfig;
+}
+
+::android::error::Result<size_t> EffectsFactoryHalHidl::getSkippedElements() const {
+    if (!mParsingResult.parsedConfig) {
+        return ::android::base::unexpected(BAD_VALUE);
+    }
+    return mParsingResult.nbSkippedElement;
+}
+
 } // namespace effect
 
 // When a shared library is built from a static library, even explicit
 // exports from a static library are optimized out unless actually used by
-// the shared library. See EffectsFactoryHalHidlEntry.cpp.
+// the shared library. See EffectsFactoryHalEntry.cpp.
 extern "C" void* createIEffectsFactoryImpl() {
     auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
     return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
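
A minimal standalone sketch, under simplified assumptions, of the cache-on-first-success pattern behind EffectDescriptorCache: a successful query result is stored under a mutex and reused, and failures are never cached. The DescriptorCache class and the injected query callable are placeholders for the HIDL types.

// Standalone sketch of caching the first successful descriptor query.
// The HIDL getAllDescriptors() call is replaced by an injected std::function.
#include <functional>
#include <iostream>
#include <mutex>
#include <optional>
#include <string>
#include <vector>

class DescriptorCache {
  public:
    using Query = std::function<bool(std::vector<std::string>*)>;

    // Returns false when the backing query fails; in that case nothing is cached.
    bool getAll(const Query& query, std::vector<std::string>* out) {
        {
            std::lock_guard l(mLock);
            if (mCached) { *out = *mCached; return true; }
        }
        std::vector<std::string> fresh;
        if (!query(&fresh)) return false;
        std::lock_guard l(mLock);
        mCached = fresh;
        *out = std::move(fresh);
        return true;
    }

  private:
    std::mutex mLock;
    std::optional<std::vector<std::string>> mCached;  // GUARDED_BY(mLock)
};

int main() {
    DescriptorCache cache;
    int halCalls = 0;
    auto query = [&](std::vector<std::string>* out) {
        ++halCalls;
        *out = {"equalizer", "reverb"};
        return true;
    };
    std::vector<std::string> descs;
    cache.getAll(query, &descs);
    cache.getAll(query, &descs);  // served from the cache, no second HAL call
    std::cout << "descriptors " << descs.size() << ", HAL calls " << halCalls << '\n';
    return 0;
}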
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index e1882e1..4110ba3 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -14,8 +14,10 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_HIDL_H
-#define ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_HIDL_H
+#pragma once
+
+#include <memory>
+#include <vector>
 
 #include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -28,47 +30,53 @@
 using ::android::hardware::hidl_vec;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public EffectConversionHelperHidl
-{
+class EffectDescriptorCache;
+
+class EffectsFactoryHalHidl final : public EffectsFactoryHalInterface,
+                                    public EffectConversionHelperHidl {
   public:
     EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory);
 
     // Returns the number of different effects in all loaded libraries.
-    virtual status_t queryNumberEffects(uint32_t *pNumEffects);
+    status_t queryNumberEffects(uint32_t *pNumEffects) override;
 
     // Returns a descriptor of the next available effect.
-    virtual status_t getDescriptor(uint32_t index,
-            effect_descriptor_t *pDescriptor);
+    status_t getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) override;
 
-    virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
-            effect_descriptor_t *pDescriptor);
+    status_t getDescriptor(const effect_uuid_t* pEffectUuid,
+                           effect_descriptor_t* pDescriptor) override;
 
-    virtual status_t getDescriptors(const effect_uuid_t *pEffectType,
-                                    std::vector<effect_descriptor_t> *descriptors);
+    status_t getDescriptors(const effect_uuid_t* pEffectType,
+                            std::vector<effect_descriptor_t>* descriptors) override;
 
     // Creates an effect engine of the specified type.
     // To release the effect engine, it is necessary to release references
     // to the returned effect object.
-    virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
-            int32_t sessionId, int32_t ioId, int32_t deviceId,
-            sp<EffectHalInterface> *effect);
+    status_t createEffect(const effect_uuid_t* pEffectUuid, int32_t sessionId, int32_t ioId,
+                          int32_t deviceId, sp<EffectHalInterface>* effect) override;
 
-    virtual status_t dumpEffects(int fd);
-
-    virtual float getHalVersion() { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+    status_t dumpEffects(int fd) override;
 
     status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
     status_t mirrorBuffer(void* external, size_t size,
                           sp<EffectBufferHalInterface>* buffer) override;
 
-  private:
-    sp<IEffectsFactory> mEffectsFactory;
-    hidl_vec<EffectDescriptor> mLastDescriptors;
+    android::detail::AudioHalVersionInfo getHalVersion() const override;
 
-    status_t queryAllDescriptors();
+    std::shared_ptr<const effectsConfig::Processings> getProcessings() const override;
+
+    ::android::error::Result<size_t> getSkippedElements() const override;
+
+  private:
+    const sp<IEffectsFactory> mEffectsFactory;
+    const std::unique_ptr<EffectDescriptorCache> mCache;
+    /**
+     * Configuration file parser result, used by getProcessings() and getConfigParseResult().
+     * This struct holds the result of parsing a configuration file. The result includes the parsed
+     * configuration data, as well as any errors that occurred during parsing.
+     */
+    const effectsConfig::ParsingResult mParsingResult;
 };
 
 } // namespace effect
 } // namespace android
-
-#endif // ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
new file mode 100644
index 0000000..d1044dc
--- /dev/null
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -0,0 +1,961 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "StreamHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <algorithm>
+#include <cstdint>
+
+#include <audio_utils/clock.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionCore.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionUtil.h>
+#include <media/AudioParameter.h>
+#include <mediautils/TimeCheck.h>
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "DeviceHalAidl.h"
+#include "StreamHalAidl.h"
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
+using ::aidl::android::hardware::audio::core::IStreamCommon;
+using ::aidl::android::hardware::audio::core::IStreamIn;
+using ::aidl::android::hardware::audio::core::IStreamOut;
+using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
+using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
+
+namespace android {
+
+using HalCommand = StreamDescriptor::Command;
+namespace {
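+// Helpers for building StreamDescriptor commands, either without a payload (Void)
+// or carrying a payload such as the byte count for 'burst'.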
+template<HalCommand::Tag cmd> HalCommand makeHalCommand() {
+    return HalCommand::make<cmd>(::aidl::android::media::audio::common::Void{});
+}
+template<HalCommand::Tag cmd, typename T> HalCommand makeHalCommand(T data) {
+    return HalCommand::make<cmd>(data);
+}
+}  // namespace
+
+// static
+template<class T>
+std::shared_ptr<IStreamCommon> StreamHalAidl::getStreamCommon(const std::shared_ptr<T>& stream) {
+    std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> streamCommon;
+    if (stream != nullptr) {
+        if (ndk::ScopedAStatus status = stream->getStreamCommon(&streamCommon);
+                !status.isOk()) {
+            ALOGE("%s: failed to retrieve IStreamCommon instance: %s", __func__,
+                    status.getDescription().c_str());
+        }
+    }
+    return streamCommon;
+}
+
+StreamHalAidl::StreamHalAidl(
+        std::string_view className, bool isInput, const audio_config& config,
+        int32_t nominalLatency, StreamContextAidl&& context,
+        const std::shared_ptr<IStreamCommon>& stream)
+        : ConversionHelperAidl(className),
+          mIsInput(isInput),
+          mConfig(configToBase(config)),
+          mContext(std::move(context)),
+          mStream(stream) {
+    {
+        std::lock_guard l(mLock);
+        mLastReply.latencyMs = nominalLatency;
+    }
+    // Instrument audio signal power logging.
+    // Note: This assumes channel mask, format, and sample rate do not change after creation.
+    if (audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+            /* mStreamPowerLog.isUserDebugOrEngBuild() && */
+            StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
+        mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
+    }
+}
+
+StreamHalAidl::~StreamHalAidl() {
+    if (mStream != nullptr) {
+        ndk::ScopedAStatus status = mStream->close();
+        ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
+    }
+}
+
+status_t StreamHalAidl::getBufferSize(size_t *size) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (size == nullptr) {
+        return BAD_VALUE;
+    }
+    if (mContext.getFrameSizeBytes() == 0 || mContext.getBufferSizeFrames() == 0 ||
+            !mStream) {
+        return NO_INIT;
+    }
+    *size = mContext.getBufferSizeBytes();
+    return OK;
+}
+
+status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (configBase == nullptr) {
+        return BAD_VALUE;
+    }
+    if (!mStream) return NO_INIT;
+    *configBase = mConfig;
+    return OK;
+}
+
+status_t StreamHalAidl::setParameters(const String8& kvPairs) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+
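+    // Consume the HW A/V sync id if present; handled keys are removed from 'parameters'
+    // so that the warning below only reports keys that were not recognized.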
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                    parameters, String8(AudioParameter::keyStreamHwAvSync),
+            [&](int hwAvSyncId) {
+                return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
+            }));
+
+    ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: %s",
+            __func__, parameters.toString().c_str());
+    return OK;
+}
+
+status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    values->clear();
+    // AIDL HAL doesn't support getParameters API.
+    return INVALID_OPERATION;
+}
+
+status_t StreamHalAidl::getFrameSize(size_t *size) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (size == nullptr) {
+        return BAD_VALUE;
+    }
+    if (mContext.getFrameSizeBytes() == 0 || !mStream) {
+        return NO_INIT;
+    }
+    *size = mContext.getFrameSizeBytes();
+    return OK;
+}
+
+status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect __unused) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ALOGE("%s not implemented yet", __func__);
+    return OK;
+}
+
+status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect __unused) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ALOGE("%s not implemented yet", __func__);
+    return OK;
+}
+
+status_t StreamHalAidl::standby() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    const auto state = getState();
+    StreamDescriptor::Reply reply;
+    switch (state) {
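+        // Walk the stream down the state machine towards STANDBY
+        // (ACTIVE -> PAUSED -> IDLE -> STANDBY), verifying each intermediate transition.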
+        case StreamDescriptor::State::ACTIVE:
+            if (status_t status = pause(&reply); status != OK) return status;
+            if (reply.state != StreamDescriptor::State::PAUSED) {
+                ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::PAUSED:
+        case StreamDescriptor::State::DRAIN_PAUSED:
+            if (mIsInput) return flush();
+            if (status_t status = flush(&reply); status != OK) return status;
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
+                            &reply, true /*safeFromNonWorkerThread*/); status != OK) {
+                return status;
+            }
+            if (reply.state != StreamDescriptor::State::STANDBY) {
+                ALOGE("%s: unexpected stream state: %s (expected STANDBY)",
+                        __func__, toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::STANDBY:
+            return OK;
+        default:
+            ALOGE("%s: not supported from %s stream state %s",
+                    __func__, mIsInput ? "input" : "output", toString(state).c_str());
+            return INVALID_OPERATION;
+    }
+}
+
+status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return mStream->dump(fd, Args(args).args(), args.size());
+}
+
+status_t StreamHalAidl::start() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    const auto state = getState();
+    StreamDescriptor::Reply reply;
+    if (state == StreamDescriptor::State::STANDBY) {
+        if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true);
+                status != OK) {
+            return status;
+        }
+        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    }
+
+    return INVALID_OPERATION;
+}
+
+status_t StreamHalAidl::stop() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    return standby();
+}
+
+status_t StreamHalAidl::getLatency(uint32_t *latency) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    StreamDescriptor::Reply reply;
+    if (status_t status = updateCountersIfNeeded(&reply); status != OK) {
+        return status;
+    }
+    *latency = std::max<int32_t>(0, reply.latencyMs);
+    return OK;
+}
+
+status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    StreamDescriptor::Reply reply;
+    if (status_t status = updateCountersIfNeeded(&reply); status != OK) {
+        return status;
+    }
+    *frames = reply.observable.frames;
+    *timestamp = reply.observable.timeNs;
+    return OK;
+}
+
+status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    StreamDescriptor::Reply reply;
+    // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
+    if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true);
+            status != OK) {
+        return status;
+    }
+    *frames = reply.hardware.frames;
+    *timestamp = reply.hardware.timeNs;
+    return OK;
+}
+
+status_t StreamHalAidl::getXruns(int32_t *frames) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    if (!mStream) return NO_INIT;
+    StreamDescriptor::Reply reply;
+    if (status_t status = updateCountersIfNeeded(&reply); status != OK) {
+        return status;
+    }
+    *frames = reply.xrunFrames;
+    return OK;
+}
+
+status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
+    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
+    if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
+    mWorkerTid.store(gettid(), std::memory_order_release);
+    // Switch the stream into an active state if needed.
+    // Note: in the future we may add support for priming the audio pipeline
+    // with data prior to enabling output (i.e. issuing a "burst" command while the stream
+    // is still in the "standby" state), however this scenario wasn't supported by the HIDL HAL.
+    if (getState() == StreamDescriptor::State::STANDBY) {
+        StreamDescriptor::Reply reply;
+        if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply);
+                status != OK) {
+            return status;
+        }
+        if (reply.state != StreamDescriptor::State::IDLE) {
+            ALOGE("%s: failed to get the stream out of standby, actual state: %s",
+                    __func__, toString(reply.state).c_str());
+            return INVALID_OPERATION;
+        }
+    }
+    if (!mIsInput) {
+        bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+    }
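+    // For output streams the data is written into the MQ before issuing the 'burst' command;
+    // for input streams the MQ is drained after the HAL reports how much it has produced.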
+    StreamDescriptor::Command burst =
+            StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
+    if (!mIsInput) {
+        if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
+            ALOGE("%s: failed to write %zu bytes to data MQ", __func__, bytes);
+            return NOT_ENOUGH_DATA;
+        }
+    }
+    StreamDescriptor::Reply reply;
+    if (status_t status = sendCommand(burst, &reply); status != OK) {
+        return status;
+    }
+    *transferred = reply.fmqByteCount;
+    if (mIsInput) {
+        LOG_ALWAYS_FATAL_IF(*transferred > bytes,
+                "%s: HAL module read %zu bytes, which exceeds requested count %zu",
+                __func__, *transferred, bytes);
+        if (auto toRead = mContext.getDataMQ()->availableToRead();
+                toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
+            ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
+            return NOT_ENOUGH_DATA;
+        }
+    }
+    mStreamPowerLog.log(buffer, *transferred);
+    return OK;
+}
+
+status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
+            true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+}
+
+status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (mIsInput) {
+        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
+    } else {
+        if (mContext.isAsynchronous()) {
+            // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
+            // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
+            const auto state = getState();
+            if (state == StreamDescriptor::State::IDLE) {
+                StreamDescriptor::Reply localReply{};
+                StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+                if (status_t status =
+                        sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply);
+                        status != OK) {
+                    return status;
+                }
+                if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+                    ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+                            __func__, toString(innerReply->state).c_str());
+                    return INVALID_OPERATION;
+                }
+                return OK;
+            }
+        }
+        return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+    }
+}
+
+status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
+                    mIsInput ? StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED :
+                    earlyNotify ? StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY :
+                    StreamDescriptor::DrainMode::DRAIN_ALL), reply,
+                    true /*safeFromNonWorkerThread*/);
+}
+
+status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
+            true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+}
+
+status_t StreamHalAidl::exit() {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->prepareToClose());
+}
+
+status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
+                                         struct audio_mmap_buffer_info *info) {
+    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
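+    // The MMAP buffer descriptor was provided by the HAL when the stream was opened,
+    // so 'minSizeFrames' is not used here; report the stored descriptor as-is.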
+    const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
+    info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
+    info->buffer_size_frames = mContext.getBufferSizeFrames();
+    info->burst_size_frames = bufferDescriptor.burstSizeFrames;
+    info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
+
+    return OK;
+}
+
+status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    int64_t aidlPosition = 0, aidlTimestamp = 0;
+    if (status_t status = getHardwarePosition(&aidlPosition, &aidlTimestamp); status != OK) {
+        return status;
+    }
+
+    position->time_nanoseconds = aidlTimestamp;
+    position->position_frames = static_cast<int32_t>(aidlPosition);
+    return OK;
+}
+
+status_t StreamHalAidl::setHalThreadPriority(int priority __unused) {
+    // Obsolete, must be done by the HAL module.
+    return OK;
+}
+
+status_t StreamHalAidl::legacyCreateAudioPatch(const struct audio_port_config& port __unused,
+                                               std::optional<audio_source_t> source __unused,
+                                               audio_devices_t type __unused) {
+    // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
+    return INVALID_OPERATION;
+}
+
+status_t StreamHalAidl::legacyReleaseAudioPatch() {
+    // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
+    return INVALID_OPERATION;
+}
+
+status_t StreamHalAidl::sendCommand(
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        bool safeFromNonWorkerThread) {
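+    // Synchronous round trip over the FMQ pair: write exactly one command, block until
+    // one reply arrives, and cache the reply so that state and position getters can be
+    // served without an extra exchange with the HAL.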
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
+    if (!safeFromNonWorkerThread) {
+        const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
+        LOG_ALWAYS_FATAL_IF(workerTid != gettid(),
+                "%s %s: must be invoked from the worker thread (%d)",
+                __func__, command.toString().c_str(), workerTid);
+    }
+    if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
+        ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+        return NOT_ENOUGH_DATA;
+    }
+    StreamDescriptor::Reply localReply{};
+    if (reply == nullptr) {
+        reply = &localReply;
+    }
+    if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
+        ALOGE("%s: failed to read from reply MQ, command %s", __func__, command.toString().c_str());
+        return NOT_ENOUGH_DATA;
+    }
+    {
+        std::lock_guard l(mLock);
+        mLastReply = *reply;
+    }
+    switch (reply->status) {
+        case STATUS_OK: return OK;
+        case STATUS_BAD_VALUE: return BAD_VALUE;
+        case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
+        case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
+        default:
+            ALOGE("%s: unexpected status %d returned for command %s",
+                    __func__, reply->status, command.toString().c_str());
+            return INVALID_OPERATION;
+    }
+}
+
+status_t StreamHalAidl::updateCountersIfNeeded(
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
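+    // Only poll the HAL when called from the worker thread and the stream is not in a state
+    // where the worker is already pumping replies; otherwise return the cached counters.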
+    if (mWorkerTid.load(std::memory_order_acquire) == gettid()) {
+        if (const auto state = getState(); state != StreamDescriptor::State::ACTIVE &&
+                state != StreamDescriptor::State::DRAINING &&
+                state != StreamDescriptor::State::TRANSFERRING) {
+            return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), reply);
+        }
+    }
+    if (reply != nullptr) {
+        std::lock_guard l(mLock);
+        *reply = mLastReply;
+    }
+    return OK;
+}
+
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+StreamOutHalAidl::legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy) {
+    ::aidl::android::hardware::audio::common::SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            ::aidl::android::convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy.tracks,
+                    ::aidl::android::legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
+StreamOutHalAidl::StreamOutHalAidl(
+        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
+        const std::shared_ptr<IStreamOut>& stream, const sp<CallbackBroker>& callbackBroker)
+        : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
+                std::move(context), getStreamCommon(stream)),
+          mStream(stream), mCallbackBroker(callbackBroker) {
+    // Initialize the offload metadata
+    mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
+    mOffloadMetadata.channelMask = VALUE_OR_FATAL(
+            ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    config.channel_mask, false));
+    mOffloadMetadata.averageBitRatePerSecond = static_cast<int32_t>(config.offload_info.bit_rate);
+}
+
+StreamOutHalAidl::~StreamOutHalAidl() {
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->clearCallbacks(this);
+    }
+}
+
+status_t StreamOutHalAidl::setParameters(const String8& kvPairs) {
+    if (!mStream) return NO_INIT;
+
+    AudioParameter parameters(kvPairs);
+    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+
+    if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
+        ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
+    }
+
+    return StreamHalAidl::setParameters(parameters.toString());
+}
+
+status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
+    return StreamHalAidl::getLatency(latency);
+}
+
+status_t StreamOutHalAidl::setVolume(float left, float right) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
+}
+
+status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->selectPresentation(presentationId, programId));
+}
+
+status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *written) {
+    if (buffer == nullptr || written == nullptr) {
+        return BAD_VALUE;
+    }
+    // For the output scenario, 'transfer' does not modify the buffer.
+    return transfer(const_cast<void*>(buffer), bytes, written);
+}
+
+status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+    if (dspFrames == nullptr) {
+        return BAD_VALUE;
+    }
+    int64_t aidlFrames = 0, aidlTimestamp = 0;
+    if (status_t status = getObservablePosition(&aidlFrames, &aidlTimestamp); status != OK) {
+        return status;
+    }
+    *dspFrames = std::clamp<int64_t>(aidlFrames, 0, UINT32_MAX);
+    return OK;
+}
+
+status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
+    // Obsolete, use getPresentationPosition.
+    return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (!mContext.isAsynchronous()) {
+        ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
+        return INVALID_OPERATION;
+    }
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        if (auto cb = callback.promote(); cb != nullptr) {
+            broker->setStreamOutCallback(this, cb);
+        } else {
+            // It is expected that the framework never passes a null pointer.
+            // In the AIDL model callbacks can't be "unregistered".
+            LOG_ALWAYS_FATAL("%s: received an expired or null callback pointer", __func__);
+        }
+    }
+    return OK;
+}
+
+status_t StreamOutHalAidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
+    if (supportsPause == nullptr || supportsResume == nullptr) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    *supportsPause = *supportsResume = true;
+    return OK;
+}
+
+status_t StreamOutHalAidl::pause() {
+    return StreamHalAidl::pause();
+}
+
+status_t StreamOutHalAidl::resume() {
+    return StreamHalAidl::resume();
+}
+
+status_t StreamOutHalAidl::supportsDrain(bool *supportsDrain) {
+    if (supportsDrain == nullptr) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    *supportsDrain = true;
+    return OK;
+}
+
+status_t StreamOutHalAidl::drain(bool earlyNotify) {
+    return StreamHalAidl::drain(earlyNotify);
+}
+
+status_t StreamOutHalAidl::flush() {
+    return StreamHalAidl::flush();
+}
+
+status_t StreamOutHalAidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
+    if (frames == nullptr || timestamp == nullptr) {
+        return BAD_VALUE;
+    }
+    int64_t aidlFrames = 0, aidlTimestamp = 0;
+    if (status_t status = getObservablePosition(&aidlFrames, &aidlTimestamp); status != OK) {
+        return status;
+    }
+    *frames = std::max<int64_t>(0, aidlFrames);
+    timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
+    timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
+    return OK;
+}
+
+status_t StreamOutHalAidl::updateSourceMetadata(
+        const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
+              VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
+    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
+}
+
+status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (mode == nullptr) {
+        return BAD_VALUE;
+    }
+    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getDualMonoMode(&aidlMode)));
+    *mode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(aidlMode));
+    return OK;
+}
+
+status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
+    return statusTFromBinderStatus(mStream->setDualMonoMode(aidlMode));
+}
+
+status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (leveldB == nullptr) {
+        return BAD_VALUE;
+    }
+    return statusTFromBinderStatus(mStream->getAudioDescriptionMixLevel(leveldB));
+}
+
+status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->setAudioDescriptionMixLevel(leveldB));
+}
+
+status_t StreamOutHalAidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (playbackRate == nullptr) {
+        return BAD_VALUE;
+    }
+    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getPlaybackRateParameters(&aidlRate)));
+    *playbackRate = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(aidlRate));
+    return OK;
+}
+
+status_t StreamOutHalAidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate));
+    return statusTFromBinderStatus(mStream->setPlaybackRateParameters(aidlRate));
+}
+
+status_t StreamOutHalAidl::setEventCallback(
+        const sp<StreamOutHalInterfaceEventCallback>& callback) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->setStreamOutEventCallback(this, callback);
+    }
+    return OK;
+}
+
+status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::media::audio::common::AudioLatencyMode aidlMode = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
+    return statusTFromBinderStatus(mStream->setLatencyMode(aidlMode));
+}
+
+status_t StreamOutHalAidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+    std::vector<::aidl::android::media::audio::common::AudioLatencyMode> aidlModes;
+    RETURN_STATUS_IF_ERROR(
+            statusTFromBinderStatus(mStream->getRecommendedLatencyModes(&aidlModes)));
+    *modes = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
+                    aidlModes,
+                    ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
+    return OK;
+}
+
+status_t StreamOutHalAidl::setLatencyModeCallback(
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
+        broker->setStreamOutLatencyModeCallback(this, callback);
+    }
+    return OK;
+}
+
+status_t StreamOutHalAidl::exit() {
+    return StreamHalAidl::exit();
+}
+
+status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
+    TIME_CHECK();
+    bool updateMetadata = false;
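+    // Each recognized offload key is filtered out of 'parameters' and, when its value is
+    // valid, copied into mOffloadMetadata; the HAL is notified once at the end if anything
+    // changed.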
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
+                [&](int value) {
+                    return value > 0 ?
+                            mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecSampleRate),
+                [&](int value) {
+                    return value > 0 ? mOffloadMetadata.sampleRate = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecChannels),
+                [&](int value) -> status_t {
+                    if (value > 0) {
+                        audio_channel_mask_t channel_mask = audio_channel_out_mask_from_count(
+                                static_cast<uint32_t>(value));
+                        if (channel_mask == AUDIO_CHANNEL_INVALID) return BAD_VALUE;
+                        mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
+                                ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                                        channel_mask, false /*isInput*/));
+                        return OK;
+                    }
+                    return BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecDelaySamples),
+                [&](int value) {
+                    // The legacy keys are misnamed, the value is in frames.
+                    return value > 0 ? mOffloadMetadata.delayFrames = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+                parameters, String8(AudioParameter::keyOffloadCodecPaddingSamples),
+                [&](int value) {
+                    // The legacy keys are misnamed, the value is in frames.
+                    return value > 0 ? mOffloadMetadata.paddingFrames = value, OK : BAD_VALUE;
+                }))) {
+        updateMetadata = true;
+    }
+    if (updateMetadata) {
+        ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+        if (status_t status = statusTFromBinderStatus(
+                        mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
+            ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+            return status;
+        }
+    }
+    return OK;
+}
+
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
+    ::aidl::android::hardware::audio::common::SinkMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            ::aidl::android::convertContainer<std::vector<RecordTrackMetadata>>(
+                    legacy.tracks,
+                    ::aidl::android::legacy2aidl_record_track_metadata_v7_RecordTrackMetadata));
+    return aidl;
+}
+
+StreamInHalAidl::StreamInHalAidl(
+        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
+        const std::shared_ptr<IStreamIn>& stream, const sp<MicrophoneInfoProvider>& micInfoProvider)
+        : StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
+                std::move(context), getStreamCommon(stream)),
+          mStream(stream), mMicInfoProvider(micInfoProvider) {}
+
+status_t StreamInHalAidl::setGain(float gain) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->setHwGain({gain}));
+}
+
+status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
+    if (buffer == nullptr || read == nullptr) {
+        return BAD_VALUE;
+    }
+    return transfer(buffer, bytes, read);
+}
+
+status_t StreamInHalAidl::getInputFramesLost(uint32_t *framesLost) {
+    if (framesLost == nullptr) {
+        return BAD_VALUE;
+    }
+    int32_t aidlXruns = 0;
+    if (status_t status = getXruns(&aidlXruns); status != OK) {
+        return status;
+    }
+    *framesLost = std::max<int32_t>(0, aidlXruns);
+    return OK;
+}
+
+status_t StreamInHalAidl::getCapturePosition(int64_t *frames, int64_t *time) {
+    if (frames == nullptr || time == nullptr) {
+        return BAD_VALUE;
+    }
+    return getObservablePosition(frames, time);
+}
+
+status_t StreamInHalAidl::getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) {
+    if (!microphones) {
+        return BAD_VALUE;
+    }
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    sp<MicrophoneInfoProvider> micInfoProvider = mMicInfoProvider.promote();
+    if (!micInfoProvider) return NO_INIT;
+    auto staticInfo = micInfoProvider->getMicrophoneInfo();
+    if (!staticInfo) return INVALID_OPERATION;
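+    // Combine the static per-module microphone descriptions with the dynamic info that the
+    // HAL reports for the microphones currently active on this stream.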
+    std::vector<MicrophoneDynamicInfo> dynamicInfo;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getActiveMicrophones(&dynamicInfo)));
+    std::vector<media::MicrophoneInfoFw> result;
+    result.reserve(dynamicInfo.size());
+    for (const auto& d : dynamicInfo) {
+        const auto staticInfoIt = std::find_if(staticInfo->begin(), staticInfo->end(),
+                [&](const auto& s) { return s.id == d.id; });
+        if (staticInfoIt != staticInfo->end()) {
+            // Convert into the c++ backend type from the ndk backend type via the legacy structure.
+            audio_microphone_characteristic_t legacy = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+                            *staticInfoIt, d));
+            media::MicrophoneInfoFw info = VALUE_OR_RETURN_STATUS(
+                    ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
+                            legacy));
+            // Note: info.portId is not filled in because it is framework-level information
+            // that is not known to the HAL.
+            result.push_back(std::move(info));
+        } else {
+            ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+        }
+    }
+    *microphones = std::move(result);
+    return OK;
+}
+
+status_t StreamInHalAidl::updateSinkMetadata(
+        const StreamInHalInterface::SinkMetadata& sinkMetadata) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
+              VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
+    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
+}
+
+status_t StreamInHalAidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    ::aidl::android::hardware::audio::core::IStreamIn::MicrophoneDirection aidlDirection =
+              VALUE_OR_RETURN_STATUS(
+                      ::aidl::android::legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(
+                              direction));
+    return statusTFromBinderStatus(mStream->setMicrophoneDirection(aidlDirection));
+}
+
+status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    return statusTFromBinderStatus(mStream->setMicrophoneFieldDimension(zoom));
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
new file mode 100644
index 0000000..75a1dd9
--- /dev/null
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -0,0 +1,409 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <memory>
+#include <mutex>
+#include <string_view>
+
+#include <aidl/android/hardware/audio/common/AudioOffloadMetadata.h>
+#include <aidl/android/hardware/audio/core/BpStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BpStreamIn.h>
+#include <aidl/android/hardware/audio/core/BpStreamOut.h>
+#include <aidl/android/hardware/audio/core/MmapBufferDescriptor.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+#include <media/AudioParameter.h>
+
+#include "ConversionHelperAidl.h"
+#include "StreamPowerLog.h"
+
+using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
+
+namespace android {
+
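+// Wraps the message queues and the optional MMAP buffer received in the StreamDescriptor,
+// and owns them for the lifetime of the stream wrapper.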
+class StreamContextAidl {
+  public:
+    typedef AidlMessageQueue<::aidl::android::hardware::audio::core::StreamDescriptor::Command,
+          ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> CommandMQ;
+    typedef AidlMessageQueue<::aidl::android::hardware::audio::core::StreamDescriptor::Reply,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> ReplyMQ;
+    typedef AidlMessageQueue<int8_t,
+            ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
+
+    explicit StreamContextAidl(
+            ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+            bool isAsynchronous)
+        : mFrameSizeBytes(descriptor.frameSizeBytes),
+          mCommandMQ(new CommandMQ(descriptor.command)),
+          mReplyMQ(new ReplyMQ(descriptor.reply)),
+          mBufferSizeFrames(descriptor.bufferSizeFrames),
+          mDataMQ(maybeCreateDataMQ(descriptor)),
+          mIsAsynchronous(isAsynchronous),
+          mIsMmapped(isMmapped(descriptor)),
+          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
+    StreamContextAidl(StreamContextAidl&& other) :
+            mFrameSizeBytes(other.mFrameSizeBytes),
+            mCommandMQ(std::move(other.mCommandMQ)),
+            mReplyMQ(std::move(other.mReplyMQ)),
+            mBufferSizeFrames(other.mBufferSizeFrames),
+            mDataMQ(std::move(other.mDataMQ)),
+            mIsAsynchronous(other.mIsAsynchronous),
+            mIsMmapped(other.mIsMmapped),
+            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
+    StreamContextAidl& operator=(StreamContextAidl&& other) {
+        mFrameSizeBytes = other.mFrameSizeBytes;
+        mCommandMQ = std::move(other.mCommandMQ);
+        mReplyMQ = std::move(other.mReplyMQ);
+        mBufferSizeFrames = other.mBufferSizeFrames;
+        mDataMQ = std::move(other.mDataMQ);
+        mIsAsynchronous = other.mIsAsynchronous;
+        mIsMmapped = other.mIsMmapped;
+        mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
+        return *this;
+    }
+    bool isValid() const {
+        return mFrameSizeBytes != 0 &&
+                mCommandMQ != nullptr && mCommandMQ->isValid() &&
+                mReplyMQ != nullptr && mReplyMQ->isValid() &&
+                (mDataMQ == nullptr || (
+                        mDataMQ->isValid() &&
+                        mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() >=
+                        mFrameSizeBytes * mBufferSizeFrames)) &&
+                (!mIsMmapped || mMmapBufferDescriptor.sharedMemory.fd.get() >= 0);
+    }
+    size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
+    size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
+    CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
+    DataMQ* getDataMQ() const { return mDataMQ.get(); }
+    size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
+    ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
+    bool isAsynchronous() const { return mIsAsynchronous; }
+    bool isMmapped() const { return mIsMmapped; }
+    const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
+
+  private:
+    static std::unique_ptr<DataMQ> maybeCreateDataMQ(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+        using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+        if (descriptor.audio.getTag() == Tag::fmq) {
+            return std::make_unique<DataMQ>(descriptor.audio.get<Tag::fmq>());
+        }
+        return nullptr;
+    }
+    static bool isMmapped(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+        using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+        return descriptor.audio.getTag() == Tag::mmap;
+    }
+    static MmapBufferDescriptor maybeGetMmapBuffer(
+            ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+        using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+        if (descriptor.audio.getTag() == Tag::mmap) {
+            return std::move(descriptor.audio.get<Tag::mmap>());
+        }
+        return {};
+    }
+
+    size_t mFrameSizeBytes;
+    std::unique_ptr<CommandMQ> mCommandMQ;
+    std::unique_ptr<ReplyMQ> mReplyMQ;
+    size_t mBufferSizeFrames;
+    std::unique_ptr<DataMQ> mDataMQ;
+    bool mIsAsynchronous;
+    bool mIsMmapped;
+    MmapBufferDescriptor mMmapBufferDescriptor;
+};
+
+class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
+  public:
+    // Return size of input/output buffer in bytes for this stream - eg. 4800.
+    status_t getBufferSize(size_t *size) override;
+
+    // Return the base configuration of the stream:
+    //   - channel mask;
+    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    //   - sampling rate in Hz - eg. 44100.
+    status_t getAudioProperties(audio_config_base_t *configBase) override;
+
+    // Set audio stream parameters.
+    status_t setParameters(const String8& kvPairs) override;
+
+    // Get audio stream parameters.
+    status_t getParameters(const String8& keys, String8 *values) override;
+
+    // Return the frame size (number of bytes per audio frame) of a stream.
+    status_t getFrameSize(size_t *size) override;
+
+    // Add or remove the effect on the stream.
+    status_t addEffect(sp<EffectHalInterface> effect) override;
+    status_t removeEffect(sp<EffectHalInterface> effect) override;
+
+    // Put the audio hardware input/output into standby mode.
+    status_t standby() override;
+
+    status_t dump(int fd, const Vector<String16>& args) override;
+
+    // Start a stream operating in mmap mode.
+    status_t start() override;
+
+    // Stop a stream operating in mmap mode.
+    status_t stop() override;
+
+    // Retrieve information on the data buffer in mmap mode.
+    status_t createMmapBuffer(int32_t minSizeFrames,
+            struct audio_mmap_buffer_info *info) override;
+
+    // Get current read/write position in the mmap buffer
+    status_t getMmapPosition(struct audio_mmap_position *position) override;
+
+    // Set the priority of the thread that interacts with the HAL
+    // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
+    status_t setHalThreadPriority(int priority) override;
+
+    status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+            std::optional<audio_source_t> source,
+            audio_devices_t type) override;
+
+    status_t legacyReleaseAudioPatch() override;
+
+  protected:
+    template<class T>
+    static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
+            const std::shared_ptr<T>& stream);
+
+    // Subclasses cannot be constructed directly by clients.
+    StreamHalAidl(std::string_view className,
+            bool isInput,
+            const audio_config& config,
+            int32_t nominalLatency,
+            StreamContextAidl&& context,
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>& stream);
+
+    ~StreamHalAidl() override;
+
+    status_t getLatency(uint32_t *latency);
+
+    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+
+    status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
+
+    status_t getXruns(int32_t *frames);
+
+    status_t transfer(void *buffer, size_t bytes, size_t *transferred);
+
+    status_t pause(
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+
+    status_t resume(
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+
+    status_t drain(bool earlyNotify,
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+
+    status_t flush(
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+
+    status_t exit();
+
+    const bool mIsInput;
+    const audio_config_base_t mConfig;
+    const StreamContextAidl mContext;
+
+  private:
+    static audio_config_base_t configToBase(const audio_config& config) {
+        audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+        result.sample_rate = config.sample_rate;
+        result.channel_mask = config.channel_mask;
+        result.format = config.format;
+        return result;
+    }
+    ::aidl::android::hardware::audio::core::StreamDescriptor::State getState() {
+        std::lock_guard l(mLock);
+        return mLastReply.state;
+    }
+    status_t sendCommand(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            bool safeFromNonWorkerThread = false);
+    status_t updateCountersIfNeeded(
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
+    std::mutex mLock;
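+    // Cached copy of the most recent reply received from the HAL; used by getState() and
+    // updateCountersIfNeeded() to serve queries without a round trip to the HAL.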
+    ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
+    // mStreamPowerLog is used for audio signal power logging.
+    StreamPowerLog mStreamPowerLog;
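+    // Tid of the thread that performs read/write operations; commands not marked as safe
+    // must be sent from this thread (see sendCommand).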
+    std::atomic<pid_t> mWorkerTid = -1;
+};
+
+class CallbackBroker;
+
+class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+  public:
+    // Extract the relevant output stream parameters and apply them via the AIDL APIs.
+    status_t setParameters(const String8& kvPairs) override;
+
+    // Return the audio hardware driver estimated latency in milliseconds.
+    status_t getLatency(uint32_t *latency) override;
+
+    // Use this method in situations where audio mixing is done in the hardware.
+    status_t setVolume(float left, float right) override;
+
+    // Selects the audio presentation (if available).
+    status_t selectPresentation(int presentationId, int programId) override;
+
+    // Write audio buffer to driver.
+    status_t write(const void *buffer, size_t bytes, size_t *written) override;
+
+    // Return the number of audio frames written by the audio dsp to DAC since
+    // the output has exited standby.
+    status_t getRenderPosition(uint32_t *dspFrames) override;
+
+    // Get the local time at which the next write to the audio driver will be presented.
+    status_t getNextWriteTimestamp(int64_t *timestamp) override;
+
+    // Set the callback for notifying completion of non-blocking write and drain.
+    status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
+
+    // Returns whether pause and resume operations are supported.
+    status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume) override;
+
+    // Notifies to the audio driver to pause playback.
+    status_t pause() override;
+
+    // Notifies to the audio driver to resume playback following a pause.
+    status_t resume() override;
+
+    // Returns whether drain operation is supported.
+    status_t supportsDrain(bool *supportsDrain) override;
+
+    // Requests notification when data buffered by the driver/hardware has been played.
+    status_t drain(bool earlyNotify) override;
+
+    // Notifies to the audio driver to flush the queued data.
+    status_t flush() override;
+
+    // Return a recent count of the number of audio frames presented to an external observer.
+    status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
+
+    // Called when the metadata of the stream's source has been changed.
+    status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
+    // Returns the Dual Mono mode presentation setting.
+    status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
+
+    // Sets the Dual Mono mode presentation on the output device.
+    status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
+
+    // Returns the Audio Description Mix level in dB.
+    status_t getAudioDescriptionMixLevel(float* leveldB) override;
+
+    // Sets the Audio Description Mix level in dB.
+    status_t setAudioDescriptionMixLevel(float leveldB) override;
+
+    // Retrieves current playback rate parameters.
+    status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
+
+    // Sets the playback rate parameters that control playback behavior.
+    status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
+
+    status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
+
+    status_t setLatencyMode(audio_latency_mode_t mode) override;
+    status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
+    status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
+
+    status_t exit() override;
+
+  private:
+    friend class sp<StreamOutHalAidl>;
+
+    static ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+    legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy);
+
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
+    const wp<CallbackBroker> mCallbackBroker;
+
+    AudioOffloadMetadata mOffloadMetadata;
+
+    // Cannot be constructed directly by clients.
+    StreamOutHalAidl(
+            const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut>& stream,
+            const sp<CallbackBroker>& callbackBroker);
+
+    ~StreamOutHalAidl() override;
+
+    // Filter and update the offload metadata. Parameters related to the offload metadata
+    // are removed from 'parameters' after filtering.
+    status_t filterAndUpdateOffloadMetadata(AudioParameter &parameters);
+};
+
+class MicrophoneInfoProvider;
+
+class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl {
+  public:
+    // Set the input gain for the audio driver.
+    status_t setGain(float gain) override;
+
+    // Read audio buffer in from driver.
+    status_t read(void *buffer, size_t bytes, size_t *read) override;
+
+    // Return the amount of input frames lost in the audio driver.
+    status_t getInputFramesLost(uint32_t *framesLost) override;
+
+    // Return a recent count of the number of audio frames received and
+    // the clock time associated with that frame count.
+    status_t getCapturePosition(int64_t *frames, int64_t *time) override;
+
+    // Get active microphones
+    status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) override;
+
+    // Set microphone direction (for processing)
+    status_t setPreferredMicrophoneDirection(
+                            audio_microphone_direction_t direction) override;
+
+    // Set microphone zoom (for processing)
+    status_t setPreferredMicrophoneFieldDimension(float zoom) override;
+
+    // Called when the metadata of the stream's sink has been changed.
+    status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
+  private:
+    friend class sp<StreamInHalAidl>;
+
+    static ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+    legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy);
+
+    const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn> mStream;
+    const wp<MicrophoneInfoProvider> mMicInfoProvider;
+
+    // Cannot be constructed directly by clients.
+    StreamInHalAidl(
+            const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
+            const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream,
+            const sp<MicrophoneInfoProvider>& micInfoProvider);
+
+    ~StreamInHalAidl() override = default;
+};
+
+} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 021ec51..2b0af49 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -24,6 +24,9 @@
 #include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
+#if MAJOR_VERSION >= 4
+#include <media/AidlConversion.h>
+#endif
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
 #include <HidlUtils.h>
@@ -46,9 +49,6 @@
 using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
 using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-#define TIME_CHECK() auto TimeCheck = \
-       mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
-
 StreamHalHidl::StreamHalHidl(std::string_view className, IStream *stream)
         : CoreConversionHelperHidl(className),
           mStream(stream),
@@ -441,7 +441,7 @@
 #endif
 
 status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     *written = 0;
 
@@ -587,7 +587,7 @@
 }
 
 status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getRenderPosition(
@@ -668,7 +668,7 @@
 }
 
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -979,9 +979,10 @@
 }
 
 status_t StreamOutHalHidl::exit() {
-    // FIXME this is using hard-coded strings but in the future, this functionality will be
-    //       converted to use audio HAL extensions required to support tunneling
-    return setParameters(String8("exiting=1"));
+    // Signal exiting to remote_submix HAL.
+    AudioParameter param;
+    param.addInt(String8(AudioParameter::keyExiting), 1);
+    return setParameters(param.toString());
 }
 
 StreamInHalHidl::StreamInHalHidl(
@@ -1012,7 +1013,7 @@
 }
 
 status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     *read = 0;
 
@@ -1146,7 +1147,7 @@
 }
 
 status_t StreamInHalHidl::getCapturePosition(int64_t *frames, int64_t *time) {
-    TIME_CHECK();
+    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     if (mReaderClient == gettid() && mCommandMQ) {
         ReadParameters params;
@@ -1172,7 +1173,7 @@
 
 #if MAJOR_VERSION == 2
 status_t StreamInHalHidl::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo> *microphones __unused) {
+        std::vector<media::MicrophoneInfoFw> *microphones __unused) {
     if (mStream == 0) return NO_INIT;
     return INVALID_OPERATION;
 }
@@ -1185,7 +1186,7 @@
 
 #elif MAJOR_VERSION >= 4
 status_t StreamInHalHidl::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo> *microphonesInfo) {
+        std::vector<media::MicrophoneInfoFw> *microphonesInfo) {
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     Result retval;
@@ -1193,11 +1194,17 @@
             [&](Result r, hidl_vec<MicrophoneInfo> micArrayHal) {
         retval = r;
         for (size_t k = 0; k < micArrayHal.size(); k++) {
+            // Convert via legacy.
             audio_microphone_characteristic_t dst;
-            // convert
             (void)CoreUtils::microphoneInfoToHal(micArrayHal[k], &dst);
-            media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
-            microphonesInfo->push_back(microphone);
+            auto conv = legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(dst);
+            if (conv.ok()) {
+                microphonesInfo->push_back(conv.value());
+            } else {
+                ALOGW("getActiveMicrophones: could not convert %s to AIDL: %d",
+                        toString(micArrayHal[k]).c_str(), conv.error());
+                microphonesInfo->push_back(media::MicrophoneInfoFw{});
+            }
         }
     });
     return processReturn("getActiveMicrophones", ret, retval);
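
The exit() hunk earlier in this file replaces a hard-coded "exiting=1" literal with
AudioParameter. A minimal equivalence sketch, assuming AudioParameter::keyExiting expands to
the same "exiting" key the old literal used:

    #include <media/AudioParameter.h>  // AudioParameter, String8

    String8 makeExitingParam() {
        AudioParameter param;
        param.addInt(String8(AudioParameter::keyExiting), 1);
        return param.toString();  // "exiting=1", matching the removed hard-coded string
    }
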
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 54fbefe..5361047 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -253,7 +253,7 @@
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
-    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+    status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) override;
 
     // Set microphone direction (for processing)
     virtual status_t setPreferredMicrophoneDirection(
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
new file mode 100644
index 0000000..92b77d8
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionAec"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_aec.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionAec.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionAec::setParameter(EffectParamReader& param) {
+    uint32_t type, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type) ||
+        OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    switch (type) {
+        case AEC_PARAM_ECHO_DELAY:
+            FALLTHROUGH_INTENDED;
+        case AEC_PARAM_PROPERTIES: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint32_echoDelay_Parameter_aec(value));
+            break;
+        }
+        case AEC_PARAM_MOBILE_MODE: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint32_mobileMode_Parameter_aec(value));
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, vendor,
+                                                ext);
+            // The shared setParameter call below sends the vendor extension parameter.
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAec::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type)) {
+        param.setStatus(BAD_VALUE);
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case AEC_PARAM_ECHO_DELAY:
+            FALLTHROUGH_INTENDED;
+        case AEC_PARAM_PROPERTIES: {
+            int32_t delay = 0;
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
+                                               AcousticEchoCanceler::echoDelayUs);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            delay = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_aec_uint32_echoDelay(aidlParam));
+            return param.writeToValue(&delay);
+        }
+        case AEC_PARAM_MOBILE_MODE: {
+            int32_t mode = 0;
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
+                                               AcousticEchoCanceler::mobileMode);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            mode = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_aec_uint32_mobileMode(aidlParam));
+            return param.writeToValue(&mode);
+        }
+        default: {
+            // Vendor extension path; the leading 32-bit param type is not passed to the HAL.
+            VENDOR_EXTENSION_GET_AND_RETURN(AcousticEchoCanceler, acousticEchoCanceler, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
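
The AidlConversion* classes in this change all parse the same legacy framing through
EffectParamReader/EffectParamWriter. A hedged sketch of the blob AidlConversionAec::setParameter()
expects: an effect_param_t (from system/audio_effect.h) carrying one 32-bit parameter id and one
32-bit value; the 20000 below is just an example delay:

    #include <cstdint>
    #include <cstring>
    #include <vector>
    #include <system/audio_effect.h>              // effect_param_t
    #include <system/audio_effects/effect_aec.h>  // AEC_PARAM_ECHO_DELAY

    // Builds the legacy blob that AidlConversionAec::setParameter() parses.
    std::vector<uint8_t> makeAecDelayBlob(uint32_t delayUs = 20000) {
        std::vector<uint8_t> buf(sizeof(effect_param_t) + 2 * sizeof(uint32_t), 0);
        auto* p = reinterpret_cast<effect_param_t*>(buf.data());
        p->psize = sizeof(uint32_t);  // parameter id: AEC_PARAM_ECHO_DELAY
        p->vsize = sizeof(uint32_t);  // value: echo delay in microseconds
        const uint32_t id = AEC_PARAM_ECHO_DELAY;
        memcpy(p->data, &id, sizeof(id));
        memcpy(p->data + sizeof(id), &delayUs, sizeof(delayUs));
        return buf;  // readFromParameter() yields id, readFromValue() yields delayUs
    }
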
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h
new file mode 100644
index 0000000..3ee419a
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionAec : public EffectConversionHelperAidl {
+  public:
+    AidlConversionAec(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                      int32_t sessionId, int32_t ioId,
+                      const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionAec() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
new file mode 100644
index 0000000..1363ba4
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionAgc1"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_agc.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionAgc1.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControlV1;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionAgc1::setParameterLevel(EffectParamReader& param) {
+    int16_t level;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&level));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  targetPeakLevelDbFs, level);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterGain(EffectParamReader& param) {
+    int16_t gain;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&gain));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  maxCompressionGainDb, gain);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterLimiterEnable(EffectParamReader& param) {
+    bool enable;
+    RETURN_STATUS_IF_ERROR(param.readFromValue(&enable));
+    Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+                                                  enableLimiter, enable);
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    switch (type) {
+        case AGC_PARAM_TARGET_LEVEL: {
+            return setParameterLevel(param);
+        }
+        case AGC_PARAM_COMP_GAIN: {
+            return setParameterGain(param);
+        }
+        case AGC_PARAM_LIMITER_ENA: {
+            return setParameterLimiterEnable(param);
+        }
+        case AGC_PARAM_PROPERTIES: {
+            RETURN_STATUS_IF_ERROR(setParameterLevel(param));
+            RETURN_STATUS_IF_ERROR(setParameterGain(param));
+            RETURN_STATUS_IF_ERROR(setParameterLimiterEnable(param));
+            return OK;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1,
+                                                          automaticGainControlV1, vendor, ext);
+            return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+        }
+    }
+}
+
+status_t AidlConversionAgc1::getParameterLevel(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::targetPeakLevelDbFs);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    int32_t level = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::targetPeakLevelDbFs, int32_t));
+    return param.writeToValue(&level);
+}
+
+status_t AidlConversionAgc1::getParameterGain(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::maxCompressionGainDb);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    int32_t gain = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::maxCompressionGainDb, int32_t));
+    return param.writeToValue(&gain);
+}
+
+status_t AidlConversionAgc1::getParameterLimiterEnable(EffectParamWriter& param) {
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+                                                  AutomaticGainControlV1::enableLimiter);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    bool enable = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+                                         AutomaticGainControlV1::enableLimiter, bool));
+    return param.writeToValue(&enable);
+}
+
+status_t AidlConversionAgc1::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    switch (type) {
+        case AGC_PARAM_TARGET_LEVEL: {
+            return getParameterLevel(param);
+        }
+        case AGC_PARAM_COMP_GAIN: {
+            return getParameterGain(param);
+        }
+        case AGC_PARAM_LIMITER_ENA: {
+            return getParameterLimiterEnable(param);
+        }
+        case AGC_PARAM_PROPERTIES: {
+            RETURN_STATUS_IF_ERROR(getParameterLevel(param));
+            RETURN_STATUS_IF_ERROR(getParameterGain(param));
+            RETURN_STATUS_IF_ERROR(getParameterLimiterEnable(param));
+            return OK;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV1, automaticGainControlV1, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
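
For the AGC_PARAM_PROPERTIES branches above, the three helpers consume the value area in the
order targetLevel, compGain, limiterEnabled. A hedged packing sketch, assuming the usual
t_agc_settings field order from effect_agc.h:

    #include <cstdint>
    #include <cstring>
    #include <vector>
    #include <system/audio_effect.h>              // effect_param_t
    #include <system/audio_effects/effect_agc.h>  // AGC_PARAM_PROPERTIES, t_agc_settings

    // Packs a legacy AGC "properties" blob in the order AidlConversionAgc1 reads it back.
    std::vector<uint8_t> makeAgcPropertiesBlob(int16_t targetLevelMb, int16_t compGainMb,
                                               bool limiterEnabled) {
        t_agc_settings settings{};  // assumed fields: targetLevel, compGain, limiterEnabled
        settings.targetLevel = targetLevelMb;
        settings.compGain = compGainMb;
        settings.limiterEnabled = limiterEnabled;
        std::vector<uint8_t> buf(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(settings), 0);
        auto* p = reinterpret_cast<effect_param_t*>(buf.data());
        p->psize = sizeof(uint32_t);
        p->vsize = sizeof(settings);
        const uint32_t id = AGC_PARAM_PROPERTIES;
        memcpy(p->data, &id, sizeof(id));
        memcpy(p->data + sizeof(id), &settings, sizeof(settings));
        return buf;
    }
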
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
new file mode 100644
index 0000000..b0509fd
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionAgc1 : public EffectConversionHelperAidl {
+  public:
+    AidlConversionAgc1(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                       int32_t sessionId, int32_t ioId,
+                       const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionAgc1() {}
+
+  private:
+    status_t setParameterLevel(utils::EffectParamReader& param);
+    status_t setParameterGain(utils::EffectParamReader& param);
+    status_t setParameterLimiterEnable(utils::EffectParamReader& param);
+    status_t setParameter(utils::EffectParamReader& param) override;
+
+    status_t getParameterLevel(utils::EffectParamWriter& param);
+    status_t getParameterGain(utils::EffectParamWriter& param);
+    status_t getParameterLimiterEnable(utils::EffectParamWriter& param);
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
new file mode 100644
index 0000000..b35a1c6
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionAgc2"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_agc2.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionAgc2.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControlV2;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionAgc2::setParameter(EffectParamReader& param) {
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case AGC2_PARAM_FIXED_DIGITAL_GAIN: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(value));
+            break;
+        }
+        case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint32_levelEstimator_Parameter_agc(value));
+            break;
+        }
+        case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint32_saturationMargin_Parameter_agc(value));
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2,
+                                                vendor, ext);
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc2::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case AGC2_PARAM_FIXED_DIGITAL_GAIN: {
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV2, automaticGainControlV2Tag,
+                                               AutomaticGainControlV2::fixedDigitalGainMb);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(aidlParam));
+            break;
+        }
+        case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR: {
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV2, automaticGainControlV2Tag,
+                                               AutomaticGainControlV2::levelEstimator);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_agc_uint32_levelEstimator(aidlParam));
+            break;
+        }
+        case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN: {
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV2, automaticGainControlV2Tag,
+                                               AutomaticGainControlV2::saturationMarginMb);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_agc_uint32_saturationMargin(aidlParam));
+            break;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV2, automaticGainControlV2, param);
+        }
+    }
+
+    return param.writeToValue(&value);
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h
new file mode 100644
index 0000000..8f7eac7
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionAgc2 : public EffectConversionHelperAidl {
+  public:
+    AidlConversionAgc2(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                       int32_t sessionId, int32_t ioId,
+                       const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionAgc2() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
new file mode 100644
index 0000000..7c6a5a2
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionBassBoost"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/aidl_effects_utils.h>
+#include <system/audio_effects/effect_bassboost.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionBassBoost.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::BassBoost;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionBassBoost::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    uint16_t value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case BASSBOOST_PARAM_STRENGTH: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_uint16_strengthPm_Parameter_BassBoost(value));
+            break;
+        }
+        case BASSBOOST_PARAM_STRENGTH_SUPPORTED: {
+            ALOGW("%s set BASSBOOST_PARAM_STRENGTH_SUPPORTED not supported", __func__);
+            return BAD_VALUE;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(BassBoost, bassBoost, vendor, ext);
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionBassBoost::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case BASSBOOST_PARAM_STRENGTH: {
+            uint16_t value;
+            Parameter::Id id =
+                    MAKE_SPECIFIC_PARAMETER_ID(BassBoost, bassBoostTag, BassBoost::strengthPm);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_BassBoost_uint16_strengthPm(aidlParam));
+            return param.writeToValue(&value);
+        }
+        case BASSBOOST_PARAM_STRENGTH_SUPPORTED: {
+            // An invalid range in the descriptor means setting strength is not supported.
+            uint32_t value =
+                    ::aidl::android::hardware::audio::effect::isRangeValid<Range::Tag::bassBoost>(
+                            BassBoost::strengthPm, mDesc.capability);
+            return param.writeToValue(&value);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(BassBoost, bassBoost, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
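
The BASSBOOST_PARAM_STRENGTH_SUPPORTED branch above answers the legacy capability query locally
from the AIDL descriptor rather than round-tripping to the HAL. A hedged restatement of that
check, with desc standing in for the mDesc member the conversion helper is assumed to hold:

    #include <aidl/android/hardware/audio/effect/Descriptor.h>
    #include <system/audio_effects/aidl_effects_utils.h>  // isRangeValid

    using ::aidl::android::hardware::audio::effect::BassBoost;
    using ::aidl::android::hardware::audio::effect::Descriptor;
    using ::aidl::android::hardware::audio::effect::Range;
    using ::aidl::android::hardware::audio::effect::isRangeValid;

    // 1 if the descriptor advertises a usable range for BassBoost::strengthPm, 0 otherwise;
    // this is the uint32_t the legacy caller reads back as "strength supported".
    uint32_t strengthSupported(const Descriptor& desc) {
        return isRangeValid<Range::Tag::bassBoost>(BassBoost::strengthPm, desc.capability);
    }
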
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h
new file mode 100644
index 0000000..9664aa1
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionBassBoost : public EffectConversionHelperAidl {
+  public:
+    AidlConversionBassBoost(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionBassBoost() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
new file mode 100644
index 0000000..b57971c
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionDownmix"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_downmix.h>
+
+#include <system/audio_effect.h>
+#include <utils/Log.h>
+
+#include "AidlConversionDownmix.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Downmix;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionDownmix::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    int16_t value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int16_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case DOWNMIX_PARAM_TYPE: {
+            aidlParam = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_int16_type_Parameter_Downmix(value));
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Downmix, downmix, vendor, ext);
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionDownmix::getParameter(EffectParamWriter& param) {
+    int16_t value = 0;
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type)) {
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case DOWNMIX_PARAM_TYPE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Downmix, downmixTag, Downmix::type);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_Downmix_int16_type(aidlParam));
+            break;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Downmix, downmix, param);
+        }
+    }
+
+    return param.writeToValue(&value);
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h
new file mode 100644
index 0000000..8b28ca3
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionDownmix : public EffectConversionHelperAidl {
+  public:
+    AidlConversionDownmix(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                          int32_t sessionId, int32_t ioId,
+                          const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionDownmix() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
new file mode 100644
index 0000000..fe845ab
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
@@ -0,0 +1,467 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionDp"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effect.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <Utils.h>
+#include <utils/Log.h>
+
+#include "AidlConversionDynamicsProcessing.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Capability;
+using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::toString;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionDp::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
+    Parameter aidlParam;
+    switch (type) {
+        case DP_PARAM_INPUT_GAIN: {
+            DynamicsProcessing::InputGain inputGainAidl;
+            RETURN_STATUS_IF_ERROR(param.readFromParameter(&inputGainAidl.channel));
+            RETURN_STATUS_IF_ERROR(param.readFromValue(&inputGainAidl.gainDb));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, inputGain,
+                                                {inputGainAidl});
+            break;
+        }
+        case DP_PARAM_ENGINE_ARCHITECTURE: {
+            DynamicsProcessing::EngineArchitecture engine =
+                    VALUE_OR_RETURN_STATUS(readEngineArchitectureFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing,
+                                                engineArchitecture, engine);
+            mEngine = engine;
+            break;
+        }
+        case DP_PARAM_PRE_EQ: {
+            DynamicsProcessing::ChannelConfig chConfig =
+                    VALUE_OR_RETURN_STATUS(readChannelConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, preEq,
+                                                {chConfig});
+            break;
+        }
+        case DP_PARAM_POST_EQ: {
+            DynamicsProcessing::ChannelConfig chConfig =
+                    VALUE_OR_RETURN_STATUS(readChannelConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, postEq,
+                                                {chConfig});
+            break;
+        }
+        case DP_PARAM_MBC: {
+            DynamicsProcessing::ChannelConfig chConfig =
+                    VALUE_OR_RETURN_STATUS(readChannelConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, mbc,
+                                                {chConfig});
+            break;
+        }
+        case DP_PARAM_PRE_EQ_BAND: {
+            DynamicsProcessing::EqBandConfig bandConfig =
+                    VALUE_OR_RETURN_STATUS(readEqBandConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, preEqBand,
+                                                {bandConfig});
+            break;
+        }
+        case DP_PARAM_POST_EQ_BAND: {
+            DynamicsProcessing::EqBandConfig bandConfig =
+                    VALUE_OR_RETURN_STATUS(readEqBandConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, postEqBand,
+                                                {bandConfig});
+            break;
+        }
+        case DP_PARAM_MBC_BAND: {
+            DynamicsProcessing::MbcBandConfig bandConfig =
+                    VALUE_OR_RETURN_STATUS(readMbcBandConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, mbcBand,
+                                                {bandConfig});
+            break;
+        }
+        case DP_PARAM_LIMITER: {
+            DynamicsProcessing::LimiterConfig config =
+                    VALUE_OR_RETURN_STATUS(readLimiterConfigFromParam(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, limiter,
+                                                {config});
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam =
+                    MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, vendor, ext);
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionDp::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
+    Parameter aidlParam;
+    switch (type) {
+        case DP_PARAM_INPUT_GAIN: {
+            int32_t channel;
+            RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
+                                                          DynamicsProcessing::inputGain);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+            std::vector<DynamicsProcessing::InputGain> gains =
+                    VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                            aidlParam, DynamicsProcessing, dynamicsProcessing,
+                            DynamicsProcessing::inputGain,
+                            std::vector<DynamicsProcessing::InputGain>));
+            for (const auto& gain : gains) {
+                if (gain.channel == channel) {
+                    return param.writeToValue(&gain.gainDb);
+                }
+            }
+            ALOGE("%s not able to find channel %d", __func__, channel);
+            return BAD_VALUE;
+        }
+        case DP_PARAM_ENGINE_ARCHITECTURE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
+                                                          DynamicsProcessing::engineArchitecture);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+            DynamicsProcessing::EngineArchitecture engine =
+                    VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                            aidlParam, DynamicsProcessing, dynamicsProcessing,
+                            DynamicsProcessing::engineArchitecture,
+                            DynamicsProcessing::EngineArchitecture));
+            int32_t resolution = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_DynamicsProcessing_ResolutionPreference_int32(
+                            engine.resolutionPreference));
+            int32_t preEqInUse =
+                    VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.preEqStage.inUse));
+            int32_t mbcInUse =
+                    VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.mbcStage.inUse));
+            int32_t postEqInUse =
+                    VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.postEqStage.inUse));
+            int32_t limiterInUse =
+                    VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.limiterInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&resolution));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preferredProcessingDurationMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&preEqInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preEqStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&mbcInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.mbcStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&postEqInUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.postEqStage.bandCount));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&limiterInUse));
+            mEngine = engine;
+            return OK;
+        }
+        case DP_PARAM_PRE_EQ: {
+            return getChannelConfig(DynamicsProcessing::preEq, param);
+        }
+        case DP_PARAM_POST_EQ: {
+            return getChannelConfig(DynamicsProcessing::postEq, param);
+        }
+        case DP_PARAM_MBC: {
+            return getChannelConfig(DynamicsProcessing::mbc, param);
+        }
+        case DP_PARAM_PRE_EQ_BAND: {
+            return getEqBandConfig(DynamicsProcessing::preEqBand, param);
+        }
+        case DP_PARAM_POST_EQ_BAND: {
+            return getEqBandConfig(DynamicsProcessing::postEqBand, param);
+        }
+        case DP_PARAM_MBC_BAND: {
+            return getMbcBandConfig(param);
+        }
+        case DP_PARAM_LIMITER: {
+            return getLimiterConfig(param);
+        }
+        case DP_PARAM_GET_CHANNEL_COUNT: {
+            uint32_t channel = ::aidl::android::hardware::audio::common::getChannelCount(
+                    mCommon.input.base.channelMask);
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&channel));
+            return OK;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(DynamicsProcessing, dynamicsProcessing, param);
+        }
+    }
+}
+
+ConversionResult<DynamicsProcessing::ChannelConfig>
+AidlConversionDp::readChannelConfigFromParam(EffectParamReader& param) {
+    int32_t enable, channel;
+    RETURN_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+
+    return DynamicsProcessing::ChannelConfig(
+            {.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable)), .channel = channel});
+}
+
+ConversionResult<DynamicsProcessing::EqBandConfig>
+AidlConversionDp::readEqBandConfigFromParam(EffectParamReader& param) {
+    DynamicsProcessing::EqBandConfig config;
+    int32_t enable;
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromParameter(&config.band));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+    RETURN_IF_ERROR(param.readFromValue(&config.gainDb));
+
+    config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
+    return config;
+}
+
+ConversionResult<DynamicsProcessing::MbcBandConfig>
+AidlConversionDp::readMbcBandConfigFromParam(EffectParamReader& param) {
+    DynamicsProcessing::MbcBandConfig config;
+    int32_t enable;
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromParameter(&config.band));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+    RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+    RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.kneeWidthDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.noiseGateThresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.expanderRatio));
+    RETURN_IF_ERROR(param.readFromValue(&config.preGainDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
+    config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
+    return config;
+}
+
+ConversionResult<DynamicsProcessing::LimiterConfig>
+AidlConversionDp::readLimiterConfigFromParam(EffectParamReader& param) {
+    DynamicsProcessing::LimiterConfig config;
+    int32_t enable, inUse;
+    RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+    RETURN_IF_ERROR(param.readFromValue(&inUse));
+    RETURN_IF_ERROR(param.readFromValue(&enable));
+    RETURN_IF_ERROR(param.readFromValue(&config.linkGroup));
+    RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+    RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+    RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+    RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
+    config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
+    return config;
+}
+
+ConversionResult<DynamicsProcessing::EngineArchitecture>
+AidlConversionDp::readEngineArchitectureFromParam(EffectParamReader& param) {
+    DynamicsProcessing::EngineArchitecture engine;
+    int32_t variant, preEqInUse, mbcInUse, postEqInUse, limiterInUse;
+    RETURN_IF_ERROR(param.readFromValue(&variant));
+    RETURN_IF_ERROR(param.readFromValue(&engine.preferredProcessingDurationMs));
+    RETURN_IF_ERROR(param.readFromValue(&preEqInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.preEqStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&mbcInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.mbcStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&postEqInUse));
+    RETURN_IF_ERROR(param.readFromValue(&engine.postEqStage.bandCount));
+    RETURN_IF_ERROR(param.readFromValue(&limiterInUse));
+
+    engine.resolutionPreference = VALUE_OR_RETURN(
+            aidl::android::legacy2aidl_int32_DynamicsProcessing_ResolutionPreference(variant));
+    engine.preEqStage.inUse = VALUE_OR_RETURN(convertIntegral<bool>(preEqInUse));
+    engine.mbcStage.inUse = VALUE_OR_RETURN(convertIntegral<bool>(mbcInUse));
+    engine.postEqStage.inUse = VALUE_OR_RETURN(convertIntegral<bool>(postEqInUse));
+    engine.limiterInUse = VALUE_OR_RETURN(convertIntegral<bool>(limiterInUse));
+    return engine;
+}
+
+status_t AidlConversionDp::getChannelConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
+    int32_t channel;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+    std::vector<DynamicsProcessing::ChannelConfig> channels;
+    int32_t inUse, bandCount;
+    switch (tag) {
+        case DynamicsProcessing::preEq: {
+            inUse = mEngine.preEqStage.inUse;
+            bandCount = mEngine.preEqStage.bandCount;
+            channels = VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, DynamicsProcessing, dynamicsProcessing, DynamicsProcessing::preEq,
+                    std::vector<DynamicsProcessing::ChannelConfig>));
+            break;
+        }
+        case DynamicsProcessing::postEq: {
+            inUse = mEngine.postEqStage.inUse;
+            bandCount = mEngine.postEqStage.bandCount;
+            channels = VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, DynamicsProcessing, dynamicsProcessing, DynamicsProcessing::postEq,
+                    std::vector<DynamicsProcessing::ChannelConfig>));
+            break;
+        }
+        case DynamicsProcessing::mbc: {
+            inUse = mEngine.mbcStage.inUse;
+            bandCount = mEngine.mbcStage.bandCount;
+            channels = VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, DynamicsProcessing, dynamicsProcessing, DynamicsProcessing::mbc,
+                    std::vector<DynamicsProcessing::ChannelConfig>));
+            break;
+        }
+        default: {
+            ALOGE("%s unsupported tag %s", __func__, toString(tag).c_str());
+            return BAD_VALUE;
+        }
+    }
+
+    for (const auto& ch : channels) {
+        if (ch.channel == channel) {
+            int32_t enable = ch.enable;
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandCount));
+            return OK;
+        }
+    }
+    ALOGE("%s not able to find channel %d", __func__, channel);
+    return BAD_VALUE;
+}
+
+status_t AidlConversionDp::getEqBandConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
+    int32_t channel, band;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
+
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+    std::vector<DynamicsProcessing::EqBandConfig> bands;
+    if (tag == DynamicsProcessing::preEqBand) {
+        bands = VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                aidlParam, DynamicsProcessing, dynamicsProcessing, preEqBand,
+                std::vector<DynamicsProcessing::EqBandConfig>));
+    } else if (tag == DynamicsProcessing::postEqBand) {
+        bands = VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                aidlParam, DynamicsProcessing, dynamicsProcessing, postEqBand,
+                std::vector<DynamicsProcessing::EqBandConfig>));
+    } else {
+        return BAD_VALUE;
+    }
+
+    for (const auto& bandIt : bands) {
+        if (bandIt.channel == channel && bandIt.band == band) {
+            int32_t enable = bandIt.enable;
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.gainDb));
+            return OK;
+        }
+    }
+    ALOGE("%s not able to find channel %d band %d", __func__, channel, band);
+    return BAD_VALUE;
+}
+
+status_t AidlConversionDp::getMbcBandConfig(EffectParamWriter& param) {
+    int32_t channel, band;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
+                                                  DynamicsProcessing::mbcBand);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+    std::vector<DynamicsProcessing::MbcBandConfig> bands =
+            VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, DynamicsProcessing, dynamicsProcessing, mbcBand,
+                    std::vector<DynamicsProcessing::MbcBandConfig>));
+
+    for (const auto& bandIt : bands) {
+        if (bandIt.channel == channel && bandIt.band == band) {
+            int32_t enable = bandIt.enable;
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.attackTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.releaseTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.ratio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.thresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.kneeWidthDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.noiseGateThresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.expanderRatio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.preGainDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.postGainDb));
+            return OK;
+        }
+    }
+    ALOGE("%s not able to find channel %d band %d", __func__, channel, band);
+    return BAD_VALUE;
+}
+
+status_t AidlConversionDp::getLimiterConfig(EffectParamWriter& param) {
+    int32_t channel;
+    RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
+                                                  DynamicsProcessing::limiter);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+
+    std::vector<DynamicsProcessing::LimiterConfig> configs =
+            VALUE_OR_RETURN_STATUS(aidl::android::GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, DynamicsProcessing, dynamicsProcessing, limiter,
+                    std::vector<DynamicsProcessing::LimiterConfig>));
+
+    for (const auto& config : configs) {
+        if (config.channel == channel) {
+            int32_t inUse = mEngine.limiterInUse;
+            int32_t enable = config.enable;
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.linkGroup));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.attackTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.releaseTimeMs));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.ratio));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.thresholdDb));
+            RETURN_STATUS_IF_ERROR(param.writeToValue(&config.postGainDb));
+            return OK;
+        }
+    }
+    ALOGE("%s not able to find channel %d", __func__, channel);
+    return BAD_VALUE;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
new file mode 100644
index 0000000..c5d5a54
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionDp : public EffectConversionHelperAidl {
+  public:
+    AidlConversionDp(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                     int32_t sessionId, int32_t ioId,
+                     const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionDp() {}
+
+  private:
+    aidl::android::hardware::audio::effect::DynamicsProcessing::EngineArchitecture mEngine;
+
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+
+    ConversionResult<
+            aidl::android::hardware::audio::effect::DynamicsProcessing::ChannelConfig>
+    readChannelConfigFromParam(utils::EffectParamReader& param);
+    ConversionResult<aidl::android::hardware::audio::effect::DynamicsProcessing::EqBandConfig>
+    readEqBandConfigFromParam(utils::EffectParamReader& param);
+    ConversionResult<
+            aidl::android::hardware::audio::effect::DynamicsProcessing::MbcBandConfig>
+    readMbcBandConfigFromParam(utils::EffectParamReader& param);
+    ConversionResult<
+            aidl::android::hardware::audio::effect::DynamicsProcessing::LimiterConfig>
+    readLimiterConfigFromParam(utils::EffectParamReader& param);
+    ConversionResult<
+            aidl::android::hardware::audio::effect::DynamicsProcessing::EngineArchitecture>
+    readEngineArchitectureFromParam(utils::EffectParamReader& param);
+
+    status_t getChannelConfig(aidl::android::hardware::audio::effect::DynamicsProcessing::Tag tag,
+                              utils::EffectParamWriter& writer);
+    status_t getEqBandConfig(aidl::android::hardware::audio::effect::DynamicsProcessing::Tag tag,
+                             utils::EffectParamWriter& param);
+    status_t getMbcBandConfig(utils::EffectParamWriter& param);
+    status_t getLimiterConfig(utils::EffectParamWriter& param);
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
new file mode 100644
index 0000000..754da43
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionEnvReverb"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_environmentalreverb.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionEnvReverb.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::EnvironmentalReverb;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+/**
+ * Macro to read a parameter from the effect_param_t wrapper and set it on the AIDL effect.
+ *
+ * Returns from the enclosing function if there is any error; otherwise execution continues.
+ *
+ * @param param EffectParamReader, a reader wrapper of effect_param_t.
+ * @param aidlType Type of the AIDL parameter field, used to construct the AIDL Parameter union.
+ * @param valueType Type of the value read from effect_param_t.
+ * @param tag The AIDL parameter union field tag.
+ */
+#define SET_AIDL_PARAMETER(param, aidlType, valueType, tag)                                \
+    {                                                                                      \
+        Parameter aidlParam;                                                               \
+        valueType value;                                                                   \
+        if (status_t status = param.readFromValue(&value); status != OK) {                 \
+            ALOGE("%s  %s read from parameter failed, ret %d", __func__, #tag, status);    \
+            return status;                                                                 \
+        }                                                                                  \
+        aidlParam = MAKE_SPECIFIC_PARAMETER(                                               \
+                EnvironmentalReverb, environmentalReverb, tag,                             \
+                VALUE_OR_RETURN_STATUS(aidl::android::convertIntegral<aidlType>(value)));  \
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam))); \
+    }
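+
+// Illustrative use only (mirrors the REVERB_PARAM_ROOM_LEVEL case below): read a legacy int16_t
+// room level from the wrapper and forward it to the AIDL effect as an int32_t
+// EnvironmentalReverb::roomLevelMb, e.g.
+//
+//     SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);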
+
+/**
+ * Macro to get a parameter from the AIDL effect and write it to effect_param_t via the wrapper.
+ *
+ * Returns from the enclosing function if there is any error; otherwise execution continues.
+ *
+ * @param param EffectParamWriter, a writer wrapper of effect_param_t.
+ * @param aidltype Type of the AIDL parameter field, used to construct the AIDL Parameter union.
+ * @param valueType Type of the value written to effect_param_t.
+ * @param tag The AIDL parameter union field tag.
+ */
+#define GET_AIDL_PARAMETER(param, aidltype, valueType, tag)                                        \
+    {                                                                                              \
+        aidltype value;                                                                            \
+        Parameter aidlParam;                                                                       \
+        Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(EnvironmentalReverb, environmentalReverbTag, \
+                                                      EnvironmentalReverb::tag);                   \
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));    \
+        value = VALUE_OR_RETURN_STATUS(                                                            \
+                GET_PARAMETER_SPECIFIC_FIELD(aidlParam, EnvironmentalReverb, environmentalReverb,  \
+                                             EnvironmentalReverb::tag, std::decay_t<aidltype>));   \
+        if (status_t status = param.writeToValue((valueType*)&value); status != OK) {              \
+            param.setStatus(status);                                                               \
+            ALOGE("%s %s write to parameter failed %d, ret %d", __func__, #tag, value, status);    \
+            return status;                                                                         \
+        }                                                                                          \
+    }
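+
+// Illustrative use only (mirrors the REVERB_PARAM_ROOM_LEVEL case in getParameter below): query
+// EnvironmentalReverb::roomLevelMb as an int32_t and write it back to the wrapper as an int16_t,
+// e.g.
+//
+//     GET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);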
+
+status_t AidlConversionEnvReverb::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (status_t status = param.readFromParameter(&type); status != OK) {
+        ALOGE("%s failed to read type from %s, ret %d", __func__, param.toString().c_str(), status);
+        return BAD_VALUE;
+    }
+
+    switch (type) {
+        case REVERB_PARAM_ROOM_LEVEL: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            break;
+        }
+        case REVERB_PARAM_ROOM_HF_LEVEL: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            break;
+        }
+        case REVERB_PARAM_DECAY_TIME: {
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            break;
+        }
+        case REVERB_PARAM_DECAY_HF_RATIO: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            break;
+        }
+        case REVERB_PARAM_REFLECTIONS_LEVEL: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            break;
+        }
+        case REVERB_PARAM_REFLECTIONS_DELAY: {
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            break;
+        }
+        case REVERB_PARAM_REVERB_LEVEL: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            break;
+        }
+        case REVERB_PARAM_REVERB_DELAY: {
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            break;
+        }
+        case REVERB_PARAM_DIFFUSION: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            break;
+        }
+        case REVERB_PARAM_DENSITY: {
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        case REVERB_PARAM_BYPASS: {
+            SET_AIDL_PARAMETER(param, bool, int32_t, bypass);
+            break;
+        }
+        case REVERB_PARAM_PROPERTIES: {
+            if (sizeof(t_reverb_settings) > param.getValueSize()) {
+                ALOGE("%s vsize %zu less than t_reverb_settings size %zu", __func__,
+                      param.getValueSize(), sizeof(t_reverb_settings));
+                return BAD_VALUE;
+            }
+            // this sequence needs to be aligned with t_reverb_settings
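+            // Assumed field order (per effect_environmentalreverb.h): roomLevel, roomHFLevel,
+            // decayTime, decayHFRatio, reflectionsLevel, reflectionsDelay, reverbLevel,
+            // reverbDelay, diffusion, density; the calls below follow that order.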
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            SET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            SET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(EnvironmentalReverb,
+                                                          environmentalReverb, vendor, ext);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+            break;
+        }
+    }
+    return OK;
+}
+
+status_t AidlConversionEnvReverb::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (status_t status = param.readFromParameter(&type); status != OK) {
+        ALOGE("%s failed to read type from %s", __func__, param.toString().c_str());
+        param.setStatus(status);
+        return status;
+    }
+
+    switch (type) {
+        case REVERB_PARAM_ROOM_LEVEL: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            break;
+        }
+        case REVERB_PARAM_ROOM_HF_LEVEL: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            break;
+        }
+        case REVERB_PARAM_DECAY_TIME: {
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            break;
+        }
+        case REVERB_PARAM_DECAY_HF_RATIO: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            break;
+        }
+        case REVERB_PARAM_REFLECTIONS_LEVEL: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            break;
+        }
+        case REVERB_PARAM_REFLECTIONS_DELAY: {
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            break;
+        }
+        case REVERB_PARAM_REVERB_LEVEL: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            break;
+        }
+        case REVERB_PARAM_REVERB_DELAY: {
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            break;
+        }
+        case REVERB_PARAM_DIFFUSION: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            break;
+        }
+        case REVERB_PARAM_DENSITY: {
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        case REVERB_PARAM_BYPASS: {
+            GET_AIDL_PARAMETER(param, bool, int32_t, bypass);
+            break;
+        }
+        case REVERB_PARAM_PROPERTIES: {
+            // this sequence needs to be aligned with t_reverb_settings
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, roomHfLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, decayTimeMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, decayHfRatioPm);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, reflectionsLevelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, reflectionsDelayMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, levelMb);
+            GET_AIDL_PARAMETER(param, int32_t, uint32_t, delayMs);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, diffusionPm);
+            GET_AIDL_PARAMETER(param, int32_t, int16_t, densityPm);
+            break;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(EnvironmentalReverb, environmentalReverb, param);
+        }
+    }
+    return OK;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h
new file mode 100644
index 0000000..8b92374
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionEnvReverb : public EffectConversionHelperAidl {
+  public:
+    AidlConversionEnvReverb(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionEnvReverb() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
new file mode 100644
index 0000000..45b98a1
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionEQ"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_equalizer.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionEq.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Equalizer;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::base::unexpected;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionEq::setParameter(EffectParamReader& param) {
+    uint32_t type;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    switch (type) {
+        case EQ_PARAM_CUR_PRESET: {
+            uint16_t value = 0;
+            if (OK != param.readFromValue(&value)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)value);
+            break;
+        }
+        case EQ_PARAM_BAND_LEVEL: {
+            int32_t band;
+            int16_t level;
+            if (OK != param.readFromParameter(&band) || OK != param.readFromValue(&level)) {
+                ALOGE("%s invalid bandLevel param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            std::vector<Equalizer::BandLevel> bandLevels = {{.index = band, .levelMb = level}};
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
+            break;
+        }
+        case EQ_PARAM_PROPERTIES: {
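+            // Assumed legacy value layout (per effect_equalizer.h): int16_t preset, then
+            // int16_t numBands, then numBands int16_t band levels; a preset index >= 0
+            // takes precedence over the explicit band levels.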
+            int16_t num;
+            if (OK != param.readFromValue(&num)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            // set preset if it's valid
+            if (num >= 0) {
+                aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)num);
+                break;
+            }
+            // set bandLevel if no preset was set
+            if (OK != param.readFromValue(&num)) {
+                ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            std::vector<Equalizer::BandLevel> bandLevels;
+            for (int i = 0; i < num; i++) {
+                Equalizer::BandLevel level({.index = i});
+                if (OK != param.readFromValue((uint16_t*)&level.levelMb)) {
+                    ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+                bandLevels.push_back(level);
+            }
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, vendor, ext);
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+ConversionResult<Parameter> AidlConversionEq::getAidlParameter(Equalizer::Tag tag) {
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Equalizer, equalizerTag, tag);
+    RETURN_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    return aidlParam;
+}
+
+ConversionResult<int32_t> AidlConversionEq::getParameterPreset() {
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::preset));
+    return VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Equalizer, equalizer,
+                                                               Equalizer::preset, int32_t));
+}
+
+ConversionResult<std::string> AidlConversionEq::getParameterPresetName(
+        EffectParamWriter& param) {
+    int32_t presetIdx;
+    if (OK != param.readFromParameter(&presetIdx)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+    Parameter aidlParam = VALUE_OR_RETURN(getAidlParameter(Equalizer::presets));
+    const auto& presets = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+            aidlParam, Equalizer, equalizer, Equalizer::presets, std::vector<Equalizer::Preset>));
+    for (const auto& preset : presets) {
+        if (presetIdx == preset.index) {
+            return preset.name;
+        }
+    }
+    return unexpected(BAD_VALUE);
+}
+
+status_t AidlConversionEq::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        param.setStatus(BAD_VALUE);
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+
+    switch (type) {
+        case EQ_PARAM_NUM_BANDS: {
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            const auto& bandLevels = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
+                    std::vector<Equalizer::BandLevel>));
+            uint16_t bands = bandLevels.size();
+            return param.writeToValue(&bands);
+        }
+        case EQ_PARAM_LEVEL_RANGE: {
+            const auto& ranges = mDesc.capability.range.get<Range::equalizer>();
+            for (const auto& r : ranges) {
+                if (r.min.getTag() == Equalizer::bandLevels &&
+                    r.max.getTag() == Equalizer::bandLevels) {
+                    const auto& aidlMin = r.min.get<Equalizer::bandLevels>();
+                    const auto& aidlMax = r.max.get<Equalizer::bandLevels>();
+                    int16_t min =
+                            std::min_element(aidlMin.begin(), aidlMin.end(), [](auto& a, auto& b) {
+                                return a.levelMb < b.levelMb;
+                            })->levelMb;
+                    int16_t max =
+                            std::max_element(aidlMax.begin(), aidlMax.end(), [](auto& a, auto& b) {
+                                return a.levelMb < b.levelMb;
+                            })->levelMb;
+                    return (OK == param.writeToValue(&min) && OK == param.writeToValue(&max))
+                                   ? OK
+                                   : BAD_VALUE;
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_BAND_LEVEL: {
+            int32_t bandIdx;
+            if (OK != param.readFromParameter(&bandIdx)) {
+                break;
+            }
+
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            const auto& bandLevels = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
+                    std::vector<Equalizer::BandLevel>));
+            for (const auto& band : bandLevels) {
+                if (band.index == bandIdx) {
+                    return param.writeToValue((uint16_t *)&band.levelMb);
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_CENTER_FREQ: {
+            int32_t index;
+            if (OK != param.readFromParameter(&index)) {
+                break;
+            }
+
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::centerFreqMh));
+            const auto& freqs = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::centerFreqMh, std::vector<int>));
+            if ((size_t)index >= freqs.size()) {
+                ALOGE("%s index %d exceed size %zu", __func__, index, freqs.size());
+                break;
+            }
+            return param.writeToValue(&freqs[index]);
+        }
+        case EQ_PARAM_BAND_FREQ_RANGE: {
+            int32_t index;
+            if (OK != param.readFromParameter(&index)) {
+                break;
+            }
+
+            Parameter aidlParam =
+                    VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandFrequencies));
+            const auto& bands = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandFrequencies,
+                    std::vector<Equalizer::BandFrequency>));
+            for (const auto& band : bands) {
+                if (band.index == index) {
+                    return (OK == param.writeToValue(&band.minMh) &&
+                            OK == param.writeToValue(&band.maxMh))
+                                   ? OK
+                                   : BAD_VALUE;
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_GET_BAND: {
+            int32_t freq;
+            if (OK != param.readFromParameter(&freq)) {
+                break;
+            }
+
+            Parameter aidlParam =
+                    VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandFrequencies));
+            const auto& bands = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::bandFrequencies,
+                    std::vector<Equalizer::BandFrequency>));
+            for (const auto& band : bands) {
+                if (freq >= band.minMh && freq <= band.maxMh) {
+                    return param.writeToValue((uint16_t*)&band.index);
+                }
+            }
+            break;
+        }
+        case EQ_PARAM_CUR_PRESET: {
+            int32_t preset = VALUE_OR_RETURN_STATUS(getParameterPreset());
+            return param.writeToValue((uint16_t*)&preset);
+        }
+        case EQ_PARAM_GET_NUM_OF_PRESETS: {
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::presets));
+            const auto& presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Equalizer, equalizer, Equalizer::presets,
+                    std::vector<Equalizer::Preset>));
+            uint16_t num = presets.size();
+            return param.writeToValue(&num);
+        }
+        case EQ_PARAM_GET_PRESET_NAME: {
+            std::string name = VALUE_OR_RETURN_STATUS(getParameterPresetName(param));
+            return param.writeToValue(name.c_str(), name.length());
+        }
+        case EQ_PARAM_PROPERTIES: {
+            int32_t preset = VALUE_OR_RETURN_STATUS(getParameterPreset());
+            if (OK != param.writeToValue((uint16_t*)&preset)) {
+                break;
+            }
+            Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::bandLevels));
+            std::vector<Equalizer::BandLevel> bandLevels =
+                    VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                            aidlParam, Equalizer, equalizer, Equalizer::bandLevels,
+                            std::vector<Equalizer::BandLevel>));
+            uint16_t bands = bandLevels.size();
+            if (OK != param.writeToValue(&bands)) {
+                break;
+            }
+            std::sort(bandLevels.begin(), bandLevels.end(),
+                      [](const auto& a, const auto& b) { return a.index < b.index; });
+            for (const auto& level : bandLevels) {
+                if (status_t status = param.writeToValue((uint16_t*)&level.levelMb); status != OK) {
+                    return status;
+                }
+            }
+            return OK;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Equalizer, equalizer, param);
+        }
+    }
+
+    param.setStatus(BAD_VALUE);
+    ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+    return BAD_VALUE;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
new file mode 100644
index 0000000..f94556c
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionEq : public EffectConversionHelperAidl {
+  public:
+    AidlConversionEq(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+                      int32_t sessionId, int32_t ioId,
+                      const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionEq() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+    ConversionResult<::aidl::android::hardware::audio::effect::Parameter> getAidlParameter(
+            ::aidl::android::hardware::audio::effect::Equalizer::Tag tag);
+    ConversionResult<int32_t> getParameterPreset();
+    ConversionResult<std::string> getParameterPresetName(utils::EffectParamWriter& param);
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
new file mode 100644
index 0000000..73430ba
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionHapticGenerator"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionHapticGenerator.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::HapticGenerator;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionHapticGenerator::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case HG_PARAM_HAPTIC_INTENSITY: {
+            int32_t id = 0, scale;
+            if (OK != param.readFromValue(&id) || OK != param.readFromValue(&scale)) {
+                ALOGE("%s invalid intensity %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            HapticGenerator::HapticScale hpScale(
+                    {.id = id, .scale = (HapticGenerator::VibratorScale)(scale)});
+            aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, hapticScales,
+                                                {hpScale});
+            break;
+        }
+        case HG_PARAM_VIBRATOR_INFO: {
+            float resonantFrequencyHz, qFactor, maxAmplitude;
+            if (OK != param.readFromValue(&resonantFrequencyHz) ||
+                OK != param.readFromValue(&qFactor) || OK != param.readFromValue(&maxAmplitude)) {
+                ALOGE("%s invalid vibrator info %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            HapticGenerator::VibratorInformation info({.resonantFrequencyHz = resonantFrequencyHz,
+                                                       .qFactor = qFactor,
+                                                       .maxAmplitude = maxAmplitude});
+            aidlParam =
+                    MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, vibratorInfo, info);
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, vendor, ext);
+            break;
+        }
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+// No parameter to get for HapticGenerator
+status_t AidlConversionHapticGenerator::getParameter(EffectParamWriter& param) {
+    VENDOR_EXTENSION_GET_AND_RETURN(HapticGenerator, hapticGenerator, param);
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h
new file mode 100644
index 0000000..03114a5
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionHapticGenerator : public EffectConversionHelperAidl {
+  public:
+    AidlConversionHapticGenerator(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionHapticGenerator() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
new file mode 100644
index 0000000..31eec65
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionLoudnessEnhancer"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_loudnessenhancer.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionLoudnessEnhancer.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::hardware::audio::effect::LoudnessEnhancer;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionLoudnessEnhancer::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    int32_t gain = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&gain)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(LoudnessEnhancer, loudnessEnhancer, gainMb, gain);
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(LoudnessEnhancer, loudnessEnhancer, vendor, ext);
+            break;
+        }
+    }
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionLoudnessEnhancer::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    switch (type) {
+        case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB: {
+            Parameter aidlParam;
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(LoudnessEnhancer, loudnessEnhancerTag,
+                                                        LoudnessEnhancer::gainMb);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            int32_t gain = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, LoudnessEnhancer, loudnessEnhancer, LoudnessEnhancer::gainMb,
+                    std::decay_t<decltype(gain)>));
+            return param.writeToValue(&gain);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(LoudnessEnhancer, loudnessEnhancer, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h
new file mode 100644
index 0000000..c0402f94
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionLoudnessEnhancer : public EffectConversionHelperAidl {
+  public:
+    AidlConversionLoudnessEnhancer(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionLoudnessEnhancer() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
new file mode 100644
index 0000000..7c34ed7
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionNoiseSuppression"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_ns.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionNoiseSuppression.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::NoiseSuppression;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionNoiseSuppression::setParameter(EffectParamReader& param) {
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case NS_PARAM_LEVEL: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(NoiseSuppression, noiseSuppression, level,
+                                                static_cast<NoiseSuppression::Level>(value));
+            break;
+        }
+        case NS_PARAM_TYPE: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(NoiseSuppression, noiseSuppression, type,
+                                                static_cast<NoiseSuppression::Type>(value));
+            break;
+        }
+        default: {
+            // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(NoiseSuppression, noiseSuppression, vendor, ext);
+            break;
+        }
+    }
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionNoiseSuppression::getParameter(EffectParamWriter& param) {
+    uint32_t paramType = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&paramType)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (paramType) {
+        case NS_PARAM_LEVEL: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(NoiseSuppression, noiseSuppressionTag,
+                                                        NoiseSuppression::level);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            NoiseSuppression::Level level = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, NoiseSuppression, noiseSuppression,
+                                                 NoiseSuppression::level, NoiseSuppression::Level));
+            value = static_cast<uint32_t>(level);
+            break;
+        }
+        case NS_PARAM_TYPE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(NoiseSuppression, noiseSuppressionTag,
+                                                        NoiseSuppression::type);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            NoiseSuppression::Type nsType = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, NoiseSuppression, noiseSuppression,
+                                                 NoiseSuppression::type, NoiseSuppression::Type));
+            value = static_cast<uint32_t>(nsType);
+            break;
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(NoiseSuppression, noiseSuppression, param);
+        }
+    }
+    return param.writeToValue(&value);
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h
new file mode 100644
index 0000000..f51e13a
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionNoiseSuppression : public EffectConversionHelperAidl {
+  public:
+    AidlConversionNoiseSuppression(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionNoiseSuppression() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
new file mode 100644
index 0000000..e936aef
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionPresetReverb"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_presetreverb.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionPresetReverb.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::PresetReverb;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionPresetReverb::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    if (type == REVERB_PARAM_PRESET) {
+        uint16_t value = 0;
+        if (OK != param.readFromValue(&value)) {
+            ALOGE("%s invalid preset value %s", __func__, param.toString().c_str());
+            return BAD_VALUE;
+        }
+        aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, preset,
+                                            static_cast<PresetReverb::Presets>(value));
+    } else {
+        // vendor extension: copy the data area into a DefaultExtension; parameter ignored
+        VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+        aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, vendor, ext);
+    }
+
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionPresetReverb::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    uint16_t value = 0;
+    ALOGV("%s enter %s", __func__, param.toString().c_str());
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    if (type == REVERB_PARAM_PRESET) {
+        Parameter aidlParam;
+        Parameter::Id id =
+                MAKE_SPECIFIC_PARAMETER_ID(PresetReverb, presetReverbTag, PresetReverb::preset);
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+        auto aidlPreset = VALUE_OR_RETURN_STATUS(
+                GET_PARAMETER_SPECIFIC_FIELD(aidlParam, PresetReverb, presetReverb,
+                                             PresetReverb::preset, PresetReverb::Presets));
+        value = static_cast<uint16_t>(aidlPreset);
+    } else {
+        // handle vendor extension
+        VENDOR_EXTENSION_GET_AND_RETURN(PresetReverb, presetReverb, param);
+    }
+    return param.writeToValue(&value);
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h
new file mode 100644
index 0000000..397d6e6
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionPresetReverb : public EffectConversionHelperAidl {
+  public:
+    AidlConversionPresetReverb(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionPresetReverb() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
new file mode 100644
index 0000000..eadd6c3
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionSpatializer"
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_spatializer.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionSpatializer.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionSpatializer::setParameter(EffectParamReader& param) {
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_EffectParameterReader_ParameterExtension(param));
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
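+    // Pass the legacy parameter through as a vendor extension: wrap the raw parameter bytes in a
+    // DefaultExtension/VendorExtension pair, query the HAL with a vendorEffectTag id, and convert
+    // the returned ParameterExtension back into the effect_param_t reply.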
+    DefaultExtension defaultExt;
+    // size the DefaultExtension byte vector, then read the raw parameter bytes into it
+    defaultExt.bytes.resize(param.getParameterSize());
+    if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+
+    VendorExtension idTag;
+    idTag.extension.setParcelable(defaultExt);
+    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    // copy the AIDL extension data back to effect_param_t
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
+                                                                                  param));
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
new file mode 100644
index 0000000..c44567c
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionSpatializer : public EffectConversionHelperAidl {
+  public:
+    AidlConversionSpatializer(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionSpatializer() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
new file mode 100644
index 0000000..488d5cd
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#include <type_traits>
+#define LOG_TAG "AidlConversionVendorExtension"
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionVendorExtension.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+/**
+ * For all effect types we don't currently support, provide a default extension implementation
+ * that uses std::vector<uint8_t> to pass through all data in the format of effect_param_t (the
+ * data we currently receive from libaudioclient).
+ * This logic will be removed once libaudioclient adopts the same AIDL parameter union; after
+ * that, the framework no longer needs to do any AIDL conversion, and the vendor extension can be
+ * passed down in Parameter as is.
+ */
+status_t AidlConversionVendorExtension::setParameter(EffectParamReader& param) {
+    Parameter aidlParam = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_EffectParameterReader_ParameterExtension(param));
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionVendorExtension::getParameter(EffectParamWriter& param) {
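+    // Wrap the legacy parameter section in a VendorExtension to build the Parameter::Id for the
+    // query, then convert the HAL's ParameterExtension reply back into the effect_param_t.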
+    VendorExtension extId = VALUE_OR_RETURN_STATUS(
+            aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(param));
+    Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, extId);
+    Parameter aidlParam;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    // copy the AIDL extension data back to effect_param_t
+    return VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
+                                                                                  param));
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h
new file mode 100644
index 0000000..fd22e5c
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionVendorExtension : public EffectConversionHelperAidl {
+  public:
+    AidlConversionVendorExtension(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionVendorExtension() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
new file mode 100644
index 0000000..c95c3a9
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionVirtualizer"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/aidl_effects_utils.h>
+#include <system/audio_effects/effect_virtualizer.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionVirtualizer.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::Virtualizer;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionVirtualizer::setParameter(EffectParamReader& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case VIRTUALIZER_PARAM_STRENGTH: {
+            int16_t strength = 0;
+            if (OK != param.readFromValue(&strength)) {
+                ALOGE("%s invalid param %s for type %d", __func__, param.toString().c_str(), type);
+                return BAD_VALUE;
+            }
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, strengthPm, strength);
+            break;
+        }
+        case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
+            audio_devices_t deviceType;
+            if (OK != param.readFromValue(&deviceType)) {
+                ALOGE("%s invalid param %s for type %d", __func__, param.toString().c_str(), type);
+                return BAD_VALUE;
+            }
+            AudioDeviceDescription deviceDesc = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::legacy2aidl_audio_devices_t_AudioDeviceDescription(
+                            deviceType));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, device, deviceDesc);
+            break;
+        }
+        default: {
+            // vendor extension: copy the data area into a DefaultExtension; parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, vendor, ext);
+            break;
+        }
+    }
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionVirtualizer::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0;
+    if (OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED: {
+            // an invalid range indicates that this parameter is not supported
+            uint32_t support =
+                    ::aidl::android::hardware::audio::effect::isRangeValid<Range::Tag::virtualizer>(
+                            Virtualizer::strengthPm, mDesc.capability);
+            return param.writeToValue(&support);
+        }
+        case VIRTUALIZER_PARAM_STRENGTH: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Virtualizer, virtualizerTag,
+                                                          Virtualizer::strengthPm);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            int16_t strength = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Virtualizer, virtualizer, Virtualizer::strengthPm, int32_t));
+            return param.writeToValue(&strength);
+        }
+        case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
+            audio_channel_mask_t mask;
+            audio_devices_t device;
+            if (OK != param.readFromParameter(&mask) || OK != param.readFromParameter(&device)) {
+                ALOGW("%s illegal param %s", __func__, param.toString().c_str());
+                return BAD_VALUE;
+            }
+            Virtualizer::SpeakerAnglesPayload payload = {
+                    .layout = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                                    mask, false)),
+                    .device = VALUE_OR_RETURN_STATUS(
+                            ::aidl::android::legacy2aidl_audio_devices_t_AudioDeviceDescription(
+                                    device))};
+            Virtualizer::Id vId = UNION_MAKE(Virtualizer::Id, speakerAnglesPayload, payload);
+            Parameter::Id id = UNION_MAKE(Parameter::Id, virtualizerTag, vId);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            const auto& angles = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Virtualizer, virtualizer, Virtualizer::speakerAngles,
+                    std::vector<Virtualizer::ChannelAngle>));
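+            // legacy reply layout: one (channel mask, azimuth, elevation) triplet per channel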
+            for (const auto& angle : angles) {
+                const audio_channel_mask_t chMask = ::aidl::android::
+                        aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+                                angle.channel, false);
+                ALOGV("%s aidl %d ch %d", __func__, angle.channel, chMask);
+                if (OK != param.writeToValue(&chMask) ||
+                    OK != param.writeToValue(&angle.azimuthDegree) ||
+                    OK != param.writeToValue(&angle.elevationDegree)) {
+                    ALOGW("%s can't write angles to param %s", __func__, param.toString().c_str());
+                    return BAD_VALUE;
+                }
+            }
+            return OK;
+        }
+        case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Virtualizer, virtualizerTag,
+                                                          Virtualizer::device);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            AudioDeviceDescription device = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Virtualizer, virtualizer,
+                                                 Virtualizer::device, AudioDeviceDescription));
+            const audio_devices_t deviceType = VALUE_OR_RETURN_STATUS(
+                    ::aidl::android::aidl2legacy_AudioDeviceDescription_audio_devices_t(device));
+            return param.writeToValue(&deviceType);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Virtualizer, virtualizer, param);
+        }
+    }
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h
new file mode 100644
index 0000000..91c0fcd
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionVirtualizer : public EffectConversionHelperAidl {
+  public:
+    AidlConversionVirtualizer(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionVirtualizer() {}
+
+  private:
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
new file mode 100644
index 0000000..b4440ee
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionVisualizer"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_visualizer.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionVisualizer.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::aidl::android::hardware::audio::effect::Visualizer;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionVisualizer::setParameter(EffectParamReader& param) {
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+        OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case VISUALIZER_PARAM_CAPTURE_SIZE: {
+            mCaptureSize = value;
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, captureSamples, value);
+            break;
+        }
+        case VISUALIZER_PARAM_SCALING_MODE: {
+            Visualizer::ScalingMode mode = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_Parameter_Visualizer_uint32_ScalingMode(value));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, scalingMode, mode);
+            break;
+        }
+        case VISUALIZER_PARAM_LATENCY: {
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, latencyMs, value);
+            break;
+        }
+        case VISUALIZER_PARAM_MEASUREMENT_MODE: {
+            Visualizer::MeasurementMode mode = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_Parameter_Visualizer_uint32_MeasurementMode(value));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, measurementMode, mode);
+            break;
+        }
+        default: {
+            // vendor extension: copy the data area into a DefaultExtension; parameter ignored
+            VendorExtension ext = VALUE_OR_RETURN_STATUS(
+                    aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+            aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, vendor, ext);
+            break;
+        }
+    }
+    return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionVisualizer::getParameter(EffectParamWriter& param) {
+    uint32_t type = 0, value = 0;
+    if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int32_t)) ||
+        OK != param.readFromParameter(&type)) {
+        ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+        param.setStatus(BAD_VALUE);
+        return BAD_VALUE;
+    }
+    Parameter aidlParam;
+    switch (type) {
+        case VISUALIZER_PARAM_CAPTURE_SIZE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::captureSamples);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::captureSamples, int32_t));
+            mCaptureSize = value;
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_SCALING_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::scalingMode);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            Visualizer::ScalingMode mode = VALUE_OR_RETURN_STATUS(
+                    GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Visualizer, visualizer,
+                                                 Visualizer::scalingMode, Visualizer::ScalingMode));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_Visualizer_ScalingMode_uint32(mode));
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_LATENCY: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::latencyMs);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            value = (int32_t)VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::latencyMs, int32_t));
+            return param.writeToValue(&value);
+        }
+        case VISUALIZER_PARAM_MEASUREMENT_MODE: {
+            Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                          Visualizer::measurementMode);
+            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+            Visualizer::MeasurementMode mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+                    aidlParam, Visualizer, visualizer, Visualizer::measurementMode,
+                    Visualizer::MeasurementMode));
+            value = VALUE_OR_RETURN_STATUS(
+                    aidl::android::aidl2legacy_Parameter_Visualizer_MeasurementMode_uint32(mode));
+            return param.writeToValue(&value);
+        }
+        default: {
+            VENDOR_EXTENSION_GET_AND_RETURN(Visualizer, visualizer, param);
+        }
+    }
+}
+
+status_t AidlConversionVisualizer::visualizerCapture(uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData || *replySize != mCaptureSize) {
+        ALOGE("%s illegal param replySize %p pReplyData %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag,
+                                                    Visualizer::captureSampleBuffer);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    const auto& samples = VALUE_OR_RETURN_STATUS(
+            GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Visualizer, visualizer,
+                                         Visualizer::captureSampleBuffer, std::vector<uint8_t>));
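+    // copy at most the requested number of bytes and report the actual length back via replySize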
+    const size_t len = std::min(static_cast<size_t>(*replySize), samples.size());
+    std::memcpy(pReplyData, samples.data(), len);
+    *replySize = len;
+    return OK;
+}
+
+status_t AidlConversionVisualizer::visualizerMeasure(uint32_t* replySize, void* pReplyData) {
+    if (!replySize || !pReplyData || *replySize != 2 * sizeof(int32_t)) {
+        ALOGE("%s illegal param replySize %p pReplyData %p", __func__, replySize, pReplyData);
+        return BAD_VALUE;
+    }
+
+    Parameter aidlParam;
+    Parameter::Id id =
+            MAKE_SPECIFIC_PARAMETER_ID(Visualizer, visualizerTag, Visualizer::measurement);
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+    const auto& measure = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            aidlParam, Visualizer, visualizer, Visualizer::measurement, Visualizer::Measurement));
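+    // the legacy measurement reply is two int32 values: RMS first, then peak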
+    int32_t* reply = (int32_t *) pReplyData;
+    *reply++ = measure.rms;
+    *reply = measure.peak;
+    return OK;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
new file mode 100644
index 0000000..e380bc6
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionVisualizer : public EffectConversionHelperAidl {
+  public:
+    AidlConversionVisualizer(
+            std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+            int32_t sessionId, int32_t ioId,
+            const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+        : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+    ~AidlConversionVisualizer() {}
+
+  private:
+    uint32_t mCaptureSize = 0;
+    status_t setParameter(utils::EffectParamReader& param) override;
+    status_t getParameter(utils::EffectParamWriter& param) override;
+    status_t visualizerCapture(uint32_t* replySize, void* pReplyData) override;
+    status_t visualizerMeasure(uint32_t* replySize, void* pReplyData) override;
+};
+
+}  // namespace effect
+}  // namespace android
diff --git a/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
new file mode 100644
index 0000000..2323ed6
--- /dev/null
+++ b/media/libaudiohal/include/media/audiohal/AudioHalVersionInfo.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <utility>
+#include <android/media/AudioHalVersion.h>
+
+namespace android::detail {
+
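+// Thin wrapper around the stable android.media.AudioHalVersion parcelable that adds read-only
+// accessors and a version-string formatter.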
+class AudioHalVersionInfo : public android::media::AudioHalVersion {
+  public:
+    AudioHalVersionInfo(Type halType, int halMajor, int halMinor = 0) {
+        type = halType;
+        major = halMajor;
+        minor = halMinor;
+    }
+
+    bool isHidl() const { return type == Type::HIDL; }
+
+    Type getType() const { return type; }
+
+    int getMajorVersion() const { return major; }
+
+    int getMinorVersion() const { return minor; }
+
+    /** Keep the HIDL version format for backward compatibility; AIDL is reported as "aidl". */
+    std::string toVersionString() const {
+        std::string versionStr =
+                android::internal::ToString(major) + "." + android::internal::ToString(minor);
+        if (type == Type::AIDL) {
+            return "aidl";
+        } else {
+            return versionStr;
+        }
+    }
+};
+
+} // namespace android::detail
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index d27ad4c..71e5e7a 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -19,22 +19,34 @@
 
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioMode.h>
+#include <android/media/audio/common/AudioPort.h>
+#include <android/media/AudioRoute.h>
 #include <error/Result.h>
 #include <media/audiohal/EffectHalInterface.h>
-#include <media/MicrophoneInfo.h>
 #include <system/audio.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 #include <utils/String8.h>
 
+namespace ndk {
+class SpAIBinder;
+}
+
 namespace android {
 
 class StreamInHalInterface;
 class StreamOutHalInterface;
 
-class DeviceHalInterface : public RefBase
+class DeviceHalInterface : public virtual RefBase
 {
   public:
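+    // Fills the list of audio ports declared by this HAL module.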
+    virtual status_t getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) = 0;
+
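+    // Fills the list of audio routes between this module's ports.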
+    virtual status_t getAudioRoutes(std::vector<media::AudioRoute> *routes) = 0;
+
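+    // Fills the list of audio modes supported by this HAL module.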
+    virtual status_t getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) = 0;
+
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
     virtual status_t getSupportedDevices(uint32_t *devices) = 0;
 
@@ -107,7 +119,7 @@
     virtual status_t releaseAudioPatch(audio_patch_handle_t patch) = 0;
 
     // Fills the list of supported attributes for a given audio port.
-    virtual status_t getAudioPort(struct audio_port *port) = 0;
+    virtual status_t getAudioPort(struct audio_port* port) = 0;
 
     // Fills the list of supported attributes for a given audio port.
     virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
@@ -116,7 +128,8 @@
     virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
 
     // List microphones
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
+    virtual status_t getMicrophones(
+            std::vector<audio_microphone_characteristic_t>* microphones) = 0;
 
     virtual status_t addDeviceEffect(
             audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
@@ -125,17 +138,28 @@
 
     virtual status_t getMmapPolicyInfos(
             media::audio::common::AudioMMapPolicyType policyType,
-            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos)  = 0;
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
     virtual int32_t getAAudioMixerBurstCount() = 0;
     virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
 
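+    // Reports whether the HAL supports Bluetooth variable latency.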
+    virtual int32_t supportsBluetoothVariableLatency(bool* supports) = 0;
+
     // Update the connection status of an external device.
-    virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+    virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) = 0;
+
+    // Enable simulation of external devices connection at the HAL level.
+    virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
 
     virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
 
     virtual status_t dump(int fd, const Vector<String16>& args) = 0;
 
+    // Returns the sound dose binder interface if it is supported by the HAL, nullptr otherwise
+    virtual status_t getSoundDoseInterface(const std::string& module,
+                                           ::ndk::SpAIBinder* soundDoseBinder) = 0;
+
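+    // Notifies the HAL that the given external device is about to be disconnected.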
+    virtual status_t prepareToDisconnectExternalDevice(const struct audio_port_v7* port) = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     DeviceHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index 17010e6..8397e9b 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -14,14 +14,17 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
+#pragma once
 
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <android/media/SurroundSoundConfig.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 #include <vector>
 
+#include "AudioHalVersionInfo.h"
+
 namespace android {
 
 class DevicesFactoryHalCallback : public RefBase
@@ -33,6 +36,8 @@
 class DevicesFactoryHalInterface : public RefBase
 {
   public:
+    virtual status_t getDeviceNames(std::vector<std::string> *names) = 0;
+
     // Opens a device with the specified name. To close the device, it is
     // necessary to release references to the returned object.
     virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
@@ -43,7 +48,11 @@
     // The callback can be only set once.
     virtual status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) = 0;
 
-    virtual float getHalVersion() const = 0;
+    virtual android::detail::AudioHalVersionInfo getHalVersion() const = 0;
+
+    virtual status_t getSurroundSoundConfig(media::SurroundSoundConfig *config) = 0;
+
+    virtual status_t getEngineConfig(media::audio::common::AudioHalEngineConfig *config) = 0;
 
     static sp<DevicesFactoryHalInterface> create();
 
@@ -55,5 +64,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_INTERFACE_H
diff --git a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
index 3e505bd..832df18 100644
--- a/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectsFactoryHalInterface.h
@@ -14,14 +14,18 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
-#define ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
+#pragma once
+#include <vector>
 
 #include <media/audiohal/EffectHalInterface.h>
+#include <media/EffectsConfig.h>
 #include <system/audio_effect.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 
+#include "AudioHalVersionInfo.h"
+#include "FactoryHal.h"
+
 namespace android {
 
 class EffectsFactoryHalInterface : public RefBase
@@ -31,34 +35,37 @@
     virtual status_t queryNumberEffects(uint32_t *pNumEffects) = 0;
 
     // Returns a descriptor of the next available effect.
-    virtual status_t getDescriptor(uint32_t index,
-            effect_descriptor_t *pDescriptor) = 0;
+    virtual status_t getDescriptor(uint32_t index, effect_descriptor_t* pDescriptor) = 0;
 
-    virtual status_t getDescriptor(const effect_uuid_t *pEffectUuid,
-            effect_descriptor_t *pDescriptor) = 0;
+    virtual status_t getDescriptor(const effect_uuid_t* pEffectUuid,
+                                   effect_descriptor_t* pDescriptor) = 0;
 
     virtual status_t getDescriptors(const effect_uuid_t *pEffectType,
                                     std::vector<effect_descriptor_t> *descriptors) = 0;
 
+    virtual std::shared_ptr<const effectsConfig::Processings> getProcessings() const = 0;
+
+    // Returns an error status_t if the config parser failed, or the number of skipped elements
+    // when parsing succeeded (always 0 for AIDL).
+    virtual error::Result<size_t> getSkippedElements() const = 0;
+
     // Creates an effect engine of the specified type.
     // To release the effect engine, it is necessary to release references
     // to the returned effect object.
-    virtual status_t createEffect(const effect_uuid_t *pEffectUuid,
-            int32_t sessionId, int32_t ioId, int32_t deviceId,
-            sp<EffectHalInterface> *effect) = 0;
+    virtual status_t createEffect(const effect_uuid_t* pEffectUuid, int32_t sessionId, int32_t ioId,
+                                  int32_t deviceId, sp<EffectHalInterface>* effect) = 0;
 
     virtual status_t dumpEffects(int fd) = 0;
 
-    virtual float getHalVersion() = 0;
-
     static sp<EffectsFactoryHalInterface> create();
 
     virtual status_t allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) = 0;
     virtual status_t mirrorBuffer(void* external, size_t size,
                                   sp<EffectBufferHalInterface>* buffer) = 0;
 
+    virtual android::detail::AudioHalVersionInfo getHalVersion() const = 0;
+
     // Helper function to compare effect uuid to EFFECT_UUID_NULL.
-    static bool isNullUuid(const effect_uuid_t *pEffectUuid);
+    static bool isNullUuid(const effect_uuid_t* pEffectUuid);
 
   protected:
     // Subclasses can not be constructed directly by clients.
@@ -68,5 +75,3 @@
 };
 
 } // namespace android
-
-#endif // ANDROID_HARDWARE_EFFECTS_FACTORY_HAL_INTERFACE_H
diff --git a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h b/media/libaudiohal/include/media/audiohal/FactoryHal.h
similarity index 62%
rename from media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
rename to media/libaudiohal/include/media/audiohal/FactoryHal.h
index 866dd3e..4776d98 100644
--- a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
+++ b/media/libaudiohal/include/media/audiohal/FactoryHal.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,24 +14,19 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
-#define ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
+#pragma once
 
 #include <string>
 #include <utility>
-
 #include <utils/StrongPointer.h>
 
+#include "AudioHalVersionInfo.h"
+
 namespace android {
 
-// The pair of the interface's package name and the interface name,
-// e.g. <"android.hardware.audio", "IDevicesFactory">.
-// Splitting is used for easier construction of versioned names (FQNs).
-using InterfaceName = std::pair<std::string, std::string>;
-
 namespace detail {
 
-void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface);
+void* createPreferredImpl(bool isCore);
 
 }  // namespace detail
 
@@ -45,17 +40,12 @@
  * shared libraries the loader function considers which interface among two has the most
  * recent version. Thus, a pair of interface names must be passed in.
  *
- * @param iface the interface that needs to be created.
- * @param siblingIface the interface which occupies the same shared library.
+ * @param isCore Indicating if this is audio Core HAL service interface.
  * @return the preferred available implementation or nullptr if none are available.
  */
+
 template <class Interface>
-static sp<Interface> createPreferredImpl(
-        const InterfaceName& iface, const InterfaceName& siblingIface) {
-    return sp<Interface>{
-        static_cast<Interface*>(detail::createPreferredImpl(iface, siblingIface))};
+static sp<Interface> createPreferredImpl(bool isCore) {
+    return sp<Interface>{static_cast<Interface*>(detail::createPreferredImpl(isCore))};
 }
-
 } // namespace android
-
-#endif // ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 1d52b7d..a780a17 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -19,8 +19,8 @@
 
 #include <vector>
 
+#include <android/media/MicrophoneInfoFw.h>
 #include <media/audiohal/EffectHalInterface.h>
-#include <media/MicrophoneInfo.h>
 #include <system/audio.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
@@ -110,8 +110,8 @@
     virtual void onError() {}
 
   protected:
-    StreamOutHalInterfaceCallback() {}
-    virtual ~StreamOutHalInterfaceCallback() {}
+    StreamOutHalInterfaceCallback() = default;
+    virtual ~StreamOutHalInterfaceCallback() = default;
 };
 
 class StreamOutHalInterfaceEventCallback : public virtual RefBase {
@@ -119,8 +119,8 @@
     virtual void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) = 0;
 
 protected:
-    StreamOutHalInterfaceEventCallback() {}
-    virtual ~StreamOutHalInterfaceEventCallback() {}
+    StreamOutHalInterfaceEventCallback() = default;
+    virtual ~StreamOutHalInterfaceEventCallback() = default;
 };
 
 class StreamOutHalInterfaceLatencyModeCallback : public virtual RefBase {
@@ -131,8 +131,8 @@
     virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
 
 protected:
-    StreamOutHalInterfaceLatencyModeCallback() {}
-    virtual ~StreamOutHalInterfaceLatencyModeCallback() {}
+    StreamOutHalInterfaceLatencyModeCallback() = default;
+    virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
 class StreamOutHalInterface : public virtual StreamHalInterface {
@@ -273,7 +273,7 @@
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
-    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
+    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) = 0;
 
     // Set direction for capture processing
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t) = 0;
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
new file mode 100644
index 0000000..8210f7d
--- /dev/null
+++ b/media/libaudiohal/tests/Android.bp
@@ -0,0 +1,72 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Headers module is in frameworks/av/Android.bp because modules are not allowed
+// to refer to headers in parent directories and the headers live in
+// frameworks/av/include.
+
+package {
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "AudioHalTestDefaults",
+    test_suites: ["device-tests"],
+    defaults: [
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wthread-safety",
+        "-DBACKEND_NDK",
+    ],
+
+    shared_libs: [
+        "audioclient-types-aidl-cpp",
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudiohal",
+        "liblog",
+        "libutils",
+        "libvibrator",
+    ],
+}
+
+cc_test {
+    name: "EffectsFactoryHalInterfaceTest",
+    srcs: ["EffectsFactoryHalInterface_test.cpp"],
+    defaults: ["AudioHalTestDefaults"],
+    header_libs: ["libaudiohal_headers"],
+}
+
+cc_test {
+    name: "EffectProxyTest",
+    srcs: [
+        "EffectProxy_test.cpp",
+        ":audio_effectproxy_src_files",
+    ],
+    defaults: [
+        "AudioHalTestDefaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudiohal_default",
+        "use_libaidlvintf_gtest_helper_static",
+    ],
+    shared_libs: [
+        "android.hardware.common.fmq-V1-ndk",
+        "libbinder_ndk",
+        "libfmq",
+    ],
+    header_libs: ["libaudiohalimpl_headers"],
+}
diff --git a/media/libaudiohal/tests/EffectProxy_test.cpp b/media/libaudiohal/tests/EffectProxy_test.cpp
new file mode 100644
index 0000000..92e3dce
--- /dev/null
+++ b/media/libaudiohal/tests/EffectProxy_test.cpp
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <cstddef>
+#include <cstdint>
+#include <memory>
+#include <utility>
+#define LOG_TAG "EffectProxyTest"
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <aidl/Vintf.h>
+#include <android/binder_manager.h>
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include "EffectProxy.h"
+
+/**
+ * This test suite depends on the audio effect AIDL service.
+ */
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::aidl::android::media::audio::common::PcmType;
+using ::android::effect::EffectProxy;
+
+class EffectProxyTest : public testing::Test {
+  public:
+    void SetUp() override {
+        auto serviceName = android::getAidlHalInstanceNames(IFactory::descriptor);
+        // only run the unit test against the first one in case more than one EffectFactory service exists
+        ASSERT_NE(0ul, serviceName.size());
+        mFactory = IFactory::fromBinder(
+                ndk::SpAIBinder(AServiceManager_waitForService(serviceName[0].c_str())));
+        ASSERT_NE(nullptr, mFactory);
+        mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &mDescs);
+        for (const auto& desc : mDescs) {
+            if (desc.common.id.proxy.has_value()) {
+                mProxyDescs.insert({desc.common.id, desc});
+            }
+        }
+    }
+
+    void TearDown() override {}
+
+    const AudioFormatDescription kDefaultFormatDescription = {
+            .type = AudioFormatType::PCM, .pcm = PcmType::FLOAT_32_BIT, .encoding = ""};
+
+    Parameter::Common createParamCommon(
+            int session = 0, int ioHandle = -1, int iSampleRate = 48000, int oSampleRate = 48000,
+            long iFrameCount = 0x100, long oFrameCount = 0x100,
+            AudioChannelLayout inputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO),
+            AudioChannelLayout outputChannelLayout =
+                    AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+                            AudioChannelLayout::LAYOUT_STEREO)) {
+        Parameter::Common common;
+        common.session = session;
+        common.ioHandle = ioHandle;
+
+        auto& input = common.input;
+        auto& output = common.output;
+        input.base.sampleRate = iSampleRate;
+        input.base.channelMask = inputChannelLayout;
+        input.base.format = kDefaultFormatDescription;
+        input.frameCount = iFrameCount;
+        output.base.sampleRate = oSampleRate;
+        output.base.channelMask = outputChannelLayout;
+        output.base.format = kDefaultFormatDescription;
+        output.frameCount = oFrameCount;
+        return common;
+    }
+
+    static bool isFlagSet(const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+                          Flags::HardwareAccelerator flag) {
+        return desc.common.flags.hwAcceleratorMode == flag;
+    }
+
+    enum TupleIndex { HANDLE, DESCRIPTOR };
+    using EffectProxyTuple = std::tuple<std::shared_ptr<EffectProxy>, std::vector<Descriptor>>;
+
+    std::map<AudioUuid, EffectProxyTuple> createAllProxies() {
+        std::map<AudioUuid, EffectProxyTuple> proxyMap;
+        for (const auto& itor : mProxyDescs) {
+            const auto& uuid = itor.first.proxy.value();
+            if (proxyMap.end() == proxyMap.find(uuid)) {
+                std::get<TupleIndex::HANDLE>(proxyMap[uuid]) =
+                        ndk::SharedRefBase::make<EffectProxy>(itor.first, mFactory);
+            }
+        }
+        return proxyMap;
+    }
+
+    bool addAllSubEffects(std::map<AudioUuid, EffectProxyTuple>& proxyMap) {
+        for (auto& itor : mProxyDescs) {
+            const auto& uuid = itor.first.proxy.value();
+            if (proxyMap.end() == proxyMap.find(uuid)) {
+                return false;
+            }
+            auto& proxy = std::get<TupleIndex::HANDLE>(proxyMap[uuid]);
+            if (!proxy->addSubEffect(itor.second).isOk()) {
+                return false;
+            }
+            std::get<TupleIndex::DESCRIPTOR>(proxyMap[uuid]).emplace_back(itor.second);
+        }
+        return true;
+    }
+
+    std::shared_ptr<IFactory> mFactory;
+    std::vector<Descriptor> mDescs;
+    std::map<Descriptor::Identity, Descriptor> mProxyDescs;
+};
+
+TEST_F(EffectProxyTest, createProxy) {
+    auto proxyMap = createAllProxies();
+    // if any descriptors define a proxy, then proxyMap cannot be empty
+    EXPECT_EQ(mProxyDescs.size() == 0, proxyMap.size() == 0);
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateAndDestroy) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateOpenCloseDestroy) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, then set the active sub-effect with different offload settings
+TEST_F(EffectProxyTest, setOffloadParam) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    // setOffloadParam should succeed only if a sub-effect with the matching accelerator flag exists
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+TEST_F(EffectProxyTest, destroyWithoutCreate) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+TEST_F(EffectProxyTest, closeWithoutOpen) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    for (const auto& itor : proxyMap) {
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// Add sub-effects, set the active sub-effect, create, open, and send commands; expect successful handling
+TEST_F(EffectProxyTest, normalSequency) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isTunnelExist = [&]() {
+        for (const auto& itor : mProxyDescs) {
+            if (isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL)) {
+                return true;
+            }
+        }
+        return false;
+    }();
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .1f, .right = -0.8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        effect_offload_param_t offloadParam{true, 0};
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// setParameter, change active sub-effect, verify with getParameter
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyParameter) {
+    auto proxyMap = createAllProxies();
+    EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    Parameter::VolumeStereo volumeStereo({.left = .5f, .right = .8f});
+    Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+    Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->setParameter(param).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+        EXPECT_EQ(expect, param);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+// send command, change active sub-effect, then verify the state with getState
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyState) {
+    auto proxyMap = createAllProxies();
+    ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+    bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+    for (const auto& itor : mProxyDescs) {
+        isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+        isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+        isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+    }
+
+    Parameter::Common common = createParamCommon();
+    IEffect::OpenEffectReturn ret;
+    State state;
+    for (const auto& itor : proxyMap) {
+        Parameter expect;
+        auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+        EXPECT_TRUE(proxy->create().isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+        EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        effect_offload_param_t offloadParam{false, 0};
+        EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        offloadParam.isOffload = true;
+        EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::PROCESSING, state);
+
+        EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::IDLE, state);
+
+        EXPECT_TRUE(proxy->close().isOk());
+        EXPECT_TRUE(proxy->getState(&state).isOk());
+        EXPECT_EQ(State::INIT, state);
+        EXPECT_TRUE(proxy->destroy().isOk());
+    }
+}
+
+} // namespace android
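[editor's note] For readers skimming the patch, the call sequence these tests exercise against an EffectProxy is roughly the sketch below. This is a non-authoritative summary assembled from the fixture and test bodies above; identity, factory and desc stand for a Descriptor::Identity carrying a proxy UUID, the IFactory handle, and one sub-effect Descriptor.

    std::shared_ptr<EffectProxy> proxy =
            ndk::SharedRefBase::make<EffectProxy>(identity, factory);
    proxy->addSubEffect(desc);        // once per sub-effect descriptor sharing this proxy UUID
    proxy->create();
    Parameter::Common common;         // session/ioHandle/stream config as in createParamCommon()
    IEffect::OpenEffectReturn ret;
    proxy->open(common, std::nullopt, &ret);
    proxy->command(CommandId::START); // IDLE -> PROCESSING
    proxy->command(CommandId::STOP);  // PROCESSING -> IDLE
    proxy->close();
    proxy->destroy();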
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
new file mode 100644
index 0000000..63f895f
--- /dev/null
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <memory>
+#include <utility>
+#define LOG_TAG "EffectsFactoryHalInterfaceTest"
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_effects/audio_effects_utils.h>
+#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_agc.h>
+#include <system/audio_effects/effect_agc2.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_downmix.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_loudnessenhancer.h>
+#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effect.h>
+
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+#include <vibrator/ExternalVibrationUtils.h>
+
+namespace android {
+
+using effect::utils::EffectParamReader;
+using effect::utils::EffectParamWriter;
+using ::aidl::android::media::audio::common::AudioUuid;
+
+// EffectsFactoryHalInterface
+TEST(libAudioHalTest, createEffectsFactoryHalInterface) {
+    ASSERT_NE(nullptr, EffectsFactoryHalInterface::create());
+}
+
+TEST(libAudioHalTest, queryNumberEffects) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    uint32_t numEffects = 0;
+    EXPECT_EQ(OK, factory->queryNumberEffects(&numEffects));
+    EXPECT_NE(0ul, numEffects);
+}
+
+TEST(libAudioHalTest, getDescriptorByNumber) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    uint32_t numEffects = 0;
+    EXPECT_EQ(OK, factory->queryNumberEffects(&numEffects));
+    EXPECT_NE(0ul, numEffects);
+
+    effect_descriptor_t desc;
+    for (uint32_t i = 0; i < numEffects; i++) {
+        EXPECT_EQ(OK, factory->getDescriptor(i, &desc));
+    }
+}
+
+TEST(libAudioHalTest, createEffect) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    uint32_t numEffects = 0;
+    EXPECT_EQ(OK, factory->queryNumberEffects(&numEffects));
+    EXPECT_NE(0ul, numEffects);
+
+    effect_descriptor_t desc;
+    for (uint32_t i = 0; i < numEffects; i++) {
+        sp<EffectHalInterface> interface;
+        EXPECT_EQ(OK, factory->getDescriptor(i, &desc));
+        EXPECT_EQ(OK, factory->createEffect(&desc.uuid, 1 /* sessionId */, 1 /* ioId */,
+                                            1 /* deviceId */, &interface));
+    }
+}
+
+TEST(libAudioHalTest, getProcessings) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    const auto &processings = factory->getProcessings();
+    if (processings) {
+        EXPECT_NE(0UL, processings->preprocess.size() + processings->postprocess.size() +
+                               processings->deviceprocess.size());
+
+        auto processingChecker = [](const auto& processings) {
+            if (processings.size() != 0) {
+                // any process needs at least 1 effect inside
+                std::for_each(processings.begin(), processings.end(), [](const auto& process) {
+                    EXPECT_NE(0ul, process.effects.size());
+                    // any effect should have a valid name string and should not be a proxy
+                    for (const auto& effect : process.effects) {
+                        SCOPED_TRACE("Effect: {" +
+                                     (effect == nullptr
+                                              ? "NULL}"
+                                              : ("{name: " + effect->name + ", isproxy: " +
+                                                 (effect->isProxy ? "true" : "false") + ", sw: " +
+                                                 (effect->libSw ? "non-null" : "null") + ", hw: " +
+                                                 (effect->libHw ? "non-null" : "null") + "}")));
+                        EXPECT_NE(nullptr, effect);
+                        EXPECT_NE("", effect->name);
+                        EXPECT_EQ(false, effect->isProxy);
+                        EXPECT_EQ(nullptr, effect->libSw);
+                        EXPECT_EQ(nullptr, effect->libHw);
+                    }
+                });
+            }
+        };
+
+        processingChecker(processings->preprocess);
+        processingChecker(processings->postprocess);
+        processingChecker(processings->deviceprocess);
+    } else {
+        GTEST_SKIP() << "no processing found, skipping the test";
+    }
+}
+
+TEST(libAudioHalTest, getHalVersion) {
+    auto factory = EffectsFactoryHalInterface::create();
+    ASSERT_NE(nullptr, factory);
+
+    auto version = factory->getHalVersion();
+    EXPECT_NE(0, version.getMajorVersion());
+}
+
+class EffectParamCombination {
+  public:
+    template <typename P, typename V>
+    void init(const P& p, const V& v, size_t len) {
+        setBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
+        getBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+        expectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+        parameterSet =
+                std::make_shared<EffectParamReader>(createEffectParam(setBuffer.data(), p, v));
+        parameterGet =
+                std::make_shared<EffectParamReader>(createEffectParam(getBuffer.data(), p, v));
+        parameterExpect =
+                std::make_shared<EffectParamReader>(createEffectParam(expectBuffer.data(), p, v));
+        valueSize = len;
+    }
+
+    std::shared_ptr<EffectParamReader> parameterSet; /* setParameter */
+    std::shared_ptr<EffectParamReader> parameterGet; /* getParameter */
+    std::shared_ptr<EffectParamReader> parameterExpect; /* expected from getParameter */
+    size_t valueSize;   /* value size expected to be written into the reply data buffer */
+
+  private:
+    std::vector<uint8_t> setBuffer;
+    std::vector<uint8_t> getBuffer;
+    std::vector<uint8_t> expectBuffer;
+
+    template <typename P, typename V>
+    EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
+        effect_param_t* paramRet = (effect_param_t*)buf;
+        paramRet->psize = sizeof(P);
+        paramRet->vsize = sizeof(V);
+        EffectParamWriter writer(*paramRet);
+        EXPECT_EQ(OK, writer.writeToParameter(&p));
+        EXPECT_EQ(OK, writer.writeToValue(&v));
+        writer.finishValueWrite();
+        return writer;
+    }
+};
+
+template <typename P, typename V>
+std::shared_ptr<EffectParamCombination> createEffectParamCombination(const P& p, const V& v,
+                                                                     size_t len) {
+    auto comb = std::make_shared<EffectParamCombination>();
+    comb->init(p, v, len);
+    return comb;
+}
+
+enum ParamName { TUPLE_UUID, TUPLE_PARAM_COMBINATION };
+using EffectParamTestTuple =
+        std::tuple<const effect_uuid_t* /* type UUID */, std::shared_ptr<EffectParamCombination>>;
+
+static const effect_uuid_t EXTEND_EFFECT_TYPE_UUID = {
+        0xfa81dbde, 0x588b, 0x11ed, 0x9b6a, {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
+
+std::vector<EffectParamTestTuple> testPairs = {
+        std::make_tuple(FX_IID_AEC,
+                        createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
+                                                     sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(FX_IID_AGC,
+                        createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+                                                     sizeof(int16_t) /* returnValueSize */)),
+        std::make_tuple(FX_IID_AGC2, createEffectParamCombination(
+                                             AGC2_PARAM_FIXED_DIGITAL_GAIN, 15 /* digitalGainDb */,
+                                             sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(SL_IID_BASSBOOST,
+                        createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
+                                                     sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(EFFECT_UIID_DOWNMIX,
+                        createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
+                                                     sizeof(int16_t) /* returnValueSize */)),
+        std::make_tuple(SL_IID_DYNAMICSPROCESSING,
+                        createEffectParamCombination(
+                                std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
+                                30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(
+                FX_IID_HAPTICGENERATOR,
+                createEffectParamCombination(
+                        HG_PARAM_HAPTIC_INTENSITY,
+                        std::array<uint32_t, 2>(
+                                {1, uint32_t(::android::os::HapticScale::HIGH) /* scale */}),
+                        0 /* returnValueSize */)),
+        std::make_tuple(
+                FX_IID_LOUDNESS_ENHANCER,
+                createEffectParamCombination(LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
+                                             sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(FX_IID_NS,
+                        createEffectParamCombination(NS_PARAM_LEVEL, 1 /* level */,
+                                                     sizeof(int32_t) /* returnValueSize */)),
+        std::make_tuple(&EXTEND_EFFECT_TYPE_UUID,
+                        createEffectParamCombination(1, 0xbead, sizeof(int32_t)))};
+
+class libAudioHalEffectParamTest : public ::testing::TestWithParam<EffectParamTestTuple> {
+  public:
+    libAudioHalEffectParamTest()
+        : mParamTuple(GetParam()),
+          mFactory(EffectsFactoryHalInterface::create()),
+          mTypeUuid(std::get<TUPLE_UUID>(mParamTuple)),
+          mCombination(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
+          mExpectedValue([&]() {
+              std::vector<uint8_t> expectData(mCombination->valueSize);
+              mCombination->parameterExpect->readFromValue(expectData.data(),
+                                                           mCombination->valueSize);
+              return expectData;
+          }()),
+          mDescs([&]() {
+              std::vector<effect_descriptor_t> descs;
+              if (mFactory && mTypeUuid && OK == mFactory->getDescriptors(mTypeUuid, &descs)) {
+                  return descs;
+              }
+              return descs;
+          }()) {}
+
+    void SetUp() override {
+        ASSERT_NE(0ul, mDescs.size());
+        for (const auto& desc : mDescs) {
+            sp<EffectHalInterface> interface = createEffectHal(desc);
+            ASSERT_NE(nullptr, interface);
+            mHalInterfaces.push_back(interface);
+        }
+    }
+
+    void initEffect(const sp<EffectHalInterface>& interface) {
+        uint32_t initReply = 0;
+        uint32_t initReplySize = sizeof(initReply);
+        ASSERT_EQ(OK, interface->command(EFFECT_CMD_INIT, 0, nullptr, &initReplySize, &initReply));
+    }
+
+    void TearDown() override {
+        for (auto& interface : mHalInterfaces) {
+            interface->close();
+        }
+    }
+
+    sp<EffectHalInterface> createEffectHal(const effect_descriptor_t& desc) {
+        sp<EffectHalInterface> interface = nullptr;
+        if (0 == std::memcmp(&desc.type, mTypeUuid, sizeof(effect_uuid_t)) &&
+            OK == mFactory->createEffect(&desc.uuid, 1 /* sessionId */, 1 /* ioId */,
+                                         1 /* deviceId */, &interface)) {
+            return interface;
+        }
+        return nullptr;
+    }
+
+    void setAndGetParameter(const sp<EffectHalInterface>& interface) {
+        uint32_t replySize = sizeof(uint32_t);
+        uint8_t reply[replySize];
+        auto parameterSet = mCombination->parameterSet;
+        ASSERT_EQ(OK,
+                  interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)parameterSet->getTotalSize(),
+                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
+                                     &replySize, &reply))
+                << parameterSet->toString();
+        ASSERT_EQ(replySize, sizeof(uint32_t));
+
+        effect_param_t* getParam =
+                const_cast<effect_param_t*>(&mCombination->parameterGet->getEffectParam());
+        size_t maxReplySize = mCombination->valueSize + sizeof(effect_param_t) +
+                              parameterSet->getPaddedParameterSize();
+        replySize = maxReplySize;
+        EXPECT_EQ(OK,
+                  interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)parameterSet->getTotalSize(),
+                                     const_cast<effect_param_t*>(&parameterSet->getEffectParam()),
+                                     &replySize, getParam));
+        EffectParamReader parameterGet(*getParam);
+        EXPECT_EQ(replySize, parameterGet.getTotalSize()) << parameterGet.toString();
+        if (mCombination->valueSize) {
+            std::vector<uint8_t> response(mCombination->valueSize);
+            EXPECT_EQ(OK, parameterGet.readFromValue(response.data(), mCombination->valueSize))
+                    << " try get valueSize " << mCombination->valueSize << " from "
+                    << parameterGet.toString();
+            EXPECT_EQ(response, mExpectedValue);
+        }
+    }
+
+    const EffectParamTestTuple mParamTuple;
+    const sp<EffectsFactoryHalInterface> mFactory;
+    const effect_uuid_t* mTypeUuid;
+    std::shared_ptr<EffectParamCombination> mCombination;
+    const std::vector<uint8_t> mExpectedValue;
+    const std::vector<effect_descriptor_t> mDescs;
+    std::vector<sp<EffectHalInterface>> mHalInterfaces;
+};
+
+TEST_P(libAudioHalEffectParamTest, setAndGetParam) {
+    for (auto& interface : mHalInterfaces) {
+        EXPECT_NO_FATAL_FAILURE(initEffect(interface));
+        EXPECT_NO_FATAL_FAILURE(setAndGetParameter(interface));
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        libAudioHalEffectParamTest, libAudioHalEffectParamTest, ::testing::ValuesIn(testPairs),
+        [](const testing::TestParamInfo<libAudioHalEffectParamTest::ParamType>& info) {
+            AudioUuid uuid = ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(
+                                     *std::get<TUPLE_UUID>(info.param))
+                                     .value();
+            std::string name = "UUID_" + uuid.toString();
+            std::replace_if(
+                    name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
+            return name;
+        });
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(libAudioHalEffectParamTest);
+
+// TODO: b/263986405 Add multi-thread testing
+
+} // namespace android
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    return RUN_ALL_TESTS();
+}
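[editor's note] EffectParamCombination above sizes its set/get/expect buffers around the effect_param_t layout from system/audio_effect.h: the serialized parameter comes first and is padded to a 32-bit boundary, followed by the value. A minimal sketch of that size computation, assuming the conventional 32-bit padding (the extra "+ 4" in the test simply leaves slack for that padding):

    // effect_param_t: { status, psize, vsize, data[] }, data = parameter | padding | value
    size_t effectParamTotalSize(uint32_t psize, uint32_t vsize) {
        const size_t paddedPsize = ((psize + 3) / 4) * 4;  // parameter area rounded up to 4 bytes
        return sizeof(effect_param_t) + paddedPsize + vsize;
    }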
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index e6fdb1d..57b860d 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -116,6 +116,9 @@
         track->mKeepContractedChannels = false;
     }
 
+    track->mInputFrameSize = audio_bytes_per_frame(
+            track->channelCount + track->mHapticChannelCount, track->mFormat);
+
     // channel masks have changed, does this track need a downmixer?
     // update to try using our desired format (if we aren't already using it)
     const status_t status = track->prepareForDownmix();
@@ -190,7 +193,7 @@
 
     // See if we should use our built-in non-effect downmixer.
     if (mMixerInFormat == AUDIO_FORMAT_PCM_FLOAT
-            && mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO
+            && ChannelMixBufferProvider::isOutputChannelMaskSupported(mMixerChannelMask)
             && audio_channel_mask_get_representation(channelMask)
                     == AUDIO_CHANNEL_REPRESENTATION_POSITION) {
         mDownmixerBufferProvider.reset(new ChannelMixBufferProvider(channelMask,
@@ -297,6 +300,26 @@
     return NO_ERROR;
 }
 
+void AudioMixer::Track::unprepareForTee() {
+    ALOGV("AudioMixer::%s", __func__);
+    if (mTeeBufferProvider.get() != nullptr) {
+        mTeeBufferProvider.reset(nullptr);
+        reconfigureBufferProviders();
+    }
+}
+
+status_t AudioMixer::Track::prepareForTee() {
+    ALOGV("AudioMixer::%s(%p) teeBuffer=%p", __func__, this, teeBuffer);
+    unprepareForTee();
+    if (teeBuffer != nullptr) {
+        mTeeBufferProvider.reset(new TeeBufferProvider(
+                mInputFrameSize, mInputFrameSize, kCopyBufferFrameCount,
+                (uint8_t*)teeBuffer, mTeeBufferFrameCount));
+        reconfigureBufferProviders();
+    }
+    return NO_ERROR;
+}
+
 void AudioMixer::Track::clearContractedBuffer()
 {
     if (mAdjustChannelsBufferProvider.get() != nullptr) {
@@ -305,10 +328,20 @@
     }
 }
 
+void AudioMixer::Track::clearTeeFrameCopied() {
+    if (mTeeBufferProvider.get() != nullptr) {
+        static_cast<TeeBufferProvider*>(mTeeBufferProvider.get())->clearFramesCopied();
+    }
+}
+
 void AudioMixer::Track::reconfigureBufferProviders()
 {
     // configure from upstream to downstream buffer providers.
     bufferProvider = mInputBufferProvider;
+    if (mTeeBufferProvider != nullptr) {
+        mTeeBufferProvider->setBufferProvider(bufferProvider);
+        bufferProvider = mTeeBufferProvider.get();
+    }
     if (mAdjustChannelsBufferProvider.get() != nullptr) {
         mAdjustChannelsBufferProvider->setBufferProvider(bufferProvider);
         bufferProvider = mAdjustChannelsBufferProvider.get();
@@ -420,6 +453,20 @@
                 track->mHapticMaxAmplitude = hapticMaxAmplitude;
             }
             } break;
+        case TEE_BUFFER:
+            if (track->teeBuffer != valueBuf) {
+                track->teeBuffer = valueBuf;
+                ALOGV("setParameter(TRACK, TEE_BUFFER, %p)", valueBuf);
+                track->prepareForTee();
+            }
+            break;
+        case TEE_BUFFER_FRAME_COUNT:
+            if (track->mTeeBufferFrameCount != valueInt) {
+                track->mTeeBufferFrameCount = valueInt;
+                ALOGV("setParameter(TRACK, TEE_BUFFER_FRAME_COUNT, %i)", valueInt);
+                track->prepareForTee();
+            }
+            break;
         default:
             LOG_ALWAYS_FATAL("setParameter track: bad param %d", param);
         }
@@ -500,6 +547,8 @@
         track->mReformatBufferProvider->reset();
     } else if (track->mAdjustChannelsBufferProvider.get() != nullptr) {
         track->mAdjustChannelsBufferProvider->reset();
+    } else if (track->mTeeBufferProvider.get() != nullptr) {
+        track->mTeeBufferProvider->reset();
     }
 
     track->mInputBufferProvider = bufferProvider;
@@ -543,6 +592,8 @@
     t->mAdjustInChannelCount = t->channelCount + t->mHapticChannelCount;
     t->mAdjustOutChannelCount = t->channelCount;
     t->mKeepContractedChannels = false;
+    t->mInputFrameSize = audio_bytes_per_frame(
+            t->channelCount + t->mHapticChannelCount, t->mFormat);
     // Check the downmixing (or upmixing) requirements.
     status_t status = t->prepareForDownmix();
     if (status != OK) {
@@ -565,6 +616,7 @@
         if (t->mKeepContractedChannels) {
             t->clearContractedBuffer();
         }
+        t->clearTeeFrameCopied();
     }
 }
 
@@ -593,6 +645,10 @@
                 }
                 break;
             }
+            if (t->teeBuffer != nullptr && t->volumeRL == 0) {
+                // Need to mute tee
+                memset(t->teeBuffer, 0, t->mTeeBufferFrameCount * t->mInputFrameSize);
+            }
         }
     }
 }
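[editor's note] Caller-side, the new tee path is configured through the existing setParameter() plumbing. A hypothetical sketch is shown below; mixer, trackName, teeBuf and kTeeFrames are placeholders and not part of this change, and the frame count is assumed to travel through the void* argument the same way other integer track parameters do.

    mixer->setParameter(trackName, AudioMixer::TRACK, AudioMixer::TEE_BUFFER,
            static_cast<void *>(teeBuf));
    mixer->setParameter(trackName, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
            reinterpret_cast<void *>(static_cast<uintptr_t>(kTeeFrames)));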
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index f30eb54..3d11d92 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -143,6 +143,7 @@
         // setParameter(name, TRACK, MAIN_BUFFER, mixBuffer) is required before enable(name)
         t->mainBuffer = NULL;
         t->auxBuffer = NULL;
+        t->teeBuffer = nullptr;
         t->mMixerFormat = AUDIO_FORMAT_PCM_16_BIT;
         t->mFormat = format;
         t->mMixerInFormat = kUseFloat && kUseNewMixer ?
@@ -150,6 +151,8 @@
         t->mMixerChannelMask = audio_channel_mask_from_representation_and_bits(
                 AUDIO_CHANNEL_REPRESENTATION_POSITION, AUDIO_CHANNEL_OUT_STEREO);
         t->mMixerChannelCount = audio_channel_count_from_out_mask(t->mMixerChannelMask);
+        t->mTeeBufferFrameCount = 0;
+        t->mInputFrameSize = audio_bytes_per_frame(t->channelCount, t->mFormat);
         status_t status = postCreateTrack(t.get());
         if (status != OK) return status;
         mTracks[name] = t;
@@ -176,6 +179,7 @@
     track->channelCount = trackChannelCount;
     track->mMixerChannelMask = mixerChannelMask;
     track->mMixerChannelCount = mixerChannelCount;
+    track->mInputFrameSize = audio_bytes_per_frame(track->channelCount, track->mFormat);
 
     // Resampler channels may have changed.
     track->recreateResampler(mSampleRate);
@@ -401,6 +405,20 @@
                 invalidate();
             }
             } break;
+        case TEE_BUFFER:
+            if (track->teeBuffer != valueBuf) {
+                track->teeBuffer = valueBuf;
+                ALOGV("setParameter(TRACK, TEE_BUFFER, %p)", valueBuf);
+                invalidate();
+            }
+            break;
+        case TEE_BUFFER_FRAME_COUNT:
+            if (track->mTeeBufferFrameCount != valueInt) {
+                track->mTeeBufferFrameCount = valueInt;
+                ALOGV("setParameter(TRACK, TEE_BUFFER_FRAME_COUNT, %i)", valueInt);
+                invalidate();
+            }
+            break;
         default:
             LOG_ALWAYS_FATAL("setParameter track: bad param %d", param);
         }
@@ -453,9 +471,9 @@
                         &track->mVolume[param - VOLUME0],
                         &track->mPrevVolume[param - VOLUME0],
                         &track->mVolumeInc[param - VOLUME0])) {
-                    ALOGV("setParameter(%s, VOLUME%d: %04x)",
-                            target == VOLUME ? "VOLUME" : "RAMP_VOLUME", param - VOLUME0,
-                                    track->volume[param - VOLUME0]);
+                    ALOGV("setParameter(%s, VOLUME%d: %f)",
+                          target == VOLUME ? "VOLUME" : "RAMP_VOLUME", param - VOLUME0,
+                          track->mVolume[param - VOLUME0]);
                     invalidate();
                 }
             } else {
@@ -630,7 +648,7 @@
 
         if (t->volumeInc[0]|t->volumeInc[1]) {
             volumeRamp = true;
-        } else if (!t->doesResample() && t->volumeRL == 0) {
+        } else if (!t->doesResample() && t->isVolumeMuted()) {
             n |= NEEDS_MUTE;
         }
         t->needs = n;
@@ -730,7 +748,7 @@
 
         for (const int name : mEnabled) {
             const std::shared_ptr<TrackBase> &t = mTracks[name];
-            if (!t->doesResample() && t->volumeRL == 0) {
+            if (!t->doesResample() && t->isVolumeMuted()) {
                 t->needs |= NEEDS_MUTE;
                 t->hook = &TrackBase::track__nop;
             } else {
diff --git a/media/libaudioprocessing/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
index 4658db8..9f19f7b 100644
--- a/media/libaudioprocessing/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -373,18 +373,23 @@
                 audio_bytes_per_sample(format)
                     * audio_channel_count_from_out_mask(outputChannelMask),
                 bufferFrameCount)
+        , mChannelMix{format == AUDIO_FORMAT_PCM_FLOAT
+                ? audio_utils::channels::IChannelMix::create(outputChannelMask) : nullptr}
+        , mIsValid{mChannelMix && mChannelMix->setInputChannelMask(inputChannelMask)}
 {
     ALOGV("ChannelMixBufferProvider(%p)(%#x, %#x, %#x)",
             this, format, inputChannelMask, outputChannelMask);
-    if (outputChannelMask == AUDIO_CHANNEL_OUT_STEREO && format == AUDIO_FORMAT_PCM_FLOAT) {
-        mIsValid = mChannelMix.setInputChannelMask(inputChannelMask);
-    }
 }
 
 void ChannelMixBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
 {
-    mChannelMix.process(static_cast<const float *>(src), static_cast<float *>(dst),
-            frames, false /* accumulate */);
+    if (mIsValid) {
+        mChannelMix->process(static_cast<const float *>(src), static_cast<float *>(dst),
+                frames, false /* accumulate */);
+    } else {
+        // The caller should fall back to a different BufferProvider if this one is not valid.
+        ALOGE("%s: used while not valid!", __func__);
+    }
 }
 
 ReformatBufferProvider::ReformatBufferProvider(int32_t channelCount,
@@ -739,5 +744,21 @@
     mContractedWrittenFrames = 0;
     CopyBufferProvider::reset();
 }
+
+void TeeBufferProvider::copyFrames(void *dst, const void *src, size_t frames) {
+    memcpy(dst, src, frames * mInputFrameSize);
+    if (int teeBufferFrameLeft = mTeeBufferFrameCount - mFrameCopied; teeBufferFrameLeft < frames) {
+        ALOGW("Unable to copy all frames to tee buffer, %d frames dropped",
+              (int)frames - teeBufferFrameLeft);
+        frames = teeBufferFrameLeft;
+    }
+    memcpy(mTeeBuffer + mFrameCopied * mInputFrameSize, src, frames * mInputFrameSize);
+    mFrameCopied += frames;
+}
+
+void TeeBufferProvider::clearFramesCopied() {
+    mFrameCopied = 0;
+}
+
 // ----------------------------------------------------------------------------
 } // namespace android
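[editor's note] With the channel-mix logic now behind IChannelMix, a provider built for an unsupported format/mask pair simply reports !isValid(), and the caller is expected to pick another downmixer. A hedged caller-side sketch (inMask, outMask and kFrames are placeholders; the argument order mirrors the AudioMixer call site above):

    auto provider = std::make_unique<ChannelMixBufferProvider>(
            inMask, outMask, AUDIO_FORMAT_PCM_FLOAT, kFrames);
    if (!provider->isValid()) {
        // fall back to another PassthruBufferProvider (e.g. the effect-based downmixer)
    }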
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/media/libaudioprocessing/TEST_MAPPING
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index 2993a60..b39fb92 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -96,7 +96,10 @@
         void        unprepareForReformat();
         status_t    prepareForAdjustChannels(size_t frames);
         void        unprepareForAdjustChannels();
+        void        unprepareForTee();
+        status_t    prepareForTee();
         void        clearContractedBuffer();
+        void        clearTeeFrameCopied();
         bool        setPlaybackRate(const AudioPlaybackRate &playbackRate);
         void        reconfigureBufferProviders();
 
@@ -108,20 +111,22 @@
          * all pre-mixer track buffer conversions outside the AudioMixer class.
          *
          * 1) mInputBufferProvider: The AudioTrack buffer provider.
-         * 2) mAdjustChannelsBufferProvider: Expands or contracts sample data from one interleaved
+         * 2) mTeeBufferProvider: If not NULL, copies the input data to the tee buffer.
+         * 3) mAdjustChannelsBufferProvider: Expands or contracts sample data from one interleaved
          *    channel format to another. Expanded channels are filled with zeros and put at the end
          *    of each audio frame. Contracted channels are copied to the end of the buffer.
-         * 3) mReformatBufferProvider: If not NULL, performs the audio reformat to
+         * 4) mReformatBufferProvider: If not NULL, performs the audio reformat to
          *    match either mMixerInFormat or mDownmixRequiresFormat, if the downmixer
          *    requires reformat. For example, it may convert floating point input to
          *    PCM_16_bit if that's required by the downmixer.
-         * 4) mDownmixerBufferProvider: If not NULL, performs the channel remixing to match
+         * 5) mDownmixerBufferProvider: If not NULL, performs the channel remixing to match
          *    the number of channels required by the mixer sink.
-         * 5) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from
+         * 6) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from
          *    the downmixer requirements to the mixer engine input requirements.
-         * 6) mTimestretchBufferProvider: Adds timestretching for playback rate
+         * 7) mTimestretchBufferProvider: Adds timestretching for playback rate
          */
         AudioBufferProvider* mInputBufferProvider;    // externally provided buffer provider.
+        std::unique_ptr<PassthruBufferProvider> mTeeBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mAdjustChannelsBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mReformatBufferProvider;
         std::unique_ptr<PassthruBufferProvider> mDownmixerBufferProvider;
diff --git a/media/libaudioprocessing/include/media/AudioMixerBase.h b/media/libaudioprocessing/include/media/AudioMixerBase.h
index 3419816..b44ff20 100644
--- a/media/libaudioprocessing/include/media/AudioMixerBase.h
+++ b/media/libaudioprocessing/include/media/AudioMixerBase.h
@@ -68,6 +68,10 @@
         // 0x4004 reserved
         MIXER_FORMAT    = 0x4005, // AUDIO_FORMAT_PCM_(FLOAT|16_BIT)
         MIXER_CHANNEL_MASK = 0x4006, // Channel mask for mixer output
+        // 0x4007, 0x4008, 0x4009 are defined for haptic parameters in AudioMixer.h
+        TEE_BUFFER = 0x400A,
+        TEE_BUFFER_FORMAT = 0x400B,
+        TEE_BUFFER_FRAME_COUNT = 0x400C,
         // for target RESAMPLE
         SAMPLE_RATE     = 0x4100, // Configure sample rate conversion on this track name;
                                   // parameter 'value' is the new sample rate in Hz.
@@ -271,6 +275,7 @@
         uint32_t    sampleRate;
         int32_t*    mainBuffer;
         int32_t*    auxBuffer;
+        int32_t*    teeBuffer;
 
         int32_t     sessionId;
 
@@ -290,6 +295,20 @@
         audio_channel_mask_t mMixerChannelMask;
         uint32_t             mMixerChannelCount;
 
+        int32_t        mTeeBufferFrameCount;
+
+        uint32_t       mInputFrameSize; // The track input frame size, used for tee buffer
+
+        // consider the volume muted only if all channel volumes (floating point) are 0.f
+        inline bool isVolumeMuted() const {
+            for (const auto volume : mVolume) {
+                if (volume != 0) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
       protected:
 
         // hooks
diff --git a/media/libaudioprocessing/include/media/BufferProviders.h b/media/libaudioprocessing/include/media/BufferProviders.h
index b3ab8a5..8d18010 100644
--- a/media/libaudioprocessing/include/media/BufferProviders.h
+++ b/media/libaudioprocessing/include/media/BufferProviders.h
@@ -142,9 +142,14 @@
 
     bool isValid() const { return mIsValid; }
 
+    static bool isOutputChannelMaskSupported(audio_channel_mask_t outputChannelMask) {
+        return audio_utils::channels::IChannelMix::isOutputChannelMaskSupported(
+                outputChannelMask);
+    }
+
 protected:
-    audio_utils::channels::ChannelMix mChannelMix;
-    bool mIsValid = false;
+    const std::shared_ptr<audio_utils::channels::IChannelMix> mChannelMix;
+    const bool mIsValid;
 };
 
 // RemixBufferProvider derives from CopyBufferProvider to perform an
@@ -279,6 +284,27 @@
     size_t               mContractedWrittenFrames;
     size_t               mContractedOutputFrameSize; // contracted output frame size
 };
+
+class TeeBufferProvider : public CopyBufferProvider {
+public:
+    TeeBufferProvider(
+            size_t inputFrameSize, size_t outputFrameSize,
+            size_t bufferFrameCount, uint8_t* teeBuffer, int teeBufferFrameCount)
+            : CopyBufferProvider(inputFrameSize, outputFrameSize, bufferFrameCount),
+              mTeeBuffer(teeBuffer), mTeeBufferFrameCount(teeBufferFrameCount),
+              mFrameCopied(0) {};
+
+    void copyFrames(void *dst, const void *src, size_t frames) override;
+
+    void clearFramesCopied();
+
+protected:
+    AudioBufferProvider *mTrackBufferProvider;
+    uint8_t* mTeeBuffer;
+    const int mTeeBufferFrameCount;
+    int mFrameCopied;
+};
+
 // ----------------------------------------------------------------------------
 } // namespace android
 
diff --git a/media/libaudioprocessing/tests/test-resampler.cpp b/media/libaudioprocessing/tests/test-resampler.cpp
index f178bde..2166a83 100644
--- a/media/libaudioprocessing/tests/test-resampler.cpp
+++ b/media/libaudioprocessing/tests/test-resampler.cpp
@@ -18,6 +18,7 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <fcntl.h>
+#include <memory>
 #include <string.h>
 #include <sys/mman.h>
 #include <sys/stat.h>
@@ -474,7 +475,7 @@
     // mono takes left channel only (out of stereo output pair)
     // stereo and multichannel preserve all channels.
     int32_t* out = (int32_t*) output_vaddr;
-    int16_t* convert = (int16_t*) malloc(output_frames * channels * sizeof(int16_t));
+    std::unique_ptr<int16_t[]> convert(new int16_t[output_frames * channels]);
 
     const int volumeShift = 12; // shift requirement for Q4.27 to Q.15
     // round to half towards zero and saturate at int16 (non-dithered)
@@ -509,7 +510,7 @@
         perror(file_out);
         return EXIT_FAILURE;
     }
-    (void) sf_writef_short(sf, convert, output_frames);
+    (void) sf_writef_short(sf, convert.get(), output_frames);
     sf_close(sf);
 
     return EXIT_SUCCESS;
diff --git a/media/libaudiousecasevalidation/Android.bp b/media/libaudiousecasevalidation/Android.bp
new file mode 100644
index 0000000..3ee7e32
--- /dev/null
+++ b/media/libaudiousecasevalidation/Android.bp
@@ -0,0 +1,49 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libaudiousecasevalidation",
+    host_supported: true,
+    srcs: [
+        "UsecaseLookup.cpp",
+        "UsecaseValidator.cpp",
+    ],
+    header_libs: [
+        "liberror_headers",
+    ],
+    shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libaudioutils",
+        "libbase",
+        "liblog",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_test_host {
+    name: "libaudiousecasevalidation-test",
+    srcs: [
+        "tests/UsecaseValidator-test.cpp",
+    ],
+    header_libs: [
+        "liberror_headers",
+    ],
+    shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libaudiousecasevalidation",
+        "libutils",
+    ],
+}
diff --git a/media/libaudiousecasevalidation/UsecaseLookup.cpp b/media/libaudiousecasevalidation/UsecaseLookup.cpp
new file mode 100644
index 0000000..01e667f
--- /dev/null
+++ b/media/libaudiousecasevalidation/UsecaseLookup.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "UsecaseLookup"
+// #define LOG_NDEBUG 0
+
+#include "media/UsecaseLookup.h"
+
+#include <utils/Log.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Add streamId and its output flag state to the stream list.
+ */
+void UsecaseLookup::addStream(STREAMID streamId, bool outputFlagGame) {
+    ALOGV("%s streamId: %d outputFlagGame: %d", __func__, streamId, outputFlagGame);
+
+    mutex_lock lock(m_mutex);
+    m_streams[streamId] = outputFlagGame;
+}
+
+/**
+ * Remove streamId from stream list.
+ */
+void UsecaseLookup::removeStream(STREAMID streamId) {
+    ALOGV("%s streamId: %d ", __func__, streamId);
+
+    mutex_lock lock(m_mutex);
+    m_streams.erase(streamId);
+
+    // Shouldn't happen but it might.
+    for (auto it = m_tracks.begin(); it != m_tracks.end();) {
+        if (it->second == streamId) {
+            it = m_tracks.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
+
+/**
+ * Add streamId and portId to track list.
+ */
+void UsecaseLookup::addTrack(STREAMID streamId, PORTID portId) {
+    ALOGV("%s streamId: %d portId: %d", __func__, streamId, portId);
+
+    mutex_lock lock(m_mutex);
+
+    if (m_tracks.find(portId) == m_tracks.end()) {
+        m_tracks[portId] = streamId;
+    }
+}
+
+/**
+ * Remove streamId and portId from track list.
+ */
+void UsecaseLookup::removeTrack(STREAMID streamId, PORTID portId) {
+    ALOGV("%s streamId: %d portId: %d", __func__, streamId, portId);
+
+    mutex_lock lock(m_mutex);
+    auto it = m_tracks.find(portId);
+
+    if (it != m_tracks.end() && it->second == streamId) {
+        m_tracks.erase(portId);
+    }
+}
+
+/**
+ * Check if stream list contains streamId with Game outputFlag.
+ */
+bool UsecaseLookup::isGameStream(STREAMID streamId) {
+    ALOGV("%s streamId: %d ", __func__, streamId);
+    mutex_lock lock(m_mutex);
+    auto it = m_streams.find(streamId);
+
+    return (it != m_streams.end()) ? it->second : false;
+}
+
+}  // namespace media
+}  // namespace android
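[editor's note] A minimal usage sketch of the lookup added above (the stream and port ids are illustrative):

    UsecaseLookup lookup;
    lookup.addStream(/* streamId */ 10, /* outputFlagGame */ true);  // FAST or MMAP output
    lookup.addTrack(/* streamId */ 10, /* portId */ 42);
    const bool game = lookup.isGameStream(10);                       // true
    lookup.removeTrack(10, 42);
    lookup.removeStream(10);                                         // also drops stale tracks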
diff --git a/media/libaudiousecasevalidation/UsecaseValidator.cpp b/media/libaudiousecasevalidation/UsecaseValidator.cpp
new file mode 100644
index 0000000..bf532de
--- /dev/null
+++ b/media/libaudiousecasevalidation/UsecaseValidator.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "UsecaseValidator"
+// #define LOG_NDEBUG 0
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+
+#include "media/UsecaseValidator.h"
+#include "media/UsecaseLookup.h"
+
+namespace android {
+namespace media {
+namespace {
+
+class UsecaseValidatorImpl : public UsecaseValidator {
+ public:
+    UsecaseValidatorImpl() {}
+
+    /**
+     * Register a new mixer/stream.
+     * Called when the stream is opened at the HAL and communicates
+     * immutable stream attributes like flags, sampling rate, format.
+     */
+    status_t registerStream(audio_io_handle_t streamId,
+                            const audio_config_base_t& audioConfig __attribute__((unused)),
+                            const audio_output_flags_t outputFlags) override {
+        ALOGV("%s output: %d flags: %#x", __func__, streamId, outputFlags);
+
+        // Check if FAST or MMAP output flag has been set.
+        bool outputFlagGame = outputFlags & (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+        m_lookup.addStream(streamId, outputFlagGame);
+        return OK;
+    };
+
+    /**
+     * Unregister a stream/mixer.
+     * Called when the stream is closed.
+     */
+    status_t unregisterStream(audio_io_handle_t streamId) override {
+        ALOGV("%s output: %d", __func__, streamId);
+
+        m_lookup.removeStream(streamId);
+        return OK;
+    };
+
+    /**
+     * Indicates that some playback activity started on the stream.
+     * Called each time an audio track starts or resumes.
+     */
+    error::Result<audio_attributes_t> startClient(audio_io_handle_t streamId,
+            audio_port_handle_t portId, const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes,
+            const AttributesChangedCallback *callback __attribute__((unused))) override {
+        ALOGV("%s output: %d portId: %d usage: %d pid: %d package: %s",
+                __func__, streamId, portId, attributes.usage, attributionSource.pid,
+                attributionSource.packageName.value_or("").c_str());
+
+        m_lookup.addTrack(streamId, portId);
+
+        return verifyAudioAttributes(streamId, attributionSource, attributes);
+    };
+
+    /**
+     * Indicates that some playback activity stopped on the stream.
+     * Called each time an audio track stops or pauses.
+     */
+    status_t stopClient(audio_io_handle_t streamId, audio_port_handle_t portId) override {
+        ALOGV("%s output: %d portId: %d", __func__, streamId, portId);
+
+        m_lookup.removeTrack(streamId, portId);
+        return OK;
+    };
+
+    /**
+     * Called to verify and update audio attributes for a track that is connected
+     * to the specified stream.
+     */
+    error::Result<audio_attributes_t> verifyAudioAttributes(audio_io_handle_t streamId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes) override {
+        ALOGV("%s output: %d usage: %d pid: %d package: %s",
+                __func__, streamId, attributes.usage, attributionSource.pid,
+                attributionSource.packageName.value_or("").c_str());
+
+        audio_attributes_t attrRet = attributes;
+
+        if (isUsageValid(attributes.usage) && isContentTypeValid(attributes.content_type)
+                && areFlagsValid(attributes.flags) && m_lookup.isGameStream(streamId)) {
+            ALOGI("%s update usage: %d to AUDIO_USAGE_GAME for output: %d pid: %d package: %s",
+                    __func__, attributes.usage, streamId, attributionSource.pid,
+                    attributionSource.packageName.value_or("").c_str());
+            // Set attribute usage Game.
+            attrRet.usage = AUDIO_USAGE_GAME;
+        }
+
+        return {attrRet};
+    };
+
+ protected:
+    /**
+     * Check if the attribute usage is valid.
+     */
+    bool isUsageValid(audio_usage_t usage) {
+        ALOGV("isUsageValid usage: %d", usage);
+        switch (usage) {
+            case AUDIO_USAGE_MEDIA:
+            case AUDIO_USAGE_UNKNOWN:
+                return true;
+            default:
+                break;
+        }
+        return false;
+    }
+
+    bool isContentTypeValid(audio_content_type_t contentType) {
+        ALOGV("isContentTypeValid contentType: %d", contentType);
+        switch (contentType) {
+            case AUDIO_CONTENT_TYPE_MUSIC:
+            case AUDIO_CONTENT_TYPE_MOVIE:
+            case AUDIO_CONTENT_TYPE_UNKNOWN:
+                return true;
+            default:
+                break;
+        }
+        return false;
+    }
+
+    bool areFlagsValid(audio_flags_mask_t flags) {
+        ALOGV("areFlagsValid flags: %#x", flags);
+        if ((flags & (AUDIO_FLAG_SCO|AUDIO_FLAG_AUDIBILITY_ENFORCED|AUDIO_FLAG_BEACON)) != 0) {
+            return false;
+        }
+        if ((flags & AUDIO_FLAG_LOW_LATENCY) != 0) {
+            return true;
+        }
+        return false;
+    }
+
+ protected:
+    UsecaseLookup m_lookup;
+};
+
+}  // namespace
+
+std::unique_ptr<UsecaseValidator> createUsecaseValidator() {
+    return std::make_unique<UsecaseValidatorImpl>();
+}
+
+}  // namespace media
+}  // namespace android
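A minimal usage sketch of the UsecaseValidator API defined above. This is illustrative caller code only, not part of the change; the stream id, port id, and attribution source are made up, and error handling is elided.

    #include <media/UsecaseValidator.h>

    using namespace android;
    using namespace android::media;

    void exampleUsage(const content::AttributionSourceState& attributionSource) {
        std::unique_ptr<UsecaseValidator> validator = createUsecaseValidator();

        // Register a FAST/MMAP output when the HAL opens it.
        audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
        validator->registerStream(42 /* streamId */, config, AUDIO_OUTPUT_FLAG_FAST);

        // Report a low-latency media track starting on that output; the validator
        // may rewrite its usage to AUDIO_USAGE_GAME.
        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
        attr.usage = AUDIO_USAGE_MEDIA;
        attr.flags = AUDIO_FLAG_LOW_LATENCY;
        auto result = validator->startClient(42, 1001 /* portId */, attributionSource,
                                             attr, nullptr /* callback */);
        audio_attributes_t effective = result.value();  // usage may now be AUDIO_USAGE_GAME

        validator->stopClient(42, 1001);
        validator->unregisterStream(42);
    }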
diff --git a/media/libaudiousecasevalidation/include/media/UsecaseLookup.h b/media/libaudiousecasevalidation/include/media/UsecaseLookup.h
new file mode 100644
index 0000000..a35d88d
--- /dev/null
+++ b/media/libaudiousecasevalidation/include/media/UsecaseLookup.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+
+namespace android {
+namespace media {
+
+typedef int STREAMID;
+typedef int PORTID;
+
+// Map of streamId to whether the stream was opened with game (FAST/MMAP) output flags.
+typedef std::map<STREAMID, bool> STREAMLIST;
+// Map of portId to the streamId it is attached to.
+typedef std::map<PORTID, STREAMID> TRACKLIST;
+typedef std::lock_guard<std::mutex> mutex_lock;
+
+class UsecaseLookup {
+ public:
+    UsecaseLookup() { }
+    virtual ~UsecaseLookup() { }
+
+    // Required for testing.
+    void clear() {
+        m_streams.clear();
+        m_tracks.clear();
+    }
+
+    /**
+     * Add streamId and outputFlag to stream list.
+     */
+    void addStream(STREAMID streamId, bool outputFlagGame = false);
+
+    /**
+     * Remove streamId from stream list.
+     */
+    void removeStream(STREAMID streamId);
+
+    /**
+     * Add streamId and portId to track list.
+     */
+    void addTrack(STREAMID streamId, PORTID portId);
+
+    /**
+     * Remove streamId and portId from track list.
+     */
+    void removeTrack(STREAMID streamId, PORTID portId);
+
+    /**
+     * Check if stream list contains streamId with Game output flag.
+     */
+    bool isGameStream(STREAMID streamId);
+
+ protected:
+    STREAMLIST m_streams;
+    TRACKLIST m_tracks;
+    std::mutex m_mutex;
+};
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASELOOKUP_H_
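The header above only declares the lookup. For orientation, isGameStream() is expected to reduce to a mutex-guarded map lookup along these lines; this is a sketch consistent with the declarations above, not the authoritative implementation (which lives in UsecaseLookup.cpp).

    // Sketch, inside namespace android::media: return the recorded game flag, if any.
    bool UsecaseLookup::isGameStream(STREAMID streamId) {
        mutex_lock lock(m_mutex);
        auto it = m_streams.find(streamId);
        return it != m_streams.end() ? it->second : false;
    }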
diff --git a/media/libaudiousecasevalidation/include/media/UsecaseValidator.h b/media/libaudiousecasevalidation/include/media/UsecaseValidator.h
new file mode 100644
index 0000000..2e1d7f4
--- /dev/null
+++ b/media/libaudiousecasevalidation/include/media/UsecaseValidator.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
+
+#pragma once
+
+#include <error/Result.h>
+#include <system/audio.h>
+#include <android/content/AttributionSourceState.h>
+
+#include <limits>
+#include <memory>
+
+namespace android {
+namespace media {
+
+/**
+ * Main entry-point for this library.
+ */
+class UsecaseValidator {
+ public:
+    virtual ~UsecaseValidator() = default;
+
+    /**
+     * A callback invoked by the module when the audio attributes of
+     * an active portId change.
+     */
+    class AttributesChangedCallback {
+     public:
+        virtual ~AttributesChangedCallback() = default;
+        virtual void onAttributesChanged(audio_port_handle_t portId,
+                                         const audio_attributes_t& attributes) = 0;
+    };
+
+    /**
+     * Register a new mixer/stream.
+     * Called when the stream is opened at the HAL and communicates
+     * immutable stream attributes like flags, sampling rate, format.
+     */
+    virtual status_t registerStream(audio_io_handle_t streamId,
+                                    const audio_config_base_t& audioConfig,
+                                    const audio_output_flags_t outputFlags) = 0;
+
+    /**
+     * Unregister a stream/mixer.
+     * Called when the stream is closed.
+     */
+    virtual status_t unregisterStream(audio_io_handle_t streamId) = 0;
+
+    /**
+     * Indicates that some playback activity started on the stream.
+     * Called each time an audio track starts or resumes.
+     */
+    virtual error::Result<audio_attributes_t> startClient(audio_io_handle_t streamId,
+            audio_port_handle_t portId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes,
+            const AttributesChangedCallback *callback) = 0;
+
+    /**
+     * Indicates that some playback activity stopped on the stream.
+     * Called each time an audio track stops or pauses.
+     */
+    virtual status_t stopClient(audio_io_handle_t streamId, audio_port_handle_t portId) = 0;
+
+    /**
+     * Called to verify and update audio attributes for a track that is connected
+     * to the specified stream.
+     */
+    virtual error::Result<audio_attributes_t> verifyAudioAttributes(audio_io_handle_t streamId,
+            const content::AttributionSourceState& attributionSource,
+            const audio_attributes_t& attributes) = 0;
+};
+
+/**
+ * Creates an instance of the default implementation of the UsecaseValidator interface.
+ */
+std::unique_ptr<UsecaseValidator> createUsecaseValidator();
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_INCLUDE_MEDIA_USECASEVALIDATOR_H_
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
new file mode 100644
index 0000000..5768a9b
--- /dev/null
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.cpp
@@ -0,0 +1,299 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "tests/UsecaseValidator-test.h"
+
+#include <gtest/gtest.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Helper test functions.
+ */
+
+/**
+ * Register a mock stream.
+ */
+audio_io_handle_t UsecaseValidatorTest::testRegisterStream(bool outputFlagGame) {
+    static int streamId = 0;
+    status_t result;
+    static audio_config_base_t audioConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    audio_output_flags_t outputFlags = outputFlagGame ? GAME_OUTPUT_FLAGS : MEDIA_OUTPUT_FLAGS;
+
+    result = m_validator->registerStream(++streamId, audioConfig, outputFlags);
+
+    return result == OK ? streamId : 0;
+}
+
+/**
+ * Create a mock portId.
+ */
+audio_port_handle_t UsecaseValidatorTest::testCreatePortId(audio_io_handle_t streamId) {
+    static int portId = 0;
+
+    return (streamId << 8) | (++portId);
+}
+
+/**
+ * Add a mock portId to a stream and verify.
+ */
+error::Result<audio_attributes_t> UsecaseValidatorTest::testStartClient(audio_io_handle_t streamId,
+        audio_port_handle_t portId,
+        audio_attributes_t attributes) {
+    content::AttributionSourceState attributionSource;
+
+    return m_validator->startClient(streamId, portId, attributionSource, attributes, NULL);
+}
+
+/**
+ * Verify a mock stream.
+ */
+error::Result<audio_attributes_t> UsecaseValidatorTest::testVerifyAudioAttributes(
+        audio_io_handle_t streamId,
+        audio_usage_t usage) {
+    content::AttributionSourceState attributionSource;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = usage;
+
+    return m_validator->verifyAudioAttributes(streamId, attributionSource, attributes);
+}
+
+/**
+ * Test functions.
+ */
+
+/**
+ * Test adding and removing streams.
+ */
+TEST_F(UsecaseLookupTest, testAddAndRemoveStream) {
+    addStream(1, false);
+    addStream(2, true);
+
+    EXPECT_NE(m_streams.find(1), m_streams.end());
+    EXPECT_NE(m_streams.find(2), m_streams.end());
+    EXPECT_EQ(m_streams.find(3), m_streams.end());
+
+    EXPECT_FALSE(isGameStream(1));
+    EXPECT_TRUE(isGameStream(2));
+    EXPECT_FALSE(isGameStream(3));
+
+    removeStream(2);
+
+    EXPECT_FALSE(isGameStream(2));
+}
+
+/**
+ * Verify attributes usage for stream.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsage) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(gameStreamId, 0);
+    EXPECT_NE(mediaStreamId, 0);
+    EXPECT_NE(gameStreamId, mediaStreamId);
+
+    // Verify attributes on game stream.
+    auto attr = testVerifyAudioAttributes(gameStreamId, AUDIO_USAGE_GAME);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    // Verify attributes on media stream.
+    attr = testVerifyAudioAttributes(mediaStreamId, AUDIO_USAGE_MEDIA);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Test hanging client.
+ */
+TEST_F(UsecaseValidatorTest, testHangingClient) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+    audio_port_handle_t gamePortId, mediaPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(mediaStreamId, 0);
+
+    // Assign portId.
+    gamePortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(gamePortId, 0);
+    mediaPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(mediaPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.usage = AUDIO_USAGE_GAME;
+    // Start client on game stream.
+    testStartClient(gameStreamId, gamePortId, attributes);
+
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    // Start client on media stream.
+    testStartClient(mediaStreamId, mediaPortId, attributes);
+
+    // Unregister both streams without stopping their clients (simulates hanging clients).
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Verify attributes usage does not change.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnchanged) {
+    audio_io_handle_t gameStreamId, mediaStreamId;
+    audio_port_handle_t gamePortId, mediaPortId, unknownPortId, voiceCommPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+    mediaStreamId = testRegisterStream(false);
+    EXPECT_NE(mediaStreamId, 0);
+
+    // Assign portId.
+    gamePortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(gamePortId, 0);
+    mediaPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(mediaStreamId);
+    EXPECT_NE(unknownPortId, 0);
+    voiceCommPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(voiceCommPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_GAME;
+    auto attr = testStartClient(gameStreamId, gamePortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    attributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+    attr = testStartClient(gameStreamId, voiceCommPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_VOICE_COMMUNICATION);
+
+    // Verify attributes on media stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attr = testStartClient(mediaStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(mediaStreamId, unknownPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_UNKNOWN);
+
+    // Stop client on game and media stream.
+    EXPECT_EQ(m_validator->stopClient(gameStreamId, gamePortId), 0);
+    EXPECT_EQ(m_validator->stopClient(mediaStreamId, mediaPortId), 0);
+
+    // Unregister game and media stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+    EXPECT_EQ(m_validator->unregisterStream(mediaStreamId), 0);
+}
+
+/**
+ * Verify attributes usage changes.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageChanged) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    attributes.flags = AUDIO_FLAG_LOW_LATENCY;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(gameStreamId, unknownPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_GAME);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
+/**
+ * Verify attributes usage does not change for non-low-latency clients.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnChangedIfNotLowLatency) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    attributes.usage = AUDIO_USAGE_UNKNOWN;
+    attr = testStartClient(gameStreamId, unknownPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_UNKNOWN);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
+/**
+ * Verify attributes usage does not change for content type speech.
+ */
+TEST_F(UsecaseValidatorTest, testAttributesUsageUnChangedIfSpeech) {
+    audio_io_handle_t gameStreamId;
+    audio_port_handle_t mediaPortId, unknownPortId;
+
+    // Register game and media stream.
+    gameStreamId = testRegisterStream(true);
+    EXPECT_NE(gameStreamId, 0);
+
+    // Assign portId.
+    mediaPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(mediaPortId, 0);
+    unknownPortId = testCreatePortId(gameStreamId);
+    EXPECT_NE(unknownPortId, 0);
+
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    // Verify attributes on game stream.
+    attributes.usage = AUDIO_USAGE_MEDIA;
+    attributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+    auto attr = testStartClient(gameStreamId, mediaPortId, attributes);
+    EXPECT_EQ(attr.value().usage, AUDIO_USAGE_MEDIA);
+
+    // Unregister game stream.
+    EXPECT_EQ(m_validator->unregisterStream(gameStreamId), 0);
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
new file mode 100644
index 0000000..8cbd0f0
--- /dev/null
+++ b/media/libaudiousecasevalidation/tests/UsecaseValidator-test.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
+#define MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
+
+#include <gtest/gtest.h>
+
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+
+#include "media/UsecaseLookup.h"
+#include "media/UsecaseValidator.h"
+
+namespace android {
+namespace media {
+
+#define MEDIA_OUTPUT_FLAGS (audio_output_flags_t)(0xFFFFF &\
+                                ~(AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ))
+
+#define GAME_OUTPUT_FLAGS (audio_output_flags_t)\
+                                (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)
+
+class TestCallback : public UsecaseValidator::AttributesChangedCallback {
+ public:
+    TestCallback() {
+        m_iCallCnt = 0;
+    }
+    virtual ~TestCallback() { }
+    virtual void onAttributesChanged(audio_port_handle_t /*portId*/,
+                                     const audio_attributes_t& /*attributes*/) {
+        ++m_iCallCnt;
+    }
+
+ public:
+    int m_iCallCnt;
+};
+
+class UsecaseLookupTest : public UsecaseLookup, public ::testing::Test {
+ public:
+    UsecaseLookupTest() { }
+    virtual ~UsecaseLookupTest() = default;
+};
+
+class UsecaseValidatorTest : public ::testing::Test {
+ public:
+    UsecaseValidatorTest() {
+        m_validator = createUsecaseValidator();
+    }
+
+    virtual ~UsecaseValidatorTest() = default;
+
+ protected:
+    audio_io_handle_t testRegisterStream(bool outputFlagGame);
+    audio_port_handle_t testCreatePortId(audio_io_handle_t streamId);
+    error::Result<audio_attributes_t> testStartClient(audio_io_handle_t streamId,
+                                                      audio_port_handle_t portId,
+                                                      audio_attributes_t attributes);
+    error::Result<audio_attributes_t> testVerifyAudioAttributes(audio_io_handle_t streamId,
+                                                                audio_usage_t usage);
+
+    std::unique_ptr<UsecaseValidator> m_validator;
+};
+
+}  // namespace media
+}  // namespace android
+
+#endif  // MEDIA_LIBAUDIOUSECASEVALIDATION_TESTS_USECASEVALIDATOR_TEST_H_
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index b02dcb6..293a9c2 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -27,8 +27,21 @@
         "libcutils",
     ],
 
-    header_libs: ["libaudio_system_headers"],
-    export_header_lib_headers: ["libaudio_system_headers"],
+    header_libs: [
+        "libaudio_system_headers",
+        "liberror_headers",
+    ],
+
+    export_header_lib_headers: [
+        "libaudio_system_headers",
+        "liberror_headers",
+    ],
+
+    export_include_dirs: ["include"],
+}
+
+cc_library_headers {
+    name: "libeffectsconfig_headers",
 
     export_include_dirs: ["include"],
 }
diff --git a/media/libeffects/config/include/media/EffectsConfig.h b/media/libeffects/config/include/media/EffectsConfig.h
index 57d4dd7..09a060d 100644
--- a/media/libeffects/config/include/media/EffectsConfig.h
+++ b/media/libeffects/config/include/media/EffectsConfig.h
@@ -22,8 +22,10 @@
  * @see audio_effects_conf_V2_0.xsd for documentation on each structure
  */
 
+#include <error/Result.h>
 #include <system/audio_effect.h>
 
+#include <cstddef>
 #include <map>
 #include <memory>
 #include <string>
@@ -47,26 +49,27 @@
     std::string name;
     std::string path;
 };
-using Libraries = std::vector<Library>;
+using Libraries = std::vector<std::shared_ptr<const Library>>;
 
 struct EffectImpl {
-    Library* library; //< Only valid as long as the associated library vector is unmodified
+    //< Shared with the owning Libraries vector; stays valid even if that vector changes
+    std::shared_ptr<const Library> library;
     effect_uuid_t uuid;
 };
 
 struct Effect : public EffectImpl {
     std::string name;
     bool isProxy;
-    EffectImpl libSw; //< Only valid if isProxy
-    EffectImpl libHw; //< Only valid if isProxy
+    std::shared_ptr<EffectImpl> libSw; //< Only valid if isProxy
+    std::shared_ptr<EffectImpl> libHw; //< Only valid if isProxy
 };
 
-using Effects = std::vector<Effect>;
+using Effects = std::vector<std::shared_ptr<const Effect>>;
 
 template <class Type>
 struct Stream {
     Type type;
-    std::vector<std::reference_wrapper<Effect>> effects;
+    Effects effects;
 };
 using OutputStream = Stream<audio_stream_type_t>;
 using InputStream = Stream<audio_source_t>;
@@ -75,6 +78,12 @@
     std::string address;
 };
 
+struct Processings {
+    std::vector<InputStream> preprocess;
+    std::vector<OutputStream> postprocess;
+    std::vector<DeviceEffects> deviceprocess;
+};
+
 /** Parsed configuration.
  * Intended to be a transient structure only used for deserialization.
  * Note: Everything is copied in the configuration from the xml dom.
@@ -82,19 +91,16 @@
  *       consider keeping a private handle on the xml dom and replace all strings by dom pointers.
  *       Or even better, use SAX parsing to avoid the allocations all together.
  */
-struct Config {
+struct Config : public Processings {
     float version;
     Libraries libraries;
     Effects effects;
-    std::vector<OutputStream> postprocess;
-    std::vector<InputStream> preprocess;
-    std::vector<DeviceEffects> deviceprocess;
 };
 
 /** Result of `parse(const char*)` */
 struct ParsingResult {
     /** Parsed config, nullptr if the xml lib could not load the file */
-    std::unique_ptr<Config> parsedConfig;
+    std::shared_ptr<const Config> parsedConfig;
     size_t nbSkippedElement; //< Number of skipped invalid library, effect or processing chain
     const std::string configPath; //< Path to the loaded configuration
 };
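With parsedConfig now a std::shared_ptr<const Config>, a caller can keep the parsed tree (and the shared Library/Effect nodes inside it) alive beyond the ParsingResult itself. A hedged consumer sketch; the effectsConfig namespace and the parse() entry point are assumed from the existing header, and the path is illustrative.

    #include <media/EffectsConfig.h>

    void exampleLoad() {
        auto result = android::effectsConfig::parse("/vendor/etc/audio_effects.xml");
        if (result.parsedConfig == nullptr) {
            return;  // the xml library could not load the file
        }
        // Holding another shared_ptr keeps the config valid after ParsingResult is gone.
        std::shared_ptr<const android::effectsConfig::Config> config = result.parsedConfig;
        for (const auto& effect : config->effects) {  // each entry is shared_ptr<const Effect>
            (void)effect->name;
        }
    }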
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 1696233..2ff057e 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -19,6 +19,7 @@
 #include <algorithm>
 #include <cstdint>
 #include <functional>
+#include <memory>
 #include <string>
 #include <unistd.h>
 
@@ -149,7 +150,10 @@
         ALOGE("library must have a name and a path: %s", dump(xmlLibrary));
         return false;
     }
-    libraries->push_back({name, path});
+
+    // need this temp variable because `struct Library` doesn't have a constructor
+    Library lib({.name = name, .path = path});
+    libraries->push_back(std::make_shared<const Library>(lib));
     return true;
 }
 
@@ -157,10 +161,10 @@
  * @return nullptr if not found, the element address if found.
  */
 template <class T>
-T* findByName(const char* name, std::vector<T>& collection) {
+T findByName(const char* name, std::vector<T>& collection) {
     auto it = find_if(begin(collection), end(collection),
-                         [name] (auto& item) { return item.name == name; });
-    return it != end(collection) ? &*it : nullptr;
+                      [name](auto& item) { return item && item->name == name; });
+    return it != end(collection) ? *it : nullptr;
 }
 
 /** Parse an effect from an xml element describing it.
@@ -187,7 +191,7 @@
         }
 
         // Convert library name to a pointer to the previously loaded library
-        auto* library = findByName(libraryName, libraries);
+        auto library = findByName(libraryName, libraries);
         if (library == nullptr) {
             ALOGE("Could not find library referenced in: %s", dump(xmlImpl));
             return false;
@@ -211,20 +215,25 @@
         effect.isProxy = true;
 
         // Function to parse libhw and libsw
-        auto parseProxy = [&xmlEffect, &parseImpl](const char* tag, EffectImpl& proxyLib) {
+        auto parseProxy = [&xmlEffect, &parseImpl](const char* tag,
+                                                   const std::shared_ptr<EffectImpl>& proxyLib) {
             auto* xmlProxyLib = xmlEffect.FirstChildElement(tag);
             if (xmlProxyLib == nullptr) {
                 ALOGE("effectProxy must contain a <%s>: %s", tag, dump(xmlEffect));
                 return false;
             }
-            return parseImpl(*xmlProxyLib, proxyLib);
+            return parseImpl(*xmlProxyLib, *proxyLib);
         };
+        effect.libSw = std::make_shared<EffectImpl>();
+        effect.libHw = std::make_shared<EffectImpl>();
         if (!parseProxy("libhw", effect.libHw) || !parseProxy("libsw", effect.libSw)) {
+            effect.libSw.reset();
+            effect.libHw.reset();
             return false;
         }
     }
 
-    effects->push_back(std::move(effect));
+    effects->push_back(std::make_shared<const Effect>(effect));
     return true;
 }
 
@@ -250,12 +259,12 @@
             ALOGE("<stream|device>/apply must have reference an effect: %s", dump(xmlApply));
             return false;
         }
-        auto* effect = findByName(effectName, effects);
+        auto effect = findByName(effectName, effects);
         if (effect == nullptr) {
             ALOGE("Could not find effect referenced in: %s", dump(xmlApply));
             return false;
         }
-        stream.effects.emplace_back(*effect);
+        stream.effects.emplace_back(effect);
     }
     streams->push_back(std::move(stream));
     return true;
@@ -286,7 +295,7 @@
         return {nullptr, 0, std::move(path)};
     }
 
-    auto config = std::make_unique<Config>();
+    auto config = std::make_shared<Config>();
     size_t nbSkippedElements = 0;
     auto registerFailure = [&nbSkippedElements](bool result) {
         nbSkippedElements += result ? 0 : 1;
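The findByName() change above returns the matching shared_ptr by value instead of a raw pointer into the vector, so a looked-up Library or Effect stays alive even if the owning collection is later cleared or reallocated. Illustrative fragment only (findByName is internal to this file):

    Libraries libraries;
    libraries.push_back(std::make_shared<const Library>(Library{.name = "bundle", .path = "libbundle.so"}));

    std::shared_ptr<const Library> lib = findByName("bundle", libraries);
    libraries.clear();  // a raw Library* returned by the old code would now dangle
    // 'lib' still owns the node and remains safe to dereference here.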
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index abe622d..a5259aa 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -47,3 +47,30 @@
         "libhardware_headers",
     ],
 }
+
+cc_library_shared {
+    name: "libdownmixaidl",
+    srcs: [
+        "aidl/EffectDownmix.cpp",
+        "aidl/DownmixContext.cpp",
+        ":effectCommonFile",
+    ],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers"
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libcutils",
+        "liblog",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/downmix/EffectDownmix.cpp b/media/libeffects/downmix/EffectDownmix.cpp
index d8f5787..b921537 100644
--- a/media/libeffects/downmix/EffectDownmix.cpp
+++ b/media/libeffects/downmix/EffectDownmix.cpp
@@ -40,7 +40,7 @@
     downmix_type_t type;
     bool apply_volume_correction;
     uint8_t input_channel_count;
-    android::audio_utils::channels::ChannelMix channelMix;
+    android::audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO> channelMix;
 };
 
 typedef struct downmix_module_s {
@@ -259,7 +259,7 @@
     ret = Downmix_Init(module);
     if (ret < 0) {
         ALOGW("DownmixLib_Create() init failed");
-        free(module);
+        delete module;
         return ret;
     }
 
@@ -582,7 +582,7 @@
     ALOGV("Downmix_Init module %p", pDwmModule);
     int ret = 0;
 
-    memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
+    pDwmModule->context = downmix_object_t{};  // value-initialize (contains class with vtable).
 
     pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
     pDwmModule->config.inputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
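The memset-to-value-initialization change above matters because downmix_object_t now holds a ChannelMix member which, per the comment in the code, carries a vtable; overwriting the object with memset would clobber that vtable pointer. A tiny stand-alone illustration of the pattern, with made-up type names:

    struct Helper {               // stand-in for ChannelMix: has virtual functions
        virtual ~Helper() = default;
        int mValue = 0;
    };
    struct Holder {               // stand-in for downmix_object_t
        int flags;
        Helper helper;
    };

    void resetHolder(Holder& h) {
        // memset(&h, 0, sizeof(h));  // undefined behavior: wipes Helper's vptr
        h = Holder{};                 // value-initialize instead: flags == 0, helper reconstructed
    }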
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
new file mode 100644
index 0000000..ac893d8
--- /dev/null
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_DownmixContext"
+
+#include <android-base/logging.h>
+
+#include "DownmixContext.h"
+
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::media::audio::common::AudioChannelLayout;
+
+namespace aidl::android::hardware::audio::effect {
+
+DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
+    : EffectContext(statusDepth, common) {
+    LOG(DEBUG) << __func__;
+    mState = DOWNMIX_STATE_UNINITIALIZED;
+    init_params(common);
+}
+
+DownmixContext::~DownmixContext() {
+    LOG(DEBUG) << __func__;
+    mState = DOWNMIX_STATE_UNINITIALIZED;
+}
+
+RetCode DownmixContext::enable() {
+    LOG(DEBUG) << __func__;
+    if (mState != DOWNMIX_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = DOWNMIX_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode DownmixContext::disable() {
+    LOG(DEBUG) << __func__;
+    if (mState != DOWNMIX_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = DOWNMIX_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+void DownmixContext::reset() {
+    LOG(DEBUG) << __func__;
+    disable();
+    resetBuffer();
+}
+
+IEffect::Status DownmixContext::lvmProcess(float* in, float* out, int samples) {
+    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+    IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
+
+    if (in == nullptr || out == nullptr || getInputFrameSize() != getOutputFrameSize() ||
+        getInputFrameSize() == 0) {
+        return status;
+    }
+
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    if (mState == DOWNMIX_STATE_UNINITIALIZED) {
+        LOG(ERROR) << __func__ << "Trying to use an uninitialized downmixer";
+        return status;
+    } else if (mState == DOWNMIX_STATE_INITIALIZED) {
+        LOG(ERROR) << __func__ << "Trying to use a non-configured downmixer";
+        return status;
+    }
+
+    LOG(DEBUG) << __func__ << " start processing";
+    bool accumulate = false;
+    int frames = samples * sizeof(float) / getInputFrameSize();
+    if (mType == Downmix::Type::STRIP) {
+        int inputChannelCount = getChannelCount(mChMask);
+        while (frames) {
+            if (accumulate) {
+                out[0] = std::clamp(out[0] + in[0], -1.f, 1.f);
+                out[1] = std::clamp(out[1] + in[1], -1.f, 1.f);
+            } else {
+                out[0] = in[0];
+                out[1] = in[1];
+            }
+            in += inputChannelCount;
+            out += 2;
+            frames--;
+        }
+    } else {
+        int chMask = mChMask.get<AudioChannelLayout::layoutMask>();
+        if (!mChannelMix.process(in, out, frames, accumulate, (audio_channel_mask_t)chMask)) {
+            LOG(ERROR) << "Multichannel configuration " << mChMask.toString()
+                       << " is not supported";
+            return status;
+        }
+    }
+    LOG(DEBUG) << __func__ << " done processing";
+    return {STATUS_OK, samples, samples};
+}
+
+void DownmixContext::init_params(const Parameter::Common& common) {
+    // when configuring the effect, do not allow a blank or unsupported channel mask
+    AudioChannelLayout channelMask = common.input.base.channelMask;
+    if (!isChannelMaskValid(channelMask)) {
+        LOG(ERROR) << "Downmix_Configure error: input channel mask " << channelMask.toString()
+                   << " not supported";
+    } else {
+        mType = Downmix::Type::FOLD;
+        mChMask = channelMask;
+        mState = DOWNMIX_STATE_INITIALIZED;
+    }
+}
+
+bool DownmixContext::isChannelMaskValid(AudioChannelLayout channelMask) {
+    if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+    int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
+    // check against unsupported channels (up to FCC_26)
+    constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
+                                              AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
+    if (chMask & ~MAXIMUM_CHANNEL_MASK) {
+        LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
+        return false;
+    }
+    return true;
+}
+
+}  // namespace aidl::android::hardware::audio::effect
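For reference, the frame arithmetic in lvmProcess() above works out as follows, assuming getInputFrameSize() reports the input frame size in bytes (which is what the expression implies); the numbers are illustrative:

    // input mask = 5.1          -> 6 channels, so getInputFrameSize() = 6 * sizeof(float) = 24 bytes
    // samples    = 5760 floats  -> one processing block across all channels
    // frames     = samples * sizeof(float) / getInputFrameSize()
    //            = 5760 * 4 / 24 = 960 frames
    // The STRIP path then emits 2 floats (stereo) per frame, i.e. 1920 output floats.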
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
new file mode 100644
index 0000000..1571c38
--- /dev/null
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "effect-impl/EffectContext.h"
+
+#include <audio_utils/ChannelMix.h>
+
+namespace aidl::android::hardware::audio::effect {
+
+enum DownmixState {
+    DOWNMIX_STATE_UNINITIALIZED,
+    DOWNMIX_STATE_INITIALIZED,
+    DOWNMIX_STATE_ACTIVE,
+};
+
+class DownmixContext final : public EffectContext {
+  public:
+    DownmixContext(int statusDepth, const Parameter::Common& common);
+    ~DownmixContext();
+    RetCode enable();
+    RetCode disable();
+    void reset();
+
+    RetCode setDmType(Downmix::Type type) {
+        mType = type;
+        return RetCode::SUCCESS;
+    }
+    Downmix::Type getDmType() const { return mType; }
+
+    RetCode setOutputDevice(
+            const std::vector<::aidl::android::media::audio::common::AudioDeviceDescription>&
+                    device) override {
+        // FIXME change type if playing on headset vs speaker
+        mOutputDevice = device;
+        return RetCode::SUCCESS;
+    }
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    DownmixState mState;
+    Downmix::Type mType;
+    ::aidl::android::media::audio::common::AudioChannelLayout mChMask;
+    ::android::audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO> mChannelMix;
+
+    // Common Params
+    void init_params(const Parameter::Common& common);
+    bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
new file mode 100644
index 0000000..7068c5c
--- /dev/null
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_DownmixImpl"
+
+#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "EffectDownmix.h"
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::DownmixImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDownmix;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::media::audio::common::AudioUuid;
+
+extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<DownmixImpl>();
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    *_aidl_return = DownmixImpl::kDescriptor;
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+const std::string DownmixImpl::kEffectName = "Multichannel Downmix To Stereo";
+const Descriptor DownmixImpl::kDescriptor = {
+        .common = {.id = {.type = getEffectTypeUuidDownmix(),
+                          .uuid = getEffectImplUuidDownmix(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
+                   .name = DownmixImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"}};
+
+ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << __func__ << kDescriptor.toString();
+    *_aidl_return = kDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus DownmixImpl::setParameterCommon(const Parameter& param) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = param.getTag();
+    switch (tag) {
+        case Parameter::common:
+            RETURN_IF(mContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setCommFailed");
+            break;
+        case Parameter::deviceDescription:
+            RETURN_IF(mContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
+            break;
+        case Parameter::mode:
+            RETURN_IF(mContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setModeFailed");
+            break;
+        case Parameter::source:
+            RETURN_IF(mContext->setAudioSource(param.get<Parameter::source>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setSourceFailed");
+            break;
+        case Parameter::volumeStereo:
+            RETURN_IF(mContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
+            break;
+        default: {
+            LOG(ERROR) << __func__ << " unsupportedParameterTag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commonParamNotSupported");
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->reset();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus DownmixImpl::setParameterSpecific(const Parameter::Specific& specific) {
+    RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT,
+              "EffectNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto& dmParam = specific.get<Parameter::Specific::downmix>();
+    auto tag = dmParam.getTag();
+
+    switch (tag) {
+        case Downmix::type: {
+            RETURN_IF(mContext->setDmType(dmParam.get<Downmix::type>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setTypeFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "DownmixTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus DownmixImpl::getParameterSpecific(const Parameter::Id& id,
+                                                     Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+    RETURN_IF(Parameter::Id::downmixTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+    auto dmId = id.get<Parameter::Id::downmixTag>();
+    auto dmIdTag = dmId.getTag();
+    switch (dmIdTag) {
+        case Downmix::Id::commonTag:
+            return getParameterDownmix(dmId.get<Downmix::Id::commonTag>(), specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(dmIdTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "DownmixTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus DownmixImpl::getParameterDownmix(const Downmix::Tag& tag,
+                                                    Parameter::Specific* specific) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    Downmix dmParam;
+    switch (tag) {
+        case Downmix::type: {
+            dmParam.set<Downmix::type>(mContext->getDmType());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "DownmixTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::downmix>(dmParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> DownmixImpl::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exists";
+        return mContext;
+    }
+
+    mContext = std::make_shared<DownmixContext>(1 /* statusFmqDepth */, common);
+    return mContext;
+}
+
+RetCode DownmixImpl::releaseContext() {
+    if (mContext) {
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status DownmixImpl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+    if (!mContext) {
+        LOG(ERROR) << __func__ << " nullContext";
+        return {EX_NULL_POINTER, 0, 0};
+    }
+    return mContext->lvmProcess(in, out, sampleToProcess);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
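The extern "C" createEffect()/queryEffect() functions above are the symbols a host (for example, the audio effect factory) is expected to resolve after dlopen-ing the library from its soundfx install path. A hedged sketch of such a caller; the library name, loading policy, and lifetime handling are illustrative assumptions, not part of this change:

    #include <android/binder_status.h>
    #include <dlfcn.h>

    using aidl::android::hardware::audio::effect::IEffect;
    using aidl::android::media::audio::common::AudioUuid;

    using CreateFn = binder_exception_t (*)(const AudioUuid*, std::shared_ptr<IEffect>*);

    std::shared_ptr<IEffect> loadDownmix(const AudioUuid& uuid) {
        void* handle = dlopen("libdownmixaidl.so", RTLD_NOW);
        if (handle == nullptr) return nullptr;
        auto create = reinterpret_cast<CreateFn>(dlsym(handle, "createEffect"));
        std::shared_ptr<IEffect> effect;
        if (create == nullptr || create(&uuid, &effect) != EX_NONE) {
            dlclose(handle);
            return nullptr;
        }
        return effect;  // real code must keep 'handle' loaded for as long as 'effect' is used
    }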
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
new file mode 100644
index 0000000..812d26b
--- /dev/null
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <audio_effects/effect_downmix.h>
+
+#include "DownmixContext.h"
+#include "effect-impl/EffectImpl.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class DownmixImpl final : public EffectImpl {
+  public:
+    static const std::string kEffectName;
+    static const Descriptor kDescriptor;
+    DownmixImpl() { LOG(DEBUG) << __func__; }
+    ~DownmixImpl() {
+        cleanUp();
+        LOG(DEBUG) << __func__;
+    }
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    RetCode releaseContext() override;
+
+    std::shared_ptr<EffectContext> getContext() override { return mContext; }
+    std::string getEffectName() override { return kEffectName; }
+
+  private:
+    std::shared_ptr<DownmixContext> mContext;
+    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific);
+};
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
index d9d40ed..c4e0d65 100644
--- a/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
+++ b/media/libeffects/downmix/benchmark/downmix_benchmark.cpp
@@ -60,34 +60,35 @@
 static constexpr size_t kFrameCount = 1000;
 
 /*
-Pixel 4XL
-$ adb shell /data/benchmarktest/downmix_benchmark/vendor/downmix_benchmark
+Pixel 7
+$ atest downmix_benchmark
 
 --------------------------------------------------------
 Benchmark              Time             CPU   Iterations
 --------------------------------------------------------
-BM_Downmix/0        3638 ns         3624 ns       197517 AUDIO_CHANNEL_OUT_MONO
-BM_Downmix/1        4040 ns         4024 ns       178766
-BM_Downmix/2        4759 ns         4740 ns       134741 AUDIO_CHANNEL_OUT_STEREO
-BM_Downmix/3        6042 ns         6017 ns       129546 AUDIO_CHANNEL_OUT_2POINT1
-BM_Downmix/4        6897 ns         6868 ns        96316 AUDIO_CHANNEL_OUT_2POINT0POINT2
-BM_Downmix/5        2117 ns         2109 ns       331705 AUDIO_CHANNEL_OUT_QUAD
-BM_Downmix/6        2097 ns         2088 ns       335421 AUDIO_CHANNEL_OUT_QUAD_SIDE
-BM_Downmix/7        7291 ns         7263 ns        96256 AUDIO_CHANNEL_OUT_SURROUND
-BM_Downmix/8        8246 ns         8206 ns        84318 AUDIO_CHANNEL_OUT_2POINT1POINT2
-BM_Downmix/9        8341 ns         8303 ns        84298 AUDIO_CHANNEL_OUT_3POINT0POINT2
-BM_Downmix/10       7549 ns         7517 ns        84293 AUDIO_CHANNEL_OUT_PENTA
-BM_Downmix/11       9395 ns         9354 ns        75209 AUDIO_CHANNEL_OUT_3POINT1POINT2
-BM_Downmix/12       3267 ns         3253 ns       215596 AUDIO_CHANNEL_OUT_5POINT1
-BM_Downmix/13       3178 ns         3163 ns       220132 AUDIO_CHANNEL_OUT_5POINT1_SIDE
-BM_Downmix/14      10245 ns        10199 ns        67486 AUDIO_CHANNEL_OUT_6POINT1
-BM_Downmix/15      10975 ns        10929 ns        61359 AUDIO_CHANNEL_OUT_5POINT1POINT2
-BM_Downmix/16       3796 ns         3780 ns       184728 AUDIO_CHANNEL_OUT_7POINT1
-BM_Downmix/17      13562 ns        13503 ns        51823 AUDIO_CHANNEL_OUT_5POINT1POINT4
-BM_Downmix/18      13573 ns        13516 ns        51800 AUDIO_CHANNEL_OUT_7POINT1POINT2
-BM_Downmix/19      15502 ns        15435 ns        47147 AUDIO_CHANNEL_OUT_7POINT1POINT4
-BM_Downmix/20      16693 ns        16624 ns        42109 AUDIO_CHANNEL_OUT_13POINT_360RA
-BM_Downmix/21      28267 ns        28116 ns        24982 AUDIO_CHANNEL_OUT_22POINT2
+downmix_benchmark:
+  #BM_Downmix/0     2216 ns    2208 ns       308323
+  #BM_Downmix/1     2237 ns    2228 ns       314730
+  #BM_Downmix/2      270 ns     268 ns      2681469
+  #BM_Downmix/3     3016 ns    2999 ns       234146
+  #BM_Downmix/4     3331 ns    3313 ns       212026
+  #BM_Downmix/5      816 ns     809 ns       864395
+  #BM_Downmix/6      813 ns     809 ns       863876
+  #BM_Downmix/7     3336 ns    3319 ns       211938
+  #BM_Downmix/8     3786 ns    3762 ns       185047
+  #BM_Downmix/9     3810 ns    3797 ns       186840
+  #BM_Downmix/10    3767 ns    3746 ns       187015
+  #BM_Downmix/11    4212 ns    4191 ns       166119
+  #BM_Downmix/12    1245 ns    1231 ns       574388
+  #BM_Downmix/13    1234 ns    1228 ns       574743
+  #BM_Downmix/14    4795 ns    4771 ns       147157
+  #BM_Downmix/15    1334 ns    1327 ns       527728
+  #BM_Downmix/16    1346 ns    1332 ns       525444
+  #BM_Downmix/17    2144 ns    2121 ns       333343
+  #BM_Downmix/18    2133 ns    2118 ns       330391
+  #BM_Downmix/19    2527 ns    2513 ns       278553
+  #BM_Downmix/20    8148 ns    8113 ns        86136
+  #BM_Downmix/21    6332 ns    6301 ns       111134
 */
 
 static void BM_Downmix(benchmark::State& state) {
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 84131a4..7838117 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -32,34 +32,71 @@
     ],
 }
 
+cc_defaults {
+    name : "dynamicsprocessingdefaults",
+    srcs: [
+        "dsp/DPBase.cpp",
+        "dsp/DPFrequency.cpp",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "liblog",
+        "libutils",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libeigen",
+    ],
+    cflags: [
+        "-Wthread-safety",
+        "-Wall",
+        "-Werror",
+    ],
+    relative_install_path: "soundfx",
+}
+
 cc_library_shared {
     name: "libdynproc",
 
     vendor: true,
 
+    defaults: [
+        "dynamicsprocessingdefaults",
+    ],
+
     srcs: [
         "EffectDynamicsProcessing.cpp",
-        "dsp/DPBase.cpp",
-        "dsp/DPFrequency.cpp",
     ],
 
     cflags: [
         "-O2",
         "-fvisibility=hidden",
+    ],
+}
 
-        "-Wall",
-        "-Werror",
+cc_library_shared {
+    name: "libdynamicsprocessingaidl",
+
+    srcs: [
+        "aidl/DynamicsProcessing.cpp",
+        "aidl/DynamicsProcessingContext.cpp",
+        ":effectCommonFile",
     ],
 
-    shared_libs: [
-        "libcutils",
-        "liblog",
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+        "dynamicsprocessingdefaults",
     ],
 
-    relative_install_path: "soundfx",
+    static_libs: [
+        "libaudioaidlranges",
+    ],
 
-    header_libs: [
-        "libaudioeffects",
-        "libeigen",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
     ],
 }
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
new file mode 100644
index 0000000..1fed9a5
--- /dev/null
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -0,0 +1,449 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_DynamicsProcessingLibEffects"
+
+#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "DynamicsProcessing.h"
+
+#include <dsp/DPBase.h>
+#include <dsp/DPFrequency.h>
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::DynamicsProcessingImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDynamicsProcessing;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+using aidl::android::media::audio::common::PcmType;
+
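+// Library entry points for the AIDL effect HAL. The default effect factory in
+// hardware/interfaces/audio/aidl/default (the only module granted visibility in Android.bp)
+// is expected to resolve createEffect()/queryEffect() by symbol name when loading this library.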
+extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<DynamicsProcessingImpl>();
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    *_aidl_return = DynamicsProcessingImpl::kDescriptor;
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+const std::string DynamicsProcessingImpl::kEffectName = "DynamicsProcessing";
+
+static const Range::DynamicsProcessingRange kEngineConfigRange = {
+        .min = DynamicsProcessing::make<
+                DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+                {.resolutionPreference =
+                         DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
+                 .preferredProcessingDurationMs = 1.0f,
+                 .preEqStage = {.inUse = false, .bandCount = 0},
+                 .postEqStage = {.inUse = false, .bandCount = 0},
+                 .mbcStage = {.inUse = false, .bandCount = 0},
+                 .limiterInUse = false})),
+        .max = DynamicsProcessing::make<
+                DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+                {.resolutionPreference =
+                         DynamicsProcessing::ResolutionPreference::FAVOR_TIME_RESOLUTION,
+                 .preferredProcessingDurationMs = 1000.0f,
+                 .preEqStage = {.inUse = true, .bandCount = 128},
+                 .postEqStage = {.inUse = true, .bandCount = 128},
+                 .mbcStage = {.inUse = true, .bandCount = 128},
+                 .limiterInUse = true}))};
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMin =
+        DynamicsProcessing::ChannelConfig({.channel = 0, .enable = false});
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMax =
+        DynamicsProcessing::ChannelConfig(
+                {.channel = std::numeric_limits<int>::max(), .enable = true});
+
+static const Range::DynamicsProcessingRange kPreEqChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcChannelConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMax})};
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMin =
+        DynamicsProcessing::EqBandConfig({.channel = 0,
+                                          .band = 0,
+                                          .enable = false,
+                                          .cutoffFrequencyHz = 20,
+                                          .gainDb = -200});
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMax =
+        DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
+                                          .band = std::numeric_limits<int>::max(),
+                                          .enable = true,
+                                          .cutoffFrequencyHz = 20000,
+                                          .gainDb = 200});
+
+static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMin}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcBandConfigRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+                {DynamicsProcessing::MbcBandConfig(
+                        {.channel = 0,
+                         .band = 0,
+                         .enable = false,
+                         .cutoffFrequencyHz = 20,
+                         .attackTimeMs = 0,
+                         .releaseTimeMs = 0,
+                         .ratio = 1,
+                         .thresholdDb = -200,
+                         .kneeWidthDb = 0,
+                         .noiseGateThresholdDb = -200,
+                         .expanderRatio = 1,
+                         .preGainDb = -200,
+                         .postGainDb = -200})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+                {DynamicsProcessing::MbcBandConfig(
+                        {.channel = std::numeric_limits<int>::max(),
+                         .band = std::numeric_limits<int>::max(),
+                         .enable = true,
+                         .cutoffFrequencyHz = 20000,
+                         .attackTimeMs = 60000,
+                         .releaseTimeMs = 60000,
+                         .ratio = 50,
+                         .thresholdDb = 200,
+                         .kneeWidthDb = 100,
+                         .noiseGateThresholdDb = 200,
+                         .expanderRatio = 50,
+                         .preGainDb = 200,
+                         .postGainDb = 200})})};
+
+static const Range::DynamicsProcessingRange kInputGainRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+                {DynamicsProcessing::InputGain(
+                        {.channel = 0, .gainDb = -200.0f})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+                {DynamicsProcessing::InputGain({.channel = std::numeric_limits<int>::max(),
+                                                .gainDb = 200.0f})})};
+
+static const Range::DynamicsProcessingRange kLimiterRange = {
+        .min = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+                {DynamicsProcessing::LimiterConfig(
+                        {.channel = 0,
+                         .enable = false,
+                         .linkGroup = std::numeric_limits<int>::min(),
+                         .attackTimeMs = 0,
+                         .releaseTimeMs = 0,
+                         .ratio = 1,
+                         .thresholdDb = -200,
+                         .postGainDb = -200})}),
+        .max = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+                {DynamicsProcessing::LimiterConfig(
+                        {.channel = std::numeric_limits<int>::max(),
+                         .enable = true,
+                         .linkGroup = std::numeric_limits<int>::max(),
+                         .attackTimeMs = 60000,
+                         .releaseTimeMs = 60000,
+                         .ratio = 50,
+                         .thresholdDb = 200,
+                         .postGainDb = 200})})};
+
+const std::vector<Range::DynamicsProcessingRange> kRanges = {
+        kEngineConfigRange,     kPreEqChannelConfigRange, kPostEqChannelConfigRange,
+        kMbcChannelConfigRange, kPreEqBandConfigRange,    kPostEqBandConfigRange,
+        kMbcBandConfigRange,    kInputGainRange,          kLimiterRange};
+
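+// The ranges above are advertised through kCapability below and checked by isParamInRange()
+// before setParameterSpecific() applies any parameter.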
+const Capability DynamicsProcessingImpl::kCapability = {.range = kRanges};
+
+const Descriptor DynamicsProcessingImpl::kDescriptor = {
+        .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(),
+                          .uuid = getEffectImplUuidDynamicsProcessing(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::LAST,
+                             .volume = Flags::Volume::CTRL},
+                   .name = DynamicsProcessingImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = DynamicsProcessingImpl::kCapability};
+
+ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common,
+                                                const std::optional<Parameter::Specific>& specific,
+                                                OpenEffectReturn* ret) {
+    LOG(DEBUG) << __func__;
+    // effect only supports 32-bit float
+    RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
+                      common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
+              EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat");
+    RETURN_OK_IF(mState != State::INIT);
+    auto context = createContext(common);
+    RETURN_IF(!context, EX_NULL_POINTER, "createContextFailed");
+
+    RETURN_IF_ASTATUS_NOT_OK(setParameterCommon(common), "setCommParamErr");
+    if (specific.has_value()) {
+        RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
+    } else {
+        Parameter::Specific defaultSpecific =
+                Parameter::Specific::make<Parameter::Specific::dynamicsProcessing>(
+                        DynamicsProcessing::make<DynamicsProcessing::engineArchitecture>(
+                                mContext->getEngineArchitecture()));
+        RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(defaultSpecific), "setDefaultEngineErr");
+    }
+
+    mState = State::IDLE;
+    context->dupeFmq(ret);
+    RETURN_IF(createThread(context, getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
+              "FailedToCreateWorker");
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus DynamicsProcessingImpl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << __func__ << kDescriptor.toString();
+    *_aidl_return = kDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus DynamicsProcessingImpl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            return ndk::ScopedAStatus::ok();
+        case CommandId::STOP:
+            mContext->disable();
+            return ndk::ScopedAStatus::ok();
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            return ndk::ScopedAStatus::ok();
+        default:
+            // Need this default handling for vendor extendable CommandId::VENDOR_COMMAND_*
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+}
+
+bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
+    auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
+    return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
+}
+
+ndk::ScopedAStatus DynamicsProcessingImpl::setParameterSpecific(
+        const Parameter::Specific& specific) {
+    RETURN_IF(Parameter::Specific::dynamicsProcessing != specific.getTag(), EX_ILLEGAL_ARGUMENT,
+              "EffectNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    RETURN_IF(!isParamInRange(specific), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto& param = specific.get<Parameter::Specific::dynamicsProcessing>();
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case DynamicsProcessing::engineArchitecture: {
+            RETURN_IF(mContext->setEngineArchitecture(
+                              param.get<DynamicsProcessing::engineArchitecture>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setEngineArchitectureFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::preEq: {
+            RETURN_IF(
+                    mContext->setPreEq(param.get<DynamicsProcessing::preEq>()) != RetCode::SUCCESS,
+                    EX_ILLEGAL_ARGUMENT, "setPreEqFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::postEq: {
+            RETURN_IF(mContext->setPostEq(param.get<DynamicsProcessing::postEq>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setPostEqFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::preEqBand: {
+            RETURN_IF(mContext->setPreEqBand(param.get<DynamicsProcessing::preEqBand>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setPreEqBandFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::postEqBand: {
+            RETURN_IF(mContext->setPostEqBand(param.get<DynamicsProcessing::postEqBand>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setPostEqBandFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::mbc: {
+            RETURN_IF(mContext->setMbc(param.get<DynamicsProcessing::mbc>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setMbcFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::mbcBand: {
+            RETURN_IF(mContext->setMbcBand(param.get<DynamicsProcessing::mbcBand>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setMbcBandFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::limiter: {
+            RETURN_IF(mContext->setLimiter(param.get<DynamicsProcessing::limiter>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setLimiterFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::inputGain: {
+            RETURN_IF(mContext->setInputGain(param.get<DynamicsProcessing::inputGain>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setInputGainFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::vendor: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus DynamicsProcessingImpl::getParameterSpecific(const Parameter::Id& id,
+                                                                Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+    RETURN_IF(Parameter::Id::dynamicsProcessingTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+    auto dpId = id.get<Parameter::Id::dynamicsProcessingTag>();
+    auto dpIdTag = dpId.getTag();
+    switch (dpIdTag) {
+        case DynamicsProcessing::Id::commonTag:
+            return getParameterDynamicsProcessing(dpId.get<DynamicsProcessing::Id::commonTag>(),
+                                                  specific);
+        case DynamicsProcessing::Id::vendorExtensionTag:
+            LOG(ERROR) << __func__ << " unsupported ID: " << toString(dpIdTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "DPVendorExtensionIdNotSupported");
+    }
+}
+
+ndk::ScopedAStatus DynamicsProcessingImpl::getParameterDynamicsProcessing(
+        const DynamicsProcessing::Tag& tag, Parameter::Specific* specific) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    switch (tag) {
+        case DynamicsProcessing::engineArchitecture: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::engineArchitecture>(
+                            mContext->getEngineArchitecture()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::preEq: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::preEq>(mContext->getPreEq()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::postEq: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::postEq>(mContext->getPostEq()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::preEqBand: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
+                            mContext->getPreEqBand()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::postEqBand: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
+                            mContext->getPostEqBand()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::mbc: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::mbc>(mContext->getMbc()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::mbcBand: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::mbcBand>(mContext->getMbcBand()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::limiter: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::limiter>(mContext->getLimiter()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::inputGain: {
+            specific->set<Parameter::Specific::dynamicsProcessing>(
+                    DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+                            mContext->getInputGain()));
+            return ndk::ScopedAStatus::ok();
+        }
+        case DynamicsProcessing::vendor: {
+            LOG(ERROR) << __func__ << " wrong vendor tag in CommonTag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagInWrongId");
+        }
+    }
+}
+
+std::shared_ptr<EffectContext> DynamicsProcessingImpl::createContext(
+        const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exists";
+        return mContext;
+    }
+
+    mContext = std::make_shared<DynamicsProcessingContext>(1 /* statusFmqDepth */, common);
+    return mContext;
+}
+
+RetCode DynamicsProcessingImpl::releaseContext() {
+    if (mContext) {
+        mContext->disable();
+        mContext->resetBuffer();
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status DynamicsProcessingImpl::effectProcessImpl(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, samples);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
new file mode 100644
index 0000000..1e1e72e
--- /dev/null
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "DynamicsProcessingContext.h"
+#include "EffectRangeSpecific.h"
+#include "effect-impl/EffectImpl.h"
+
+namespace aidl::android::hardware::audio::effect {
+
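+// AIDL DynamicsProcessing effect implementation; parameter handling lives here while the
+// signal processing is delegated to DynamicsProcessingContext and the dp_fx engine.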
+class DynamicsProcessingImpl final : public EffectImpl {
+  public:
+    static const std::string kEffectName;
+    static const Descriptor kDescriptor;
+    static const Capability kCapability;
+
+    DynamicsProcessingImpl() { LOG(DEBUG) << __func__; }
+    ~DynamicsProcessingImpl() {
+        cleanUp();
+        LOG(DEBUG) << __func__;
+    }
+
+    ndk::ScopedAStatus open(const Parameter::Common& common,
+                            const std::optional<Parameter::Specific>& specific,
+                            OpenEffectReturn* ret) override;
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    RetCode releaseContext() override;
+
+    std::shared_ptr<EffectContext> getContext() override { return mContext; }
+    std::string getEffectName() override { return kEffectName; }
+
+  private:
+    std::shared_ptr<DynamicsProcessingContext> mContext;
+    ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
+                                                      Parameter::Specific* specific);
+    bool isParamInRange(const Parameter::Specific& specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
new file mode 100644
index 0000000..9d77135
--- /dev/null
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -0,0 +1,559 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_DPLibEffectsContext"
+
+#include "DynamicsProcessingContext.h"
+#include "DynamicsProcessing.h"
+
+#include <sys/param.h>
+#include <functional>
+#include <unordered_set>
+
+namespace aidl::android::hardware::audio::effect {
+
+DynamicsProcessingContext::DynamicsProcessingContext(int statusDepth,
+                                                     const Parameter::Common& common)
+    : EffectContext(statusDepth, common) {
+    LOG(DEBUG) << __func__;
+    init();
+}
+
+DynamicsProcessingContext::~DynamicsProcessingContext() {
+    LOG(DEBUG) << __func__;
+}
+
+RetCode DynamicsProcessingContext::enable() {
+    std::lock_guard lg(mMutex);
+    if (mState != DYNAMICS_PROCESSING_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = DYNAMICS_PROCESSING_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode DynamicsProcessingContext::disable() {
+    std::lock_guard lg(mMutex);
+    if (mState != DYNAMICS_PROCESSING_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+void DynamicsProcessingContext::reset() {
+    std::lock_guard lg(mMutex);
+    if (mDpFreq != nullptr) {
+        mDpFreq.reset();
+    }
+}
+
+RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
+    mCommon = common;
+    init();
+    LOG(INFO) << __func__ << common.toString();
+    return RetCode::SUCCESS;
+}
+
+void DynamicsProcessingContext::dpSetFreqDomainVariant_l(
+        const DynamicsProcessing::EngineArchitecture& engine) {
+    mDpFreq.reset(new dp_fx::DPFrequency());
+    mDpFreq->init(mChannelCount, engine.preEqStage.inUse, engine.preEqStage.bandCount,
+                  engine.mbcStage.inUse, engine.mbcStage.bandCount, engine.postEqStage.inUse,
+                  engine.postEqStage.bandCount, engine.limiterInUse);
+
+    int32_t sampleRate = mCommon.input.base.sampleRate;
+    int32_t minBlockSize = (int32_t)dp_fx::DPFrequency::getMinBockSize();
+    int32_t block = engine.preferredProcessingDurationMs * sampleRate / 1000.0f;
+    LOG(INFO) << __func__ << " sampleRate " << sampleRate << " block length "
+              << engine.preferredProcessingDurationMs << " ms (" << block << " samples)";
+    if (block < minBlockSize) {
+        block = minBlockSize;
+    } else if (!powerof2(block)) {
+        // find next highest power of 2.
+        block = 1 << (32 - __builtin_clz(block));
+    }
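+    // Example (illustrative values): with sampleRate 48000 and a 10 ms preferred duration,
+    // block starts at 480; since 480 is not a power of two it is rounded up to 512
+    // (assuming it already exceeds the minimum block size) before the engine is configured.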
+    mDpFreq->configure(block, block >> 1, sampleRate);
+}
+
+RetCode DynamicsProcessingContext::setEngineArchitecture(
+        const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
+    std::lock_guard lg(mMutex);
+    if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
+        if (engineArchitecture.resolutionPreference ==
+            DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION) {
+            dpSetFreqDomainVariant_l(engineArchitecture);
+        } else {
+            LOG(WARNING) << __func__ << toString(engineArchitecture.resolutionPreference)
+                         << " not available now";
+        }
+        mEngineInited = true;
+        mEngineArchitecture = engineArchitecture;
+    }
+    LOG(INFO) << __func__ << engineArchitecture.toString();
+    return RetCode::SUCCESS;
+}
+
+RetCode DynamicsProcessingContext::setPreEq(
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
+    std::lock_guard lg(mMutex);
+    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
+                                        StageType::PREEQ);
+}
+
+RetCode DynamicsProcessingContext::setPostEq(
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
+    std::lock_guard lg(mMutex);
+    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
+                                        StageType::POSTEQ);
+}
+
+RetCode DynamicsProcessingContext::setMbc(
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
+    std::lock_guard lg(mMutex);
+    return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
+                                         StageType::MBC);
+}
+
+RetCode DynamicsProcessingContext::setPreEqBand(
+        const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
+    std::lock_guard lg(mMutex);
+    RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "preEqNotInUse");
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::PREEQ);
+}
+
+RetCode DynamicsProcessingContext::setPostEqBand(
+        const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
+    std::lock_guard lg(mMutex);
+    RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "postEqNotInUse");
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::POSTEQ);
+}
+
+RetCode DynamicsProcessingContext::setMbcBand(
+        const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
+    std::lock_guard lg(mMutex);
+    RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "mbcNotInUse");
+    RETURN_VALUE_IF(
+            !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
+            RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+    return setBands_l<DynamicsProcessing::MbcBandConfig>(bands, StageType::MBC);
+}
+
+RetCode DynamicsProcessingContext::setLimiter(
+        const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
+    std::lock_guard lg(mMutex);
+    RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "limiterNotInUse");
+    RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
+    return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
+}
+
+RetCode DynamicsProcessingContext::setInputGain(
+        const std::vector<DynamicsProcessing::InputGain>& inputGains) {
+    std::lock_guard lg(mMutex);
+    RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
+    return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
+}
+
+DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
+    std::lock_guard lg(mMutex);
+    LOG(INFO) << __func__ << mEngineArchitecture.toString();
+    return mEngineArchitecture;
+}
+
+std::vector<DynamicsProcessing::ChannelConfig> DynamicsProcessingContext::getPreEq() {
+    return getChannelConfig(StageType::PREEQ);
+}
+
+std::vector<DynamicsProcessing::ChannelConfig> DynamicsProcessingContext::getPostEq() {
+    return getChannelConfig(StageType::POSTEQ);
+}
+
+std::vector<DynamicsProcessing::EqBandConfig> DynamicsProcessingContext::getPreEqBand() {
+    return getEqBandConfigs(StageType::PREEQ);
+}
+
+std::vector<DynamicsProcessing::EqBandConfig> DynamicsProcessingContext::getPostEqBand() {
+    return getEqBandConfigs(StageType::POSTEQ);
+}
+
+std::vector<DynamicsProcessing::ChannelConfig> DynamicsProcessingContext::getMbc() {
+    return getChannelConfig(StageType::MBC);
+}
+
+std::vector<DynamicsProcessing::MbcBandConfig> DynamicsProcessingContext::getMbcBand() {
+    std::vector<DynamicsProcessing::MbcBandConfig> bands;
+
+    std::lock_guard lg(mMutex);
+    auto maxBand = mEngineArchitecture.mbcStage.bandCount;
+    for (int32_t ch = 0; ch < mChannelCount; ch++) {
+        auto mbc = getMbc_l(ch);
+        if (!mbc) {
+            continue;
+        }
+        for (int32_t bandId = 0; bandId < maxBand; bandId++) {
+            auto band = mbc->getBand(bandId);
+            if (!band) {
+                continue;
+            }
+            bands.push_back({.channel = ch,
+                             .band = bandId,
+                             .enable = band->isEnabled(),
+                             .cutoffFrequencyHz = band->getCutoffFrequency(),
+                             .attackTimeMs = band->getAttackTime(),
+                             .releaseTimeMs = band->getReleaseTime(),
+                             .ratio = band->getRatio(),
+                             .thresholdDb = band->getThreshold(),
+                             .kneeWidthDb = band->getKneeWidth(),
+                             .noiseGateThresholdDb = band->getNoiseGateThreshold(),
+                             .expanderRatio = band->getExpanderRatio(),
+                             .preGainDb = band->getPreGain(),
+                             .postGainDb = band->getPostGain()});
+        }
+    }
+    return bands;
+}
+
+std::vector<DynamicsProcessing::LimiterConfig> DynamicsProcessingContext::getLimiter() {
+    std::vector<DynamicsProcessing::LimiterConfig> ret;
+
+    std::lock_guard lg(mMutex);
+    for (int32_t ch = 0; ch < mChannelCount; ch++) {
+        auto limiter = getLimiter_l(ch);
+        if (!limiter) {
+            continue;
+        }
+        ret.push_back({.channel = ch,
+                       .enable = limiter->isEnabled(),
+                       .linkGroup = static_cast<int32_t>(limiter->getLinkGroup()),
+                       .attackTimeMs = limiter->getAttackTime(),
+                       .releaseTimeMs = limiter->getReleaseTime(),
+                       .ratio = limiter->getRatio(),
+                       .thresholdDb = limiter->getThreshold(),
+                       .postGainDb = limiter->getPostGain()});
+    }
+    return ret;
+}
+
+std::vector<DynamicsProcessing::InputGain> DynamicsProcessingContext::getInputGain() {
+    std::vector<DynamicsProcessing::InputGain> ret;
+
+    std::lock_guard lg(mMutex);
+    for (int32_t ch = 0; ch < mChannelCount; ch++) {
+        auto channel = getChannel_l(ch);
+        if (!channel) {
+            continue;
+        }
+        ret.push_back({.channel = ch, .gainDb = channel->getInputGain()});
+    }
+    return ret;
+}
+
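+// Forwards the float buffer to the dp_fx frequency-domain engine. Requires the context to be
+// in the ACTIVE state and the engine to have been initialized.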
+IEffect::Status DynamicsProcessingContext::lvmProcess(float* in, float* out, int samples) {
+    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+
+    LOG(DEBUG) << __func__ << " start processing";
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
+                        "notInActiveState");
+        RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
+        mDpFreq->processSamples(in, out, samples);
+    }
+    return {STATUS_OK, samples, samples};
+}
+
+void DynamicsProcessingContext::init() {
+    std::lock_guard lg(mMutex);
+    mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
+    mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+}
+
+dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
+    RETURN_VALUE_IF(mDpFreq == nullptr, nullptr, "DPFreqNotInited");
+
+    return mDpFreq->getChannel(channel);
+}
+
+dp_fx::DPEq* DynamicsProcessingContext::getPreEq_l(int ch) {
+    auto channel = getChannel_l(ch);
+    RETURN_VALUE_IF(channel == nullptr, nullptr, "ChannelNotExist");
+
+    return channel->getPreEq();
+}
+
+dp_fx::DPEq* DynamicsProcessingContext::getPostEq_l(int ch) {
+    auto channel = getChannel_l(ch);
+    RETURN_VALUE_IF(channel == nullptr, nullptr, "ChannelNotExist");
+
+    return channel->getPostEq();
+}
+
+dp_fx::DPMbc* DynamicsProcessingContext::getMbc_l(int ch) {
+    auto channel = getChannel_l(ch);
+    RETURN_VALUE_IF(channel == nullptr, nullptr, "ChannelNotExist");
+
+    return channel->getMbc();
+}
+
+dp_fx::DPLimiter* DynamicsProcessingContext::getLimiter_l(int ch) {
+    auto channel = getChannel_l(ch);
+    RETURN_VALUE_IF(channel == nullptr, nullptr, "ChannelNotExist");
+
+    return channel->getLimiter();
+}
+
+dp_fx::DPBandStage* DynamicsProcessingContext::getStageWithType_l(
+        DynamicsProcessingContext::StageType type, int ch) {
+    switch (type) {
+        case StageType::PREEQ: {
+            return getEqWithType_l(type, ch);
+        }
+        case StageType::POSTEQ: {
+            return getEqWithType_l(type, ch);
+        }
+        case StageType::MBC: {
+            return getMbc_l(ch);
+        }
+        case StageType::LIMITER:
+            FALLTHROUGH_INTENDED;
+        case StageType::INPUTGAIN: {
+            return nullptr;
+        }
+    }
+}
+
+dp_fx::DPEq* DynamicsProcessingContext::getEqWithType_l(DynamicsProcessingContext::StageType type,
+                                                        int ch) {
+    switch (type) {
+        case StageType::PREEQ: {
+            return getPreEq_l(ch);
+        }
+        case StageType::POSTEQ: {
+            return getPostEq_l(ch);
+        }
+        case StageType::MBC:
+            FALLTHROUGH_INTENDED;
+        case StageType::LIMITER:
+            FALLTHROUGH_INTENDED;
+        case StageType::INPUTGAIN: {
+            return nullptr;
+        }
+    }
+}
+
+std::vector<DynamicsProcessing::ChannelConfig> DynamicsProcessingContext::getChannelConfig(
+        StageType type) {
+    std::vector<DynamicsProcessing::ChannelConfig> ret;
+
+    std::lock_guard lg(mMutex);
+    for (int32_t ch = 0; ch < mChannelCount; ch++) {
+        auto stage = getStageWithType_l(type, ch);
+        if (!stage) {
+            continue;
+        }
+        ret.push_back({.channel = ch, .enable = stage->isEnabled()});
+    }
+    return ret;
+}
+
+std::vector<DynamicsProcessing::EqBandConfig> DynamicsProcessingContext::getEqBandConfigs(
+        StageType type) {
+    std::vector<DynamicsProcessing::EqBandConfig> eqBands;
+
+    std::lock_guard lg(mMutex);
+    auto maxBand = mEngineArchitecture.preEqStage.bandCount;
+    for (int32_t ch = 0; ch < mChannelCount; ch++) {
+        auto eq = getEqWithType_l(type, ch);
+        if (!eq) {
+            continue;
+        }
+        for (int32_t bandId = 0; bandId < maxBand; bandId++) {
+            auto band = eq->getBand(bandId);
+            if (!band) {
+                continue;
+            }
+            eqBands.push_back({.channel = ch,
+                               .band = bandId,
+                               .enable = band->isEnabled(),
+                               .cutoffFrequencyHz = band->getCutoffFrequency(),
+                               .gainDb = band->getGain()});
+        }
+    }
+    return eqBands;
+}
+
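+// A band list is accepted only if every band index in [0, bands.size()) appears exactly once
+// and the cutoff frequencies are non-decreasing in band order; for example, bands {0, 1, 2}
+// with cutoffs 200 Hz, 2 kHz and 8 kHz pass, while a repeated or missing band index fails.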
+template <typename T>
+bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
+                                                   int maxBand) {
+    std::vector<float> freqs(bands.size(), -1);
+    for (auto band : bands) {
+        if (!validateChannel(band.channel, maxChannel)) return false;
+        if (!validateBand(band.band, maxBand)) return false;
+        freqs[band.band] = band.cutoffFrequencyHz;
+    }
+    if (std::count(freqs.begin(), freqs.end(), -1)) return false;
+    return std::is_sorted(freqs.begin(), freqs.end());
+}
+
+bool DynamicsProcessingContext::validateLimiterConfig(
+        const std::vector<DynamicsProcessing::LimiterConfig>& cfgs, int maxChannel) {
+    for (auto cfg : cfgs) {
+        if (!validateChannel(cfg.channel, maxChannel)) return false;
+    }
+    return true;
+}
+
+bool DynamicsProcessingContext::validateInputGainConfig(
+        const std::vector<DynamicsProcessing::InputGain>& cfgs, int maxChannel) {
+    for (auto cfg : cfgs) {
+        if (!validateChannel(cfg.channel, maxChannel)) return false;
+    }
+    return true;
+}
+
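+// Applies per-channel enable flags for the given stage. Duplicated, out-of-range, or unknown
+// channels are logged and reported as ERROR_ILLEGAL_PARAMETER, but the remaining valid
+// entries in the same call are still applied.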
+template <typename D>
+RetCode DynamicsProcessingContext::setDpChannels_l(
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels, bool stageInUse,
+        StageType type) {
+    RetCode ret = RetCode::SUCCESS;
+    std::unordered_set<int> channelSet;
+
+    RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
+    for (auto& it : channels) {
+        if (0 != channelSet.count(it.channel)) {
+            LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+        } else {
+            channelSet.insert(it.channel);
+        }
+        if (it.channel < 0 || it.channel >= mChannelCount) {
+            LOG(WARNING) << __func__ << " skip illegal ChannelConfig " << it.toString() << " max "
+                         << mChannelCount;
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+            continue;
+        }
+        auto dp = getStageWithType_l(type, it.channel);
+        if (!dp) {
+            LOG(WARNING) << __func__ << " channel " << it.channel << " does not exist";
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+            continue;
+        }
+        if (dp->isEnabled() != it.enable) {
+            LOG(INFO) << __func__ << it.toString();
+            dp->setEnabled(it.enable);
+        }
+    }
+    return ret;
+}
+
+RetCode DynamicsProcessingContext::setDpChannelBand_l(const std::any& anyConfig, StageType type,
+                                                      std::set<std::pair<int, int>>& chBandSet) {
+    RETURN_VALUE_IF(!anyConfig.has_value(), RetCode::ERROR_ILLEGAL_PARAMETER, "bandInvalid");
+    RetCode ret = RetCode::SUCCESS;
+    std::pair<int, int> chBandKey;
+    switch (type) {
+        case StageType::PREEQ:
+            FALLTHROUGH_INTENDED;
+        case StageType::POSTEQ: {
+            dp_fx::DPEq* dp;
+            const auto& config = std::any_cast<DynamicsProcessing::EqBandConfig>(anyConfig);
+            RETURN_VALUE_IF(
+                    nullptr == (dp = getEqWithType_l(type, config.channel)) || !dp->isEnabled(),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "dpEqNotExist");
+            dp_fx::DPEqBand band;
+            band.init(config.enable, config.cutoffFrequencyHz, config.gainDb);
+            dp->setBand(config.band, band);
+            chBandKey = {config.channel, config.band};
+            break;
+        }
+        case StageType::MBC: {
+            dp_fx::DPMbc* dp;
+            const auto& config = std::any_cast<DynamicsProcessing::MbcBandConfig>(anyConfig);
+            RETURN_VALUE_IF(nullptr == (dp = getMbc_l(config.channel)) || !dp->isEnabled(),
+                            RetCode::ERROR_ILLEGAL_PARAMETER, "dpMbcNotExist");
+            dp_fx::DPMbcBand band;
+            band.init(config.enable, config.cutoffFrequencyHz, config.attackTimeMs,
+                      config.releaseTimeMs, config.ratio, config.thresholdDb, config.kneeWidthDb,
+                      config.noiseGateThresholdDb, config.expanderRatio, config.preGainDb,
+                      config.postGainDb);
+            dp->setBand(config.band, band);
+            chBandKey = {config.channel, config.band};
+            break;
+        }
+        case StageType::LIMITER: {
+            dp_fx::DPChannel* dp;
+            const auto& config = std::any_cast<DynamicsProcessing::LimiterConfig>(anyConfig);
+            RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
+                            RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
+            dp_fx::DPLimiter limiter;
+            limiter.init(mEngineArchitecture.limiterInUse, config.enable, config.linkGroup,
+                         config.attackTimeMs, config.releaseTimeMs, config.ratio,
+                         config.thresholdDb, config.postGainDb);
+            dp->setLimiter(limiter);
+            chBandKey = {config.channel, 0};
+            break;
+        }
+        case StageType::INPUTGAIN: {
+            dp_fx::DPChannel* dp;
+            const auto& config = std::any_cast<DynamicsProcessing::InputGain>(anyConfig);
+            RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
+                            RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
+            dp->setInputGain(config.gainDb);
+            chBandKey = {config.channel, 0};
+            break;
+        }
+    }
+    RETURN_VALUE_IF(0 != chBandSet.count(chBandKey), RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "duplicatedBand");
+    chBandSet.insert(chBandKey);
+    return ret;
+}
+
+template <typename T /* BandConfig */>
+RetCode DynamicsProcessingContext::setBands_l(const std::vector<T>& bands, StageType type) {
+    RetCode ret = RetCode::SUCCESS;
+    std::set<std::pair<int /* channel */, int /* band */>> bandSet;
+
+    for (const auto& it : bands) {
+        if (RetCode::SUCCESS != setDpChannelBand_l(std::make_any<T>(it), type, bandSet)) {
+            LOG(WARNING) << __func__ << " skipping band " << it.toString();
+            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
+            continue;
+        }
+        LOG(INFO) << __func__ << it.toString();
+    }
+    return ret;
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
new file mode 100644
index 0000000..b8539f6
--- /dev/null
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_effects/effect_dynamicsprocessing.h>
+
+#include "effect-impl/EffectContext.h"
+
+#include <any>
+#include <cstddef>
+#include <dsp/DPBase.h>
+#include <dsp/DPFrequency.h>
+
+namespace aidl::android::hardware::audio::effect {
+
+enum DynamicsProcessingState {
+    DYNAMICS_PROCESSING_STATE_UNINITIALIZED,
+    DYNAMICS_PROCESSING_STATE_INITIALIZED,
+    DYNAMICS_PROCESSING_STATE_ACTIVE,
+};
+
+class DynamicsProcessingContext final : public EffectContext {
+  public:
+    DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
+    ~DynamicsProcessingContext();
+
+    RetCode enable();
+    RetCode disable();
+    void reset();
+
+    // override EffectContext::setCommon to update mChannelCount
+    RetCode setCommon(const Parameter::Common& common) override;
+
+    RetCode setEngineArchitecture(const DynamicsProcessing::EngineArchitecture& engineArchitecture);
+    RetCode setPreEq(const std::vector<DynamicsProcessing::ChannelConfig>& eqChannels);
+    RetCode setPostEq(const std::vector<DynamicsProcessing::ChannelConfig>& eqChannels);
+    RetCode setPreEqBand(const std::vector<DynamicsProcessing::EqBandConfig>& eqBands);
+    RetCode setPostEqBand(const std::vector<DynamicsProcessing::EqBandConfig>& eqBands);
+    RetCode setMbc(const std::vector<DynamicsProcessing::ChannelConfig>& mbcChannels);
+    RetCode setMbcBand(const std::vector<DynamicsProcessing::MbcBandConfig>& eqBands);
+    RetCode setLimiter(const std::vector<DynamicsProcessing::LimiterConfig>& limiters);
+    RetCode setInputGain(const std::vector<DynamicsProcessing::InputGain>& gain);
+
+    DynamicsProcessing::EngineArchitecture getEngineArchitecture();
+    std::vector<DynamicsProcessing::ChannelConfig> getPreEq();
+    std::vector<DynamicsProcessing::ChannelConfig> getPostEq();
+    std::vector<DynamicsProcessing::EqBandConfig> getPreEqBand();
+    std::vector<DynamicsProcessing::EqBandConfig> getPostEqBand();
+    std::vector<DynamicsProcessing::ChannelConfig> getMbc();
+    std::vector<DynamicsProcessing::MbcBandConfig> getMbcBand();
+    std::vector<DynamicsProcessing::LimiterConfig> getLimiter();
+    std::vector<DynamicsProcessing::InputGain> getInputGain();
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    static constexpr float kPreferredProcessingDurationMs = 10.0f;
+    static constexpr int kBandCount = 5;
+    std::mutex mMutex;
+    size_t mChannelCount GUARDED_BY(mMutex) = 0;
+    DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
+    std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
+    bool mEngineInited GUARDED_BY(mMutex) = false;
+    DynamicsProcessing::EngineArchitecture mEngineArchitecture GUARDED_BY(mMutex) = {
+            .resolutionPreference =
+                    DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
+            .preferredProcessingDurationMs = kPreferredProcessingDurationMs,
+            .preEqStage = {.inUse = true, .bandCount = kBandCount},
+            .postEqStage = {.inUse = true, .bandCount = kBandCount},
+            .mbcStage = {.inUse = true, .bandCount = kBandCount},
+            .limiterInUse = true,
+    };
+
+    enum class StageType { PREEQ, POSTEQ, MBC, LIMITER, INPUTGAIN };
+
+    void init();
+
+    void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine)
+            REQUIRES(mMutex);
+    dp_fx::DPChannel* getChannel_l(int ch) REQUIRES(mMutex);
+    dp_fx::DPEq* getPreEq_l(int ch) REQUIRES(mMutex);
+    dp_fx::DPEq* getPostEq_l(int ch) REQUIRES(mMutex);
+    dp_fx::DPMbc* getMbc_l(int ch) REQUIRES(mMutex);
+    dp_fx::DPLimiter* getLimiter_l(int ch) REQUIRES(mMutex);
+    dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch) REQUIRES(mMutex);
+    dp_fx::DPEq* getEqWithType_l(StageType type, int ch) REQUIRES(mMutex);
+    template <typename D>
+    RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
+                            bool stageInUse, StageType type) REQUIRES(mMutex);
+    template <typename T /* BandConfig */>
+    RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+    RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
+                               std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
+
+    std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
+    std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
+
+    template <typename T /* BandConfig */>
+    bool validateBandConfig(const std::vector<T>& bands, int maxChannel, int maxBand);
+    bool validateLimiterConfig(const std::vector<DynamicsProcessing::LimiterConfig>& cfgs,
+                               int maxChannel);
+    bool validateInputGainConfig(const std::vector<DynamicsProcessing::InputGain>& cfgs,
+                                 int maxChannel);
+
+    inline bool validateChannel(int ch, int maxCh) { return ch >= 0 && ch < maxCh; }
+    inline bool validateBand(int band, int maxBand) { return band >= 0 && band < maxBand; }
+};
+
+}  // namespace aidl::android::hardware::audio::effect
\ No newline at end of file
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index 22838a3..d94093e 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -39,6 +39,7 @@
     header_libs: [
         "libaudioeffects",
         "libeffects_headers",
+        "liberror_headers",
     ],
     export_header_lib_headers: ["libeffects_headers"],
 }
@@ -56,7 +57,6 @@
         "-Werror",
     ],
 
-
     shared_libs: [
         "libeffectsconfig",
         "libeffects",
diff --git a/media/libeffects/factory/EffectsXmlConfigLoader.cpp b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
index 30a9007..9bff136 100644
--- a/media/libeffects/factory/EffectsXmlConfigLoader.cpp
+++ b/media/libeffects/factory/EffectsXmlConfigLoader.cpp
@@ -64,7 +64,7 @@
 
     std::string absolutePath;
     if (!resolveLibrary(relativePath, &absolutePath)) {
-        ALOGE("Could not find library in effect directories: %s", relativePath);
+        ALOGE("%s Could not find library in effect directories: %s", __func__, relativePath);
         libEntry->path = strdup(relativePath);
         return false;
     }
@@ -75,20 +75,20 @@
     std::unique_ptr<void, decltype(dlclose)*> libHandle(dlopen(path, RTLD_NOW),
                                                        dlclose);
     if (libHandle == nullptr) {
-        ALOGE("Could not dlopen library %s: %s", path, dlerror());
+        ALOGE("%s Could not dlopen library %s: %s", __func__, path, dlerror());
         return false;
     }
 
     auto* description = static_cast<audio_effect_library_t*>(
           dlsym(libHandle.get(), AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR));
     if (description == nullptr) {
-        ALOGE("Invalid effect library, failed not find symbol '%s' in %s: %s",
+        ALOGE("%s Invalid effect library, failed not find symbol '%s' in %s: %s", __func__,
               AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR, path, dlerror());
         return false;
     }
 
     if (description->tag != AUDIO_EFFECT_LIBRARY_TAG) {
-        ALOGE("Bad tag %#08x in description structure, expected %#08x for library %s",
+        ALOGE("%s Bad tag %#08x in description structure, expected %#08x for library %s", __func__,
               description->tag, AUDIO_EFFECT_LIBRARY_TAG, path);
         return false;
     }
@@ -96,8 +96,8 @@
     uint32_t majorVersion = EFFECT_API_VERSION_MAJOR(description->version);
     uint32_t expectedMajorVersion = EFFECT_API_VERSION_MAJOR(EFFECT_LIBRARY_API_VERSION_CURRENT);
     if (majorVersion != expectedMajorVersion) {
-        ALOGE("Unsupported major version %#08x, expected %#08x for library %s",
-              majorVersion, expectedMajorVersion, path);
+        ALOGE("%s Unsupported major version %#08x, expected %#08x for library %s",
+              __func__, majorVersion, expectedMajorVersion, path);
         return false;
     }
 
@@ -155,14 +155,13 @@
 {
     size_t nbSkippedElement = 0;
     for (auto& library : libs) {
-
         // Construct a lib entry
         auto libEntry = makeUniqueC<lib_entry_t>();
-        libEntry->name = strdup(library.name.c_str());
+        libEntry->name = strdup(library->name.c_str());
         libEntry->effects = nullptr;
         pthread_mutex_init(&libEntry->lock, nullptr);
 
-        if (!loadLibrary(library.path.c_str(), libEntry.get())) {
+        if (!loadLibrary(library->path.c_str(), libEntry.get())) {
             // Register library load failure
             listPush(std::move(libEntry), libFailedList);
             ++nbSkippedElement;
@@ -209,24 +208,24 @@
     UniqueCPtr<effect_descriptor_t> effectDesc;
 };
 
-LoadEffectResult loadEffect(const EffectImpl& effect, const std::string& name,
-                            list_elem_t* libList) {
+LoadEffectResult loadEffect(const std::shared_ptr<const EffectImpl>& effect,
+                            const std::string& name, list_elem_t* libList) {
     LoadEffectResult result;
 
     // Find the effect library
-    result.lib = findLibrary(effect.library->name.c_str(), libList);
+    result.lib = findLibrary(effect->library->name.c_str(), libList);
     if (result.lib == nullptr) {
-        ALOGE("Could not find library %s to load effect %s",
-              effect.library->name.c_str(), name.c_str());
+        ALOGE("%s Could not find library %s to load effect %s",
+              __func__, effect->library->name.c_str(), name.c_str());
         return result;
     }
 
     result.effectDesc = makeUniqueC<effect_descriptor_t>();
 
     // Get the effect descriptor
-    if (result.lib->desc->get_descriptor(&effect.uuid, result.effectDesc.get()) != 0) {
+    if (result.lib->desc->get_descriptor(&effect->uuid, result.effectDesc.get()) != 0) {
         ALOGE("Error querying effect %s on lib %s",
-              uuidToString(effect.uuid), result.lib->name);
+              uuidToString(effect->uuid), result.lib->name);
         result.effectDesc.reset();
         return result;
     }
@@ -241,14 +240,15 @@
     // Check effect is supported
     uint32_t expectedMajorVersion = EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION);
     if (EFFECT_API_VERSION_MAJOR(result.effectDesc->apiVersion) != expectedMajorVersion) {
-        ALOGE("Bad API version %#08x for effect %s in lib %s, expected major %#08x",
+        ALOGE("%s Bad API version %#08x for effect %s in lib %s, expected major %#08x", __func__,
               result.effectDesc->apiVersion, name.c_str(), result.lib->name, expectedMajorVersion);
         return result;
     }
 
     lib_entry_t *_;
-    if (findEffect(nullptr, &effect.uuid, &_, nullptr) == 0) {
-        ALOGE("Effect %s uuid %s already exist", uuidToString(effect.uuid), name.c_str());
+    if (findEffect(nullptr, &effect->uuid, &_, nullptr) == 0) {
+        ALOGE("%s Effect %s uuid %s already exist", __func__, uuidToString(effect->uuid),
+              name.c_str());
         return result;
     }
 
@@ -261,8 +261,11 @@
     size_t nbSkippedElement = 0;
 
     for (auto& effect : effects) {
+        if (!effect) {
+            continue;
+        }
 
-        auto effectLoadResult = loadEffect(effect, effect.name, libList);
+        auto effectLoadResult = loadEffect(effect, effect->name, libList);
         if (!effectLoadResult.success) {
             if (effectLoadResult.effectDesc != nullptr) {
                 listPush(std::move(effectLoadResult.effectDesc), skippedEffects);
@@ -271,9 +274,9 @@
             continue;
         }
 
-        if (effect.isProxy) {
-            auto swEffectLoadResult = loadEffect(effect.libSw, effect.name + " libsw", libList);
-            auto hwEffectLoadResult = loadEffect(effect.libHw, effect.name + " libhw", libList);
+        if (effect->isProxy) {
+            auto swEffectLoadResult = loadEffect(effect->libSw, effect->name + " libsw", libList);
+            auto hwEffectLoadResult = loadEffect(effect->libHw, effect->name + " libhw", libList);
             if (!swEffectLoadResult.success || !hwEffectLoadResult.success) {
                 // Push the main effect in the skipped list even if only a subeffect is invalid
                 // as the main effect is not usable without its subeffects.
@@ -287,7 +290,7 @@
             // get_descriptor call, we replace it with the corresponding
             // sw effect descriptor, but keep the Proxy UUID
             *effectLoadResult.effectDesc = *swEffectLoadResult.effectDesc;
-            effectLoadResult.effectDesc->uuid = effect.uuid;
+            effectLoadResult.effectDesc->uuid = effect->uuid;
 
             effectLoadResult.effectDesc->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
 
@@ -326,8 +329,8 @@
                                loadEffects(result.parsedConfig->effects, gLibraryList,
                                            &gSkippedEffects, &gSubEffectList);
 
-    ALOGE_IF(result.nbSkippedElement != 0, "%zu errors during loading of configuration: %s",
-             result.nbSkippedElement,
+    ALOGE_IF(result.nbSkippedElement != 0, "%s %zu errors during loading of configuration: %s",
+             __func__, result.nbSkippedElement,
              result.configPath.empty() ? "No config file found" : result.configPath.c_str());
 
     return result.nbSkippedElement;
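
The log-tagging changes above all sit on a loader path that follows the usual dlopen/dlsym pattern: open the shared object, resolve the audio_effect_library_t descriptor symbol, then validate its tag and major API version before registering the entry. The following minimal sketch of that pattern is not part of the patch; it assumes the effect library types come from <hardware/audio_effect.h>, and the checkEffectLibrary name is illustrative only.

    #include <dlfcn.h>
    #include <stdio.h>
    #include <hardware/audio_effect.h>  // audio_effect_library_t, AUDIO_EFFECT_LIBRARY_TAG, ...

    // Open an effect library and validate its descriptor, mirroring loadLibrary() above.
    bool checkEffectLibrary(const char* path) {
        void* handle = dlopen(path, RTLD_NOW);
        if (handle == nullptr) {
            fprintf(stderr, "%s: dlopen(%s) failed: %s\n", __func__, path, dlerror());
            return false;
        }
        auto* desc = static_cast<audio_effect_library_t*>(
                dlsym(handle, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR));
        if (desc == nullptr || desc->tag != AUDIO_EFFECT_LIBRARY_TAG) {
            fprintf(stderr, "%s: %s is not a valid effect library\n", __func__, path);
            dlclose(handle);
            return false;
        }
        // Only libraries built against the same major effect API version are accepted.
        if (EFFECT_API_VERSION_MAJOR(desc->version) !=
            EFFECT_API_VERSION_MAJOR(EFFECT_LIBRARY_API_VERSION_CURRENT)) {
            dlclose(handle);
            return false;
        }
        return true;  // the handle is intentionally kept open on success, as the loader does
    }
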
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 03ce329..fc80211 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -22,6 +22,24 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_defaults {
+    name : "hapticgeneratordefaults",
+    srcs: [
+        "Processors.cpp",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "liblog",
+        "libutils",
+        "libvibratorutils",
+    ],
+    header_libs: [
+        "libaudioeffects",
+    ],
+    relative_install_path: "soundfx",
+}
+
 cc_library_shared {
     name: "libhapticgenerator",
 
@@ -29,7 +47,10 @@
 
     srcs: [
         "EffectHapticGenerator.cpp",
-        "Processors.cpp",
+    ],
+
+    defaults: [
+        "hapticgeneratordefaults",
     ],
 
     cflags: [
@@ -42,19 +63,29 @@
                        // with/without `-ffast-math` for more context.
         "-fvisibility=hidden",
     ],
+}
 
-    shared_libs: [
-        "libaudioutils",
-        "libbase",
-        "libbinder",
-        "liblog",
-        "libutils",
-        "libvibrator",
+cc_library_shared {
+    name: "libhapticgeneratoraidl",
+
+    srcs: [
+        "aidl/EffectHapticGenerator.cpp",
+        "aidl/HapticGeneratorContext.cpp",
+        ":effectCommonFile",
     ],
 
-    relative_install_path: "soundfx",
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+        "hapticgeneratordefaults",
+    ],
 
-    header_libs: [
-        "libaudioeffects",
+    cflags: [
+        "-Wthread-safety",
+    ],
+
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
     ],
 }
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index 3137e13..d8cf20e 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -22,6 +22,7 @@
 
 #include <algorithm>
 #include <memory>
+#include <sstream>
 #include <string>
 #include <utility>
 
@@ -94,6 +95,33 @@
     return defaultValue;
 }
 
+std::string hapticParamToString(const struct HapticGeneratorParam& param) {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Parameters:\n";
+    ss << "\t\t- resonant frequency: " << param.resonantFrequency << '\n';
+    ss << "\t\t- bpf Q: " << param.bpfQ << '\n';
+    ss << "\t\t- slow env normalization power: " << param.slowEnvNormalizationPower << '\n';
+    ss << "\t\t- bsf zero Q: " << param.bsfZeroQ << '\n';
+    ss << "\t\t- bsf pole Q: " << param.bsfPoleQ << '\n';
+    ss << "\t\t- distortion corner frequency: " << param.distortionCornerFrequency << '\n';
+    ss << "\t\t- distortion input gain: " << param.distortionInputGain << '\n';
+    ss << "\t\t- distortion cube threshold: " << param.distortionCubeThreshold << '\n';
+    ss << "\t\t- distortion output gain: " << param.distortionOutputGain << '\n';
+    return ss.str();
+}
+
+std::string hapticSettingToString(const struct HapticGeneratorParam& param) {
+    std::stringstream ss;
+    ss << "\t\tHaptic setting:\n";
+    ss << "\t\t- tracks intensity map:\n";
+    for (const auto&[id, intensity] : param.id2Intensity) {
+        ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+    }
+    ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+    ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
+    return ss.str();
+}
+
 int HapticGenerator_Init(struct HapticGeneratorContext *context) {
     context->itfe = &gHapticGeneratorInterface;
 
@@ -129,7 +157,7 @@
     context->param.distortionCubeThreshold = 0.1f;
     context->param.distortionOutputGain = getFloatProperty(
             "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
-    ALOGD("Using distortion output gain as %f", context->param.distortionOutputGain);
+    ALOGD("%s\n%s", __func__, hapticParamToString(context->param).c_str());
 
     context->state = HAPTICGENERATOR_STATE_INITIALIZED;
     return 0;
@@ -289,6 +317,7 @@
         }
         int id = *(int *) value;
         os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
+        ALOGD("Setting haptic intensity as %d", hapticIntensity);
         if (hapticIntensity == os::HapticScale::MUTE) {
             context->param.id2Intensity.erase(id);
         } else {
@@ -313,6 +342,10 @@
         context->param.bsfZeroQ = isnan(qFactor) ? DEFAULT_BSF_POLE_Q : qFactor;
         context->param.bsfPoleQ = context->param.bsfZeroQ / 2.0f;
         context->param.maxHapticAmplitude = maxAmplitude;
+        ALOGD("Updating vibrator info, resonantFrequency=%f, bsfZeroQ=%f, bsfPoleQ=%f, "
+              "maxHapticAmplitude=%f",
+              context->param.resonantFrequency, context->param.bsfZeroQ, context->param.bsfPoleQ,
+              context->param.maxHapticAmplitude);
 
         if (context->processorsRecord.bpf != nullptr) {
             context->processorsRecord.bpf->setCoefficients(
@@ -358,6 +391,11 @@
     return in;
 }
 
+void HapticGenerator_Dump(int32_t fd, const struct HapticGeneratorParam& param) {
+    dprintf(fd, "%s", hapticParamToString(param).c_str());
+    dprintf(fd, "%s", hapticSettingToString(param).c_str());
+}
+
 } // namespace (anonymous)
 
 //-----------------------------------------------------------------------------
@@ -562,6 +600,10 @@
         case EFFECT_CMD_SET_AUDIO_MODE:
             break;
 
+        case EFFECT_CMD_DUMP:
+            HapticGenerator_Dump(*(reinterpret_cast<int32_t*>(cmdData)), context->param);
+            break;
+
         default:
             ALOGW("HapticGenerator_Command invalid command %u", cmdCode);
             return -EINVAL;
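
The new EFFECT_CMD_DUMP case above treats cmdData as an int32_t file descriptor and writes the formatted parameters to it with dprintf(). The sketch below is not part of the patch; it only shows that handler shape in isolation, with DumpableParams, paramsToString and dumpParams as hypothetical stand-ins for HapticGeneratorParam, hapticParamToString and HapticGenerator_Dump.

    #include <stdint.h>
    #include <stdio.h>   // dprintf
    #include <sstream>
    #include <string>

    // Illustrative stand-in for HapticGeneratorParam.
    struct DumpableParams {
        float resonantFrequency;
        float maxHapticAmplitude;
    };

    std::string paramsToString(const DumpableParams& p) {
        std::stringstream ss;
        ss << "\t\t- resonant frequency: " << p.resonantFrequency << '\n';
        ss << "\t\t- max haptic amplitude: " << p.maxHapticAmplitude << '\n';
        return ss.str();
    }

    void dumpParams(int32_t fd, const DumpableParams& p) {
        dprintf(fd, "%s", paramsToString(p).c_str());  // write straight to the dump fd
    }

    int main() {
        dumpParams(1 /* stdout */, {150.0f, 1.0f});    // example values only
        return 0;
    }
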
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
new file mode 100644
index 0000000..031477f
--- /dev/null
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_HapticGeneratorImpl"
+
+#include <android-base/logging.h>
+#include <audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "EffectHapticGenerator.h"
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGenerator;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator;
+using aidl::android::hardware::audio::effect::HapticGeneratorImpl;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::media::audio::common::AudioUuid;
+
+extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<HapticGeneratorImpl>();
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    *_aidl_return = HapticGeneratorImpl::kDescriptor;
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+const std::string HapticGeneratorImpl::kEffectName = "Haptic Generator";
+const Descriptor HapticGeneratorImpl::kDescriptor = {
+        .common = {.id = {.type = getEffectTypeUuidHapticGenerator(),
+                          .uuid = getEffectImplUuidHapticGenerator(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
+                   .name = HapticGeneratorImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"}};
+
+ndk::ScopedAStatus HapticGeneratorImpl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << __func__ << kDescriptor.toString();
+    *_aidl_return = kDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus HapticGeneratorImpl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->reset();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus HapticGeneratorImpl::setParameterSpecific(const Parameter::Specific& specific) {
+    RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT,
+              "EffectNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto& hgParam = specific.get<Parameter::Specific::hapticGenerator>();
+    auto tag = hgParam.getTag();
+
+    switch (tag) {
+        case HapticGenerator::hapticScales: {
+            RETURN_IF(mContext->setHgHapticScales(hgParam.get<HapticGenerator::hapticScales>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setHapticScaleFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case HapticGenerator::vibratorInfo: {
+            RETURN_IF(mContext->setHgVibratorInformation(
+                              hgParam.get<HapticGenerator::vibratorInfo>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setVibratorInfoFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus HapticGeneratorImpl::getParameterSpecific(const Parameter::Id& id,
+                                                             Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+    RETURN_IF(Parameter::Id::hapticGeneratorTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+    auto hgId = id.get<Parameter::Id::hapticGeneratorTag>();
+    auto hgIdTag = hgId.getTag();
+    switch (hgIdTag) {
+        case HapticGenerator::Id::commonTag:
+            return getParameterHapticGenerator(hgId.get<HapticGenerator::Id::commonTag>(),
+                                               specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(hgIdTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus HapticGeneratorImpl::getParameterHapticGenerator(const HapticGenerator::Tag& tag,
+                                                                    Parameter::Specific* specific) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    HapticGenerator hgParam;
+    switch (tag) {
+        case HapticGenerator::hapticScales: {
+            hgParam.set<HapticGenerator::hapticScales>(mContext->getHgHapticScales());
+            break;
+        }
+        case HapticGenerator::vibratorInfo: {
+            hgParam.set<HapticGenerator::vibratorInfo>(mContext->getHgVibratorInformation());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::hapticGenerator>(hgParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> HapticGeneratorImpl::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exist";
+        return mContext;
+    }
+
+    mContext = std::make_shared<HapticGeneratorContext>(1 /* statusFmqDepth */, common);
+    return mContext;
+}
+
+RetCode HapticGeneratorImpl::releaseContext() {
+    if (mContext) {
+        mContext->reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status HapticGeneratorImpl::effectProcessImpl(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, samples);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
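
The two extern "C" factory symbols at the top of this file are what the vendor effect factory resolves in order to query and instantiate the effect by implementation UUID. The harness below is not part of the patch and is only a sketch of how those symbols could be exercised in a standalone test: it assumes direct linkage against libhapticgeneratoraidl and that binder_exception_t/EX_NONE come from <android/binder_status.h>; in production the symbols are looked up dynamically rather than linked.

    #include <memory>
    #include <android/binder_status.h>             // binder_exception_t, EX_NONE (assumed path)
    #include <system/audio_effects/effect_uuid.h>
    #include "EffectHapticGenerator.h"

    using aidl::android::hardware::audio::effect::Descriptor;
    using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGenerator;
    using aidl::android::hardware::audio::effect::IEffect;
    using aidl::android::media::audio::common::AudioUuid;

    // Declarations for the factory symbols defined in EffectHapticGenerator.cpp above.
    extern "C" binder_exception_t createEffect(const AudioUuid*, std::shared_ptr<IEffect>*);
    extern "C" binder_exception_t queryEffect(const AudioUuid*, Descriptor*);

    int main() {
        const AudioUuid uuid = getEffectImplUuidHapticGenerator();

        Descriptor desc;
        if (queryEffect(&uuid, &desc) != EX_NONE) return 1;     // static descriptor lookup
        std::shared_ptr<IEffect> effect;
        if (createEffect(&uuid, &effect) != EX_NONE) return 1;  // instantiates HapticGeneratorImpl
        return effect != nullptr ? 0 : 1;
    }
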
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
new file mode 100644
index 0000000..fe9616a
--- /dev/null
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "HapticGeneratorContext.h"
+#include "effect-impl/EffectImpl.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class HapticGeneratorImpl final : public EffectImpl {
+  public:
+    static const std::string kEffectName;
+    static const Descriptor kDescriptor;
+    HapticGeneratorImpl() { LOG(DEBUG) << __func__; }
+    ~HapticGeneratorImpl() {
+        cleanUp();
+        LOG(DEBUG) << __func__;
+    }
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    RetCode releaseContext() override;
+
+    std::shared_ptr<EffectContext> getContext() override { return mContext; }
+    std::string getEffectName() override { return kEffectName; }
+
+  private:
+    std::shared_ptr<HapticGeneratorContext> mContext;
+    ndk::ScopedAStatus getParameterHapticGenerator(const HapticGenerator::Tag& tag,
+                                                   Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
new file mode 100644
index 0000000..de44e05
--- /dev/null
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -0,0 +1,347 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_HapticGeneratorContext"
+
+#include <Utils.h>
+#include <android-base/logging.h>
+#include <android-base/parsedouble.h>
+#include <android-base/properties.h>
+
+#include "HapticGeneratorContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
+    : EffectContext(statusDepth, common) {
+    LOG(DEBUG) << __func__;
+    mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
+    mSampleRate = common.input.base.sampleRate;
+    mFrameCount = common.input.frameCount;
+    init_params(common.input.base.channelMask, common.output.base.channelMask);
+}
+
+HapticGeneratorContext::~HapticGeneratorContext() {
+    LOG(DEBUG) << __func__;
+    mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
+}
+
+RetCode HapticGeneratorContext::enable() {
+    if (mState != HAPTIC_GENERATOR_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = HAPTIC_GENERATOR_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode HapticGeneratorContext::disable() {
+    if (mState != HAPTIC_GENERATOR_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+void HapticGeneratorContext::reset() {
+    for (auto& filter : mProcessorsRecord.filters) {
+        filter->clear();
+    }
+    for (auto& slowEnv : mProcessorsRecord.slowEnvs) {
+        slowEnv->clear();
+    }
+    for (auto& distortion : mProcessorsRecord.distortions) {
+        distortion->clear();
+    }
+}
+
+RetCode HapticGeneratorContext::setHgHapticScales(
+        const std::vector<HapticGenerator::HapticScale>& hapticScales) {
+    std::lock_guard lg(mMutex);
+    for (auto hapticScale : hapticScales) {
+        mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
+    }
+    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
+        mParams.mMaxVibratorScale = std::max(mParams.mMaxVibratorScale, vibratorScale);
+    }
+    return RetCode::SUCCESS;
+}
+
+HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
+    std::lock_guard lg(mMutex);
+    return mParams.mVibratorInfo;
+}
+
+std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
+    std::vector<HapticGenerator::HapticScale> result;
+    std::lock_guard lg(mMutex);
+    for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
+        result.push_back({id, vibratorScale});
+    }
+    return result;
+}
+
+RetCode HapticGeneratorContext::setHgVibratorInformation(
+        const HapticGenerator::VibratorInformation& vibratorInfo) {
+    {
+        std::lock_guard lg(mMutex);
+        mParams.mVibratorInfo = vibratorInfo;
+
+        if (mProcessorsRecord.bpf != nullptr) {
+            mProcessorsRecord.bpf->setCoefficients(
+                    ::android::audio_effect::haptic_generator::bpfCoefs(
+                            mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
+        }
+        if (mProcessorsRecord.bsf != nullptr) {
+            mProcessorsRecord.bsf->setCoefficients(
+                    ::android::audio_effect::haptic_generator::bsfCoefs(
+                            mParams.mVibratorInfo.resonantFrequencyHz,
+                            mParams.mVibratorInfo.qFactor, mParams.mVibratorInfo.qFactor / 2.0f,
+                            mSampleRate));
+        }
+    }
+    configure();
+    return RetCode::SUCCESS;
+}
+
+IEffect::Status HapticGeneratorContext::lvmProcess(float* in, float* out, int samples) {
+    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    RETURN_VALUE_IF(getInputFrameSize() != getOutputFrameSize(), status, "FrameSizeMismatch");
+    auto frameSize = getInputFrameSize();
+    RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    // The audio data must not be modified; it is just written to the
+    // output buffer according to the access mode.
+    bool accumulate = false;
+    if (in != out) {
+        for (int i = 0; i < samples; i++) {
+            if (accumulate) {
+                out[i] += in[i];
+            } else {
+                out[i] = in[i];
+            }
+        }
+    }
+
+    if (mState != HAPTIC_GENERATOR_STATE_ACTIVE) {
+        return status;
+    }
+
+    std::lock_guard lg(mMutex);
+    if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
+        // Haptic channels are muted, no need to generate haptic data.
+        return {STATUS_OK, samples, samples};
+    }
+
+    // Resize buffer if the haptic sample count is greater than buffer size.
+    size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    if (hapticSampleCount > mInputBuffer.size()) {
+        // The inputBuffer and outputBuffer must have the same size, which must be at least
+        // the haptic sample count.
+        mInputBuffer.resize(hapticSampleCount);
+        mOutputBuffer.resize(hapticSampleCount);
+    }
+
+    // Construct input buffer according to haptic channel source
+    for (size_t i = 0; i < mFrameCount; ++i) {
+        for (size_t j = 0; j < mParams.mHapticChannelCount; ++j) {
+            mInputBuffer[i * mParams.mHapticChannelCount + j] =
+                    in[i * mParams.mAudioChannelCount + mParams.mHapticChannelSource[j]];
+        }
+    }
+
+    float* hapticOutBuffer =
+            runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
+    ::android::os::scaleHapticData(
+            hapticOutBuffer, hapticSampleCount,
+            static_cast<::android::os::HapticScale>(mParams.mMaxVibratorScale),
+            mParams.mVibratorInfo.qFactor);
+
+    // For haptic data, the haptic playback thread will copy the data from effect input
+    // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
+    // In that case, copy haptic data to input buffer instead of output buffer.
+    // Note: this may not work with rpc/binder calls
+    const size_t offset = static_cast<size_t>(samples);
+    for (size_t i = 0; i < hapticSampleCount; ++i) {
+        in[offset + i] = hapticOutBuffer[i];
+    }
+    return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
+}
+
+void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
+                                         media::audio::common::AudioChannelLayout outputChMask) {
+    std::lock_guard lg(mMutex);
+    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+
+    mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+    mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+    LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
+    for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
+        // By default, use the first audio channel to generate haptic channels.
+        mParams.mHapticChannelSource[i] = 0;
+    }
+
+    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
+}
+
+float HapticGeneratorContext::getDistortionOutputGain() {
+    float distortionOutputGain = getFloatProperty(
+            "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
+    LOG(DEBUG) << "Using distortion output gain as " << distortionOutputGain;
+    return distortionOutputGain;
+}
+
+float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) {
+    float result;
+    std::string value = ::android::base::GetProperty(key, "");
+    if (!value.empty() && ::android::base::ParseFloat(value, &result)) {
+        return result;
+    }
+    return defaultValue;
+}
+
+void HapticGeneratorContext::addBiquadFilter(std::shared_ptr<HapticBiquadFilter> filter) {
+    // The process chain captures the shared pointer of the filter in lambda.
+    // The process record will keep a shared pointer to the filter so that it is possible to
+    // access the filter outside of the process chain.
+    mProcessorsRecord.filters.push_back(filter);
+    mProcessingChain.push_back([filter](float* out, const float* in, size_t frameCount) {
+        filter->process(out, in, frameCount);
+    });
+}
+
+/**
+ * Build haptic generator processing chain.
+ */
+void HapticGeneratorContext::buildProcessingChain() {
+    std::lock_guard lg(mMutex);
+    const size_t channelCount = mParams.mHapticChannelCount;
+    float highPassCornerFrequency = 50.0f;
+    auto hpf = ::android::audio_effect::haptic_generator::createHPF2(highPassCornerFrequency,
+                                                                     mSampleRate, channelCount);
+    addBiquadFilter(hpf);
+    float lowPassCornerFrequency = 9000.0f;
+    auto lpf = ::android::audio_effect::haptic_generator::createLPF2(lowPassCornerFrequency,
+                                                                     mSampleRate, channelCount);
+    addBiquadFilter(lpf);
+
+    auto ramp = std::make_shared<::android::audio_effect::haptic_generator::Ramp>(
+            channelCount);  // ramp = half-wave rectifier.
+    // The process chain captures a shared pointer to the ramp in the lambda below, and the
+    // process record keeps another shared pointer so that the ramp can be accessed outside
+    // of the process chain.
+    mProcessorsRecord.ramps.push_back(ramp);
+    mProcessingChain.push_back([ramp](float* out, const float* in, size_t frameCount) {
+        ramp->process(out, in, frameCount);
+    });
+
+    highPassCornerFrequency = 60.0f;
+    hpf = ::android::audio_effect::haptic_generator::createHPF2(highPassCornerFrequency,
+                                                                mSampleRate, channelCount);
+    addBiquadFilter(hpf);
+    lowPassCornerFrequency = 700.0f;
+    lpf = ::android::audio_effect::haptic_generator::createLPF2(lowPassCornerFrequency, mSampleRate,
+                                                                channelCount);
+    addBiquadFilter(lpf);
+
+    lowPassCornerFrequency = 400.0f;
+    lpf = ::android::audio_effect::haptic_generator::createLPF2(lowPassCornerFrequency, mSampleRate,
+                                                                channelCount);
+    addBiquadFilter(lpf);
+    lowPassCornerFrequency = 500.0f;
+    lpf = ::android::audio_effect::haptic_generator::createLPF2(lowPassCornerFrequency, mSampleRate,
+                                                                channelCount);
+    addBiquadFilter(lpf);
+
+    auto bpf = ::android::audio_effect::haptic_generator::createBPF(
+            mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate, channelCount);
+    mProcessorsRecord.bpf = bpf;
+    addBiquadFilter(bpf);
+
+    float normalizationPower = DEFAULT_SLOW_ENV_NORMALIZATION_POWER;
+    // The process chain captures a shared pointer to the slow envelope in the lambda below, and
+    // the process record keeps another shared pointer so that the slow envelope can be accessed
+    // outside of the process chain.
+    // SlowEnvelope = partial normalizer, or AGC.
+    auto slowEnv = std::make_shared<::android::audio_effect::haptic_generator::SlowEnvelope>(
+            5.0f /*envCornerFrequency*/, mSampleRate, normalizationPower, 0.01f /*envOffset*/,
+            channelCount);
+    mProcessorsRecord.slowEnvs.push_back(slowEnv);
+    mProcessingChain.push_back([slowEnv](float* out, const float* in, size_t frameCount) {
+        slowEnv->process(out, in, frameCount);
+    });
+
+    auto bsf = ::android::audio_effect::haptic_generator::createBSF(
+            mParams.mVibratorInfo.resonantFrequencyHz, mParams.mVibratorInfo.qFactor,
+            mParams.mVibratorInfo.qFactor / 2.0f, mSampleRate, channelCount);
+    mProcessorsRecord.bsf = bsf;
+    addBiquadFilter(bsf);
+
+    // The process chain captures a shared pointer to the Distortion in the lambda below, and the
+    // process record keeps another shared pointer so that the Distortion can be accessed outside
+    // of the process chain.
+    auto distortion = std::make_shared<::android::audio_effect::haptic_generator::Distortion>(
+            DEFAULT_DISTORTION_CORNER_FREQUENCY, mSampleRate, DEFAULT_DISTORTION_INPUT_GAIN,
+            DEFAULT_DISTORTION_CUBE_THRESHOLD, getDistortionOutputGain(), channelCount);
+    mProcessorsRecord.distortions.push_back(distortion);
+    mProcessingChain.push_back([distortion](float* out, const float* in, size_t frameCount) {
+        distortion->process(out, in, frameCount);
+    });
+}
+
+void HapticGeneratorContext::configure() {
+    mProcessingChain.clear();
+    mProcessorsRecord.filters.clear();
+    mProcessorsRecord.ramps.clear();
+    mProcessorsRecord.slowEnvs.clear();
+    mProcessorsRecord.distortions.clear();
+
+    buildProcessingChain();
+}
+
+/**
+ * Run the processing chain to generate haptic data from audio data
+ *
+ * @param buf1 a buffer contains raw audio data
+ * @param buf2 a buffer that is large enough to keep all the data
+ * @param frameCount frame count of the data
+ *
+ * @return a pointer to the output buffer
+ */
+float* HapticGeneratorContext::runProcessingChain(float* buf1, float* buf2, size_t frameCount) {
+    float* in = buf1;
+    float* out = buf2;
+    for (const auto& processingFunc : mProcessingChain) {
+        processingFunc(out, in, frameCount);
+        std::swap(in, out);
+    }
+    return in;
+}
+
+}  // namespace aidl::android::hardware::audio::effect
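
runProcessingChain() above ping-pongs between its two buffers: every stage writes out from in, the pointers are swapped, and after the last stage the freshest data sits behind in, which is why that pointer is returned. The self-contained sketch below is not part of the patch; it shows the same pattern with a trivial two-stage chain standing in for the real filters, and all names are illustrative.

    #include <cstddef>
    #include <functional>
    #include <utility>
    #include <vector>

    using Stage = std::function<void(float* out, const float* in, size_t n)>;

    // Run the stages ping-pong style; the returned pointer is whichever buffer holds
    // the final output (buf1 for an even number of stages, buf2 for an odd number).
    float* runChain(const std::vector<Stage>& chain, float* buf1, float* buf2, size_t n) {
        float* in = buf1;
        float* out = buf2;
        for (const auto& stage : chain) {
            stage(out, in, n);
            std::swap(in, out);  // the data just produced becomes the next stage's input
        }
        return in;
    }

    int main() {
        std::vector<float> a = {1.0f, 2.0f, 3.0f}, b(a.size());
        std::vector<Stage> chain = {
                [](float* out, const float* in, size_t n) {
                    for (size_t i = 0; i < n; ++i) out[i] = 2.0f * in[i];  // stage 1: gain
                },
                [](float* out, const float* in, size_t n) {
                    for (size_t i = 0; i < n; ++i) out[i] = in[i] + 1.0f;  // stage 2: offset
                },
        };
        float* result = runChain(chain, a.data(), b.data(), a.size());
        return result == a.data() ? 0 : 1;  // two stages: the result lands back in 'a' = {3, 5, 7}
    }

Because the chain alternates between the two buffers, both must be at least as large as the data being processed, which is one reason mInputBuffer and mOutputBuffer are resized to the same haptic sample count above.
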
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
new file mode 100644
index 0000000..a0a0a4c
--- /dev/null
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <vibrator/ExternalVibrationUtils.h>
+#include <map>
+
+#include "Processors.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum HapticGeneratorState {
+    HAPTIC_GENERATOR_STATE_UNINITIALIZED,
+    HAPTIC_GENERATOR_STATE_INITIALIZED,
+    HAPTIC_GENERATOR_STATE_ACTIVE,
+};
+
+struct HapticGeneratorParam {
+    // The audio channels used to generate haptic channels. The first entry is used to generate
+    // HAPTIC_A, the second to generate HAPTIC_B. Each value is the index (offset) of the source
+    // audio channel within an interleaved audio frame.
+    int mHapticChannelSource[2];
+
+    int mHapticChannelCount;
+    int mAudioChannelCount;
+
+    HapticGenerator::HapticScale mHapticScale;
+    std::map<int, HapticGenerator::VibratorScale> mHapticScales;
+    // The maximum vibrator scale across all tracks; used to scale the haptic data.
+    HapticGenerator::VibratorScale mMaxVibratorScale;
+
+    HapticGenerator::VibratorInformation mVibratorInfo;
+};
+
+// A structure to keep all shared pointers for all processors in HapticGenerator.
+struct HapticGeneratorProcessorsRecord {
+    std::vector<std::shared_ptr<HapticBiquadFilter>> filters;
+    std::vector<std::shared_ptr<::android::audio_effect::haptic_generator::Ramp>> ramps;
+    std::vector<std::shared_ptr<::android::audio_effect::haptic_generator::SlowEnvelope>> slowEnvs;
+    std::vector<std::shared_ptr<::android::audio_effect::haptic_generator::Distortion>> distortions;
+
+    // Cache band-pass filter and band-stop filter for updating parameters
+    // according to vibrator info
+    std::shared_ptr<HapticBiquadFilter> bpf;
+    std::shared_ptr<HapticBiquadFilter> bsf;
+};
+
+class HapticGeneratorContext final : public EffectContext {
+  public:
+    HapticGeneratorContext(int statusDepth, const Parameter::Common& common);
+    ~HapticGeneratorContext();
+    RetCode enable();
+    RetCode disable();
+    void reset();
+
+    RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
+    std::vector<HapticGenerator::HapticScale> getHgHapticScales();
+
+    RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
+    HapticGenerator::VibratorInformation getHgVibratorInformation();
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
+    static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
+    static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
+    static constexpr float DEFAULT_DISTORTION_OUTPUT_GAIN = 1.5f;
+    static constexpr float DEFAULT_BPF_Q = 1.0f;
+    static constexpr float DEFAULT_SLOW_ENV_NORMALIZATION_POWER = -0.8f;
+    static constexpr float DEFAULT_DISTORTION_CORNER_FREQUENCY = 300.0f;
+    static constexpr float DEFAULT_DISTORTION_INPUT_GAIN = 0.3f;
+    static constexpr float DEFAULT_DISTORTION_CUBE_THRESHOLD = 0.1f;
+
+    std::mutex mMutex;
+    HapticGeneratorState mState;
+    HapticGeneratorParam mParams GUARDED_BY(mMutex);
+    int mSampleRate;
+    int mFrameCount = 0;
+
+    // A cache for all shared pointers of the HapticGenerator
+    struct HapticGeneratorProcessorsRecord mProcessorsRecord;
+
+    // A vector of functions recording the processing chain of the haptic-generating algorithm.
+    // The three parameters of the processing functions are pointer to output buffer, pointer to
+    // input buffer and frame count.
+    std::vector<std::function<void(float*, const float*, size_t)>> mProcessingChain;
+
+    // mInputBuffer holds the input of the generating algorithm; it is gathered from the audio
+    // buffer according to mHapticChannelSource.
+    std::vector<float> mInputBuffer;
+
+    // mOutputBuffer has the same length as mInputBuffer and is used as an intermediate buffer
+    // by the generating algorithm.
+    std::vector<float> mOutputBuffer;
+
+    void init_params(media::audio::common::AudioChannelLayout inputChMask,
+                     media::audio::common::AudioChannelLayout outputChMask);
+    void configure();
+
+    float getDistortionOutputGain();
+    float getFloatProperty(const std::string& key, float defaultValue);
+    void addBiquadFilter(std::shared_ptr<HapticBiquadFilter> filter);
+    void buildProcessingChain();
+    float* runProcessingChain(float* buf1, float* buf2, size_t frameCount);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
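
The mHapticChannelSource comment above describes the gather step that lvmProcess() performs: for every frame, haptic channel j copies the audio channel indexed by mHapticChannelSource[j] out of the interleaved input. The standalone sketch below is not part of the patch and uses illustrative names only.

    #include <cstddef>
    #include <vector>

    // Gather haptic input from an interleaved audio buffer: haptic channel j takes the audio
    // channel indexed by channelSource[j] in every frame, as lvmProcess() does above.
    std::vector<float> gatherHapticInput(const float* audio, size_t frameCount,
                                         size_t audioChannelCount,
                                         const std::vector<size_t>& channelSource) {
        const size_t hapticChannelCount = channelSource.size();
        std::vector<float> haptic(frameCount * hapticChannelCount);
        for (size_t i = 0; i < frameCount; ++i) {
            for (size_t j = 0; j < hapticChannelCount; ++j) {
                haptic[i * hapticChannelCount + j] =
                        audio[i * audioChannelCount + channelSource[j]];
            }
        }
        return haptic;
    }

    int main() {
        // Interleaved stereo input, 3 frames; both haptic channels read audio channel 0.
        const float audio[] = {0.1f, 0.9f, 0.2f, 0.8f, 0.3f, 0.7f};
        auto haptic = gatherHapticInput(audio, 3, 2, {0, 0});
        return haptic.size() == 6 ? 0 : 1;  // haptic = {0.1, 0.1, 0.2, 0.2, 0.3, 0.3}
    }
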
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index bcd6947..7acba11 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -44,3 +44,33 @@
 
     header_libs: ["libaudioeffects"],
 }
+
+cc_library_shared {
+    name: "libloudnessenhanceraidl",
+    srcs: [
+        "aidl/EffectLoudnessEnhancer.cpp",
+        "aidl/LoudnessEnhancerContext.cpp",
+        "dsp/core/dynamic_range_compression.cpp",
+        ":effectCommonFile",
+    ],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    shared_libs: [
+        "libcutils",
+        "liblog",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
new file mode 100644
index 0000000..a7d9282
--- /dev/null
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_LoudnessEnhancerImpl"
+
+#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "EffectLoudnessEnhancer.h"
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidLoudnessEnhancer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::LoudnessEnhancerImpl;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<LoudnessEnhancerImpl>();
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    *_aidl_return = LoudnessEnhancerImpl::kDescriptor;
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+const std::string LoudnessEnhancerImpl::kEffectName = "Loudness Enhancer";
+const Descriptor LoudnessEnhancerImpl::kDescriptor = {
+        .common = {.id = {.type = getEffectTypeUuidLoudnessEnhancer(),
+                          .uuid = getEffectImplUuidLoudnessEnhancer(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
+                   .name = LoudnessEnhancerImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"}};
+
+ndk::ScopedAStatus LoudnessEnhancerImpl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << __func__ << kDescriptor.toString();
+    *_aidl_return = kDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus LoudnessEnhancerImpl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus LoudnessEnhancerImpl::setParameterSpecific(const Parameter::Specific& specific) {
+    RETURN_IF(Parameter::Specific::loudnessEnhancer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
+              "EffectNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto& leParam = specific.get<Parameter::Specific::loudnessEnhancer>();
+    auto tag = leParam.getTag();
+
+    switch (tag) {
+        case LoudnessEnhancer::gainMb: {
+            RETURN_IF(mContext->setLeGain(leParam.get<LoudnessEnhancer::gainMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setGainMbFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus LoudnessEnhancerImpl::getParameterSpecific(const Parameter::Id& id,
+                                                              Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+    RETURN_IF(Parameter::Id::loudnessEnhancerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+    auto leId = id.get<Parameter::Id::loudnessEnhancerTag>();
+    auto leIdTag = leId.getTag();
+    switch (leIdTag) {
+        case LoudnessEnhancer::Id::commonTag:
+            return getParameterLoudnessEnhancer(leId.get<LoudnessEnhancer::Id::commonTag>(),
+                                                specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(leIdTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus LoudnessEnhancerImpl::getParameterLoudnessEnhancer(
+        const LoudnessEnhancer::Tag& tag, Parameter::Specific* specific) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    LoudnessEnhancer leParam;
+    switch (tag) {
+        case LoudnessEnhancer::gainMb: {
+            leParam.set<LoudnessEnhancer::gainMb>(mContext->getLeGain());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::loudnessEnhancer>(leParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> LoudnessEnhancerImpl::createContext(
+        const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exist";
+        return mContext;
+    }
+
+    mContext = std::make_shared<LoudnessEnhancerContext>(1 /* statusFmqDepth */, common);
+    return mContext;
+}
+
+RetCode LoudnessEnhancerImpl::releaseContext() {
+    if (mContext) {
+        mContext->disable();
+        mContext->resetBuffer();
+    }
+    return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status LoudnessEnhancerImpl::effectProcessImpl(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, samples);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
new file mode 100644
index 0000000..5b9e924
--- /dev/null
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "effect-impl/EffectImpl.h"
+#include "LoudnessEnhancerContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class LoudnessEnhancerImpl final : public EffectImpl {
+  public:
+    static const std::string kEffectName;
+    static const Descriptor kDescriptor;
+    LoudnessEnhancerImpl() { LOG(DEBUG) << __func__; }
+    ~LoudnessEnhancerImpl() {
+        cleanUp();
+        LOG(DEBUG) << __func__;
+    }
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    RetCode releaseContext() override;
+
+    std::shared_ptr<EffectContext> getContext() override { return mContext; }
+    std::string getEffectName() override { return kEffectName; }
+
+  private:
+    std::shared_ptr<LoudnessEnhancerContext> mContext;
+    ndk::ScopedAStatus getParameterLoudnessEnhancer(const LoudnessEnhancer::Tag& tag,
+                                                    Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
new file mode 100644
index 0000000..bc3fa45
--- /dev/null
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "LoudnessEnhancerContext"
+
+#include <Utils.h>
+
+#include "LoudnessEnhancerContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
+    : EffectContext(statusDepth, common) {
+    LOG(DEBUG) << __func__;
+    init_params();
+}
+
+LoudnessEnhancerContext::~LoudnessEnhancerContext() {
+    LOG(DEBUG) << __func__;
+}
+
+RetCode LoudnessEnhancerContext::enable() {
+    std::lock_guard lg(mMutex);
+    if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = LOUDNESS_ENHANCER_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode LoudnessEnhancerContext::disable() {
+    std::lock_guard lg(mMutex);
+    if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+void LoudnessEnhancerContext::reset() {
+    float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
+    std::lock_guard lg(mMutex);
+    if (mCompressor != nullptr) {
+        // Get samplingRate from input
+        mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
+    }
+}
+
+RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
+    mGain = gainMb;
+    reset();  // apply parameter update
+    return RetCode::SUCCESS;
+}
+
+IEffect::Status LoudnessEnhancerContext::lvmProcess(float* in, float* out, int samples) {
+    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    RETURN_VALUE_IF(getInputFrameSize() != getOutputFrameSize(), status, "FrameSizeMismatch");
+    auto frameSize = getInputFrameSize();
+    RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
+
+    std::lock_guard lg(mMutex);
+    status = {STATUS_INVALID_OPERATION, 0, 0};
+    RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    // PcmType is always expected to be Float 32 bit.
+    constexpr float scale = 1 << 15;  // power of 2 is lossless conversion to int16_t range
+    constexpr float inverseScale = 1.f / scale;
+    const float inputAmp = pow(10, mGain / 2000.0f) * scale;
+    float leftSample, rightSample;
+
+    if (mCompressor != nullptr) {
+        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+            // makeup gain is applied to the compressor input
+            leftSample = inputAmp * in[inIdx];
+            rightSample = inputAmp * in[inIdx + 1];
+            mCompressor->Compress(&leftSample, &rightSample);
+            in[inIdx] = leftSample * inverseScale;
+            in[inIdx + 1] = rightSample * inverseScale;
+        }
+    } else {
+        for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+            leftSample = inputAmp * in[inIdx];
+            rightSample = inputAmp * in[inIdx + 1];
+            in[inIdx] = leftSample * inverseScale;
+            in[inIdx + 1] = rightSample * inverseScale;
+        }
+    }
+    bool accumulate = false;
+    if (in != out) {
+        for (int i = 0; i < samples; i++) {
+            if (accumulate) {
+                out[i] += in[i];
+            } else {
+                out[i] = in[i];
+            }
+        }
+    }
+    return {STATUS_OK, samples, samples};
+}
+
+void LoudnessEnhancerContext::init_params() {
+    int channelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    LOG_ALWAYS_FATAL_IF(channelCount != 2, "channel count %d not supported", channelCount);
+
+    mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
+    float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
+    LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
+
+    std::lock_guard lg(mMutex);
+    mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
+    mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
+    mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
+}
+
+}  // namespace aidl::android::hardware::audio::effect
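
For reference, a minimal self-contained sketch of the gain arithmetic used above: the millibel-to-linear mapping from init_params()/reset() and the power-of-two scaling applied around the compressor in lvmProcess(). The 300 mB gain is an arbitrary example value, not the library default.

#include <cmath>
#include <cstdio>

int main() {
    const int gainMb = 300;  // example gain in millibels (100 mB = 1 dB); not the actual default
    // Same mapping as init_params()/reset(): linear amplitude = 10^(mB / 2000)
    const float targetAmp = std::pow(10.f, gainMb / 2000.0f);
    // lvmProcess() scales float samples into the int16_t range before compression and back after;
    // a power of two keeps the round trip lossless.
    constexpr float scale = 1 << 15;
    constexpr float inverseScale = 1.f / scale;
    const float sample = 0.25f;
    const float toCompressor = targetAmp * scale * sample;  // value handed to Compress()
    const float restored = toCompressor * inverseScale;     // back in float range, gain applied
    std::printf("amp=%.3f toCompressor=%.1f restored=%.3f\n", targetAmp, toCompressor, restored);
    return 0;
}
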
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
new file mode 100644
index 0000000..9a1ec4c
--- /dev/null
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_effects/effect_loudnessenhancer.h>
+
+#include "dsp/core/dynamic_range_compression.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum LoudnessEnhancerState {
+    LOUDNESS_ENHANCER_STATE_UNINITIALIZED,
+    LOUDNESS_ENHANCER_STATE_INITIALIZED,
+    LOUDNESS_ENHANCER_STATE_ACTIVE,
+};
+
+class LoudnessEnhancerContext final : public EffectContext {
+  public:
+    LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
+    ~LoudnessEnhancerContext();
+
+    RetCode enable();
+    RetCode disable();
+    void reset();
+
+    RetCode setLeGain(int gainMb);
+    int getLeGain() const { return mGain; }
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    std::mutex mMutex;
+    LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+    int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
+    // In this implementation, there is no coupling between the compression on the left and right
+    // channels
+    std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
+
+    void init_params();
+};
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 7d7f8b9..0568fbd 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -11,48 +11,33 @@
 
 cc_test {
     name: "EffectReverbTest",
-    vendor: true,
-    gtest: true,
-    host_supported: true,
+    defaults: [
+      "libeffects-test-defaults",
+    ],
     srcs: [
         "EffectReverbTest.cpp",
-        "EffectTestHelper.cpp",
     ],
     static_libs: [
-        "libaudioutils",
         "libreverb",
         "libreverbwrapper",
     ],
-    shared_libs: [
-        "liblog",
-    ],
     header_libs: [
         "libaudioeffects",
-        "libhardware_headers",
     ],
 }
 
 cc_test {
     name: "EffectBundleTest",
-    vendor: true,
-    gtest: true,
-    host_supported: true,
-    test_suites: ["device-tests"],
+    defaults: [
+      "libeffects-test-defaults",
+    ],
     srcs: [
         "EffectBundleTest.cpp",
-        "EffectTestHelper.cpp",
     ],
     static_libs: [
-        "libaudioutils",
         "libbundlewrapper",
         "libmusicbundle",
     ],
-    shared_libs: [
-        "liblog",
-    ],
-    header_libs: [
-        "libhardware_headers",
-    ],
 }
 
 cc_test {
diff --git a/media/libeffects/lvm/tests/EffectReverbTest.cpp b/media/libeffects/lvm/tests/EffectReverbTest.cpp
index 59453eb..aaac782 100644
--- a/media/libeffects/lvm/tests/EffectReverbTest.cpp
+++ b/media/libeffects/lvm/tests/EffectReverbTest.cpp
@@ -33,6 +33,27 @@
 
 constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
 
+static constexpr audio_channel_mask_t kChMasks[] = {
+        AUDIO_CHANNEL_OUT_MONO,          AUDIO_CHANNEL_OUT_STEREO,
+        AUDIO_CHANNEL_OUT_2POINT1,       AUDIO_CHANNEL_OUT_5POINT1,
+        AUDIO_CHANNEL_OUT_7POINT1POINT4, AUDIO_CHANNEL_INDEX_MASK_23,
+        AUDIO_CHANNEL_OUT_22POINT2,
+};
+
+static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+static constexpr size_t kSampleRates[] = {8000, 11025, 44100, 48000, 192000};
+
+static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+static constexpr size_t kFrameCounts[] = {4, 512};
+
+static constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
+
+static constexpr size_t kLoopCounts[] = {1, 4};
+
+static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
 static bool isAuxMode(const effect_uuid_t* uuid) {
     // Update this, if the order of effects in kEffectUuids is updated
     return (uuid == &kEffectUuids[2] || uuid == &kEffectUuids[3]);
@@ -50,15 +71,15 @@
 class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
   public:
     SingleEffectTest()
-        : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
-          mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
-          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+        : mSampleRate(kSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(kFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(kLoopCounts[std::get<3>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
           mUuid(&kEffectUuids[std::get<4>(GetParam())]),
           mInChMask(isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO
-                                     : EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+                                     : kChMasks[std::get<0>(GetParam())]),
           mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
-          mOutChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+          mOutChMask(kChMasks[std::get<0>(GetParam())]),
           mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
           mPreset(kPresets[std::get<5>(GetParam())]) {}
 
@@ -100,10 +121,10 @@
 
 INSTANTIATE_TEST_SUITE_P(
         EffectReverbTestAll, SingleEffectTest,
-        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+        ::testing::Combine(::testing::Range(0, (int)kNumChMasks),
+                           ::testing::Range(0, (int)kNumSampleRates),
+                           ::testing::Range(0, (int)kNumFrameCounts),
+                           ::testing::Range(0, (int)kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids),
                            ::testing::Range(0, (int)kNumPresets)));
 
@@ -112,9 +133,9 @@
     : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
   public:
     SingleEffectComparisonTest()
-        : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
-          mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
-          mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+        : mSampleRate(kSampleRates[std::get<0>(GetParam())]),
+          mFrameCount(kFrameCounts[std::get<1>(GetParam())]),
+          mLoopCount(kLoopCounts[std::get<2>(GetParam())]),
           mTotalFrameCount(mFrameCount * mLoopCount),
           mUuid(&kEffectUuids[std::get<3>(GetParam())]),
           mPreset(kPresets[std::get<4>(GetParam())]) {}
@@ -173,7 +194,7 @@
     std::vector<int16_t> monoRefI16(mTotalFrameCount);
     memcpy_to_i16_from_float(monoRefI16.data(), monoOutput.data(), mTotalFrameCount);
 
-    for (size_t outChMask : EffectTestHelper::kChMasks) {
+    for (size_t outChMask : kChMasks) {
         size_t outChannelCount = audio_channel_count_from_out_mask(outChMask);
         size_t inChMask = isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : outChMask;
 
@@ -225,9 +246,9 @@
 
 INSTANTIATE_TEST_SUITE_P(
         EffectReverbTestAll, SingleEffectComparisonTest,
-        ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
-                           ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+        ::testing::Combine(::testing::Range(0, (int)kNumSampleRates),
+                           ::testing::Range(0, (int)kNumFrameCounts),
+                           ::testing::Range(0, (int)kNumLoopCounts),
                            ::testing::Range(0, (int)kNumEffectUuids),
                            ::testing::Range(0, (int)kNumPresets)));
 
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
new file mode 100644
index 0000000..d026e2b
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -0,0 +1,851 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+
+#define LOG_TAG "BundleContext"
+#include <android-base/logging.h>
+#include <Utils.h>
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+#include "math.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
+
+RetCode BundleContext::init() {
+    std::lock_guard lg(mMutex);
+    // init with pre-defined preset NORMAL
+    for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+        mBandGaindB[i] = lvm::kSoftPresets[0 /* normal */][i];
+    }
+
+    // allocate lvm instance
+    LVM_ReturnStatus_en status;
+    LVM_InstParams_t params = {.BufferMode = LVM_UNMANAGED_BUFFERS,
+                               .MaxBlockSize = lvm::MAX_CALL_SIZE,
+                               .EQNB_NumBands = lvm::MAX_NUM_BANDS,
+                               .PSA_Included = LVM_PSA_ON};
+    status = LVM_GetInstanceHandle(&mInstance, &params);
+    GOTO_IF_LVM_ERROR(status, deinit, "LVM_GetInstanceHandleFailed");
+
+    // set control
+    LVM_ControlParams_t controlParams;
+    initControlParameter(controlParams);
+    status = LVM_SetControlParameters(mInstance, &controlParams);
+    GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetControlParametersFailed");
+
+    /* Set the headroom parameters */
+    LVM_HeadroomParams_t headroomParams;
+    initHeadroomParameter(headroomParams);
+    status = LVM_SetHeadroomParams(mInstance, &headroomParams);
+    GOTO_IF_LVM_ERROR(status, deinit, "LVM_SetHeadroomParamsFailed");
+
+    return RetCode::SUCCESS;
+
+deinit:
+    deInit();
+    return RetCode::ERROR_EFFECT_LIB_ERROR;
+}
+
+void BundleContext::deInit() {
+    std::lock_guard lg(mMutex);
+    if (mInstance) {
+        LVM_DelInstanceHandle(&mInstance);
+        mInstance = nullptr;
+    }
+}
+
+RetCode BundleContext::enable() {
+    if (mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+    // Bass boost and virtualizer may be temporarily disabled depending on the current output
+    // device (see setOutputDevice()); the effect still reports its requested enable state.
+    bool tempDisabled = false;
+    switch (mType) {
+        case lvm::BundleEffectType::EQUALIZER:
+            LOG(DEBUG) << __func__ << " enable bundle EQ";
+            if (mSamplesToExitCountEq <= 0) mNumberEffectsEnabled++;
+            mSamplesToExitCountEq = (mSamplesPerSecond * 0.1);
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
+            break;
+        case lvm::BundleEffectType::BASS_BOOST:
+            LOG(DEBUG) << __func__ << " enable bundle BB";
+            if (mSamplesToExitCountBb <= 0) mNumberEffectsEnabled++;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
+            mSamplesToExitCountBb = (mSamplesPerSecond * 0.1);
+            tempDisabled = mBassTempDisabled;
+            break;
+        case lvm::BundleEffectType::VIRTUALIZER:
+            LOG(DEBUG) << __func__ << " enable bundle VR";
+            if (mSamplesToExitCountVirt <= 0) mNumberEffectsEnabled++;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
+            mSamplesToExitCountVirt = (mSamplesPerSecond * 0.1);
+            tempDisabled = mVirtualizerTempDisabled;
+            break;
+        case lvm::BundleEffectType::VOLUME:
+            LOG(DEBUG) << __func__ << " enable bundle VOL";
+            if ((mEffectInDrain & (1 << int(lvm::BundleEffectType::VOLUME))) == 0)
+                mNumberEffectsEnabled++;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
+            break;
+    }
+    mEnabled = true;
+    return (tempDisabled ? RetCode::SUCCESS : enableOperatingMode());
+}
+
+RetCode BundleContext::enableOperatingMode() {
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+        switch (mType) {
+            case lvm::BundleEffectType::EQUALIZER:
+                LOG(DEBUG) << __func__ << " enable bundle EQ";
+                params.EQNB_OperatingMode = LVM_EQNB_ON;
+                break;
+            case lvm::BundleEffectType::BASS_BOOST:
+                LOG(DEBUG) << __func__ << " enable bundle BB";
+                params.BE_OperatingMode = LVM_BE_ON;
+                break;
+            case lvm::BundleEffectType::VIRTUALIZER:
+                LOG(DEBUG) << __func__ << " enable bundle VR";
+                params.VirtualizerOperatingMode = LVM_MODE_ON;
+                break;
+            case lvm::BundleEffectType::VOLUME:
+                LOG(DEBUG) << __func__ << " enable bundle VOL";
+                break;
+        }
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+    }
+    return limitLevel();
+}
+
+RetCode BundleContext::disable() {
+    if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+    switch (mType) {
+        case lvm::BundleEffectType::EQUALIZER:
+            LOG(DEBUG) << __func__ << " disable bundle EQ";
+            mEffectInDrain |= 1 << int(lvm::BundleEffectType::EQUALIZER);
+            break;
+        case lvm::BundleEffectType::BASS_BOOST:
+            LOG(DEBUG) << __func__ << " disable bundle BB";
+            mEffectInDrain |= 1 << int(lvm::BundleEffectType::BASS_BOOST);
+            break;
+        case lvm::BundleEffectType::VIRTUALIZER:
+            LOG(DEBUG) << __func__ << " disable bundle VR";
+            mEffectInDrain |= 1 << int(lvm::BundleEffectType::VIRTUALIZER);
+            break;
+        case lvm::BundleEffectType::VOLUME:
+            LOG(DEBUG) << __func__ << " disable bundle VOL";
+            mEffectInDrain |= 1 << int(lvm::BundleEffectType::VOLUME);
+            break;
+    }
+    mEnabled = false;
+    return disableOperatingMode();
+}
+
+RetCode BundleContext::disableOperatingMode() {
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+        switch (mType) {
+            case lvm::BundleEffectType::EQUALIZER:
+                LOG(DEBUG) << __func__ << " disable bundle EQ";
+                params.EQNB_OperatingMode = LVM_EQNB_OFF;
+                break;
+            case lvm::BundleEffectType::BASS_BOOST:
+                LOG(DEBUG) << __func__ << " disable bundle BB";
+                params.BE_OperatingMode = LVM_BE_OFF;
+                break;
+            case lvm::BundleEffectType::VIRTUALIZER:
+                LOG(DEBUG) << __func__ << " disable bundle VR";
+                params.VirtualizerOperatingMode = LVM_MODE_OFF;
+                break;
+            case lvm::BundleEffectType::VOLUME:
+                LOG(DEBUG) << __func__ << " disable bundle VOL";
+                break;
+        }
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+    }
+    mEnabled = false;
+    return limitLevel();
+}
+
+RetCode BundleContext::limitLevel() {
+    int gainCorrection = 0;
+    // Count the energy contribution per band for EQ and BassBoost only if they are active.
+    float energyContribution = 0;
+    float energyCross = 0;
+    float energyBassBoost = 0;
+    float crossCorrection = 0;
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
+        bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
+        bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+
+        if (eqEnabled) {
+            for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                float bandFactor = mBandGaindB[i] / 15.0;
+                float bandCoefficient = lvm::kBandEnergyCoefficient[i];
+                float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
+                if (bandEnergy > 0) energyContribution += bandEnergy;
+            }
+
+            // cross EQ coefficients
+            float bandFactorSum = 0;
+            for (int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+                float bandFactor1 = mBandGaindB[i] / 15.0;
+                float bandFactor2 = mBandGaindB[i + 1] / 15.0;
+
+                if (bandFactor1 > 0 && bandFactor2 > 0) {
+                    float crossEnergy =
+                            bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
+                    bandFactorSum += bandFactor1 * bandFactor2;
+
+                    if (crossEnergy > 0) energyCross += crossEnergy;
+                }
+            }
+            bandFactorSum -= 1.0;
+            if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
+        }
+        // BassBoost contribution
+        if (bbEnabled) {
+            float boostFactor = mBassStrengthSaved / 1000.0;
+            float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
+
+            energyContribution += boostFactor * boostCoefficient * boostCoefficient;
+
+            if (eqEnabled) {
+                for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+                    float bandFactor = mBandGaindB[i] / 15.0;
+                    float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
+                    float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+                    if (bandEnergy > 0) energyBassBoost += bandEnergy;
+                }
+            }
+        }
+        // Virtualizer contribution
+        if (viEnabled) {
+            energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
+        }
+
+        double totalEnergyEstimation =
+                sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
+        LOG(INFO) << " TOTAL energy estimation: " << totalEnergyEstimation << " dB";
+
+        // round up to the next integer dB
+        int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
+        if (maxLevelRound + mVolume > 0) {
+            gainCorrection = maxLevelRound + mVolume;
+        }
+
+        params.VC_EffectLevel = mVolume - gainCorrection;
+        if (params.VC_EffectLevel < -96) {
+            params.VC_EffectLevel = -96;
+        }
+        LOG(INFO) << "\tVol: " << mVolume << ", GainCorrection: " << gainCorrection
+                  << ", Actual vol: " << params.VC_EffectLevel;
+
+        /* Activate the initial settings */
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
+        if (mFirstVolume) {
+            RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
+                            RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
+            LOG(INFO) << "\tLVM_VOLUME: Disabling Smoothing for first volume change to remove "
+                         "spikes/clicks";
+            mFirstVolume = false;
+        }
+    }
+
+    return RetCode::SUCCESS;
+}
+
+bool BundleContext::isDeviceSupportedBassBoost(
+        const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+    for (const auto& device : devices) {
+        if (device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER, ""} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_CARKIT,
+                                             AudioDeviceDescription::CONNECTION_BT_SCO} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER,
+                                             AudioDeviceDescription::CONNECTION_BT_A2DP}) {
+            return false;
+        }
+    }
+    return true;
+}
+
+bool BundleContext::isDeviceSupportedVirtualizer(
+        const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+    for (const auto& device : devices) {
+        if (device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
+                                             AudioDeviceDescription::CONNECTION_ANALOG} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
+                                             AudioDeviceDescription::CONNECTION_ANALOG} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
+                                             AudioDeviceDescription::CONNECTION_BT_A2DP} &&
+            device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
+                                             AudioDeviceDescription::CONNECTION_USB}) {
+            return false;
+        }
+    }
+    return true;
+}
+
+bool BundleContext::isConfigSupportedVirtualizer(size_t channelCount,
+                                                 const AudioDeviceDescription& device) {
+    return (channelCount >= 1 && channelCount <= FCC_2) && isDeviceSupportedVirtualizer({device});
+}
+
+RetCode BundleContext::setOutputDevice(
+        const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+    mOutputDevice = devices;
+    switch (mType) {
+        case lvm::BundleEffectType::BASS_BOOST:
+            if (!isDeviceSupportedBassBoost(devices)) {
+                // If a device doesn't support bass boost, the effect must be temporarily disabled.
+                // The effect must still report its original state as this can only be changed by
+                // the start/stop commands.
+                if (mEnabled) {
+                    disableOperatingMode();
+                }
+                mBassTempDisabled = true;
+            } else {
+                // If a device supports bass boost and the effect has been temporarily disabled
+                // previously then re-enable it
+                if (!mEnabled) {
+                    enableOperatingMode();
+                }
+                mBassTempDisabled = false;
+            }
+            break;
+        case lvm::BundleEffectType::VIRTUALIZER:
+            if (!isDeviceSupportedVirtualizer(devices)) {
+                if (mEnabled) {
+                    disableOperatingMode();
+                }
+                mVirtualizerTempDisabled = true;
+            } else {
+                if (!mEnabled) {
+                    enableOperatingMode();
+                }
+                mVirtualizerTempDisabled = false;
+            }
+            break;
+        default:
+            break;
+    }
+    return RetCode::SUCCESS;
+}
+
+LVM_INT16 BundleContext::LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) const {
+    LVM_INT16 db_fix;
+    LVM_INT16 Shift;
+    LVM_INT16 SmallRemainder;
+    LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix;
+
+    /* Count leading zero bits, 1 cycle in assembly */
+    for (Shift = 0; Shift < 32; Shift++) {
+        if ((Remainder & 0x80000000U) != 0) {
+            break;
+        }
+        Remainder = Remainder << 1;
+    }
+
+    /*
+     * Based on the approximation equation (for Q11.4 format):
+     *
+     * dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2)
+     */
+    db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/
+    SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24);
+    db_fix = (LVM_INT16)(db_fix + SmallRemainder);
+    SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder);
+    db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9));
+
+    /* Correct for small offset */
+    db_fix = (LVM_INT16)(db_fix - 5);
+
+    return db_fix;
+}
+
+// TODO: replace with more generic approach, like: audio_utils_power_from_amplitude
+int16_t BundleContext::VolToDb(uint32_t vol) const {
+    int16_t dB;
+
+    dB = LVC_ToDB_s32Tos16(vol << 7);
+    dB = (dB + 8) >> 4;
+    dB = (dB < -96) ? -96 : dB;
+
+    return dB;
+}
+
+RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
+    LVM_ControlParams_t params;
+    LVM_ReturnStatus_en status = LVM_SUCCESS;
+
+    // Convert volume to dB
+    int leftdB = VolToDb(volume.left);
+    int rightdB = VolToDb(volume.right);
+    int maxdB = std::max(leftdB, rightdB);
+    int pandB = rightdB - leftdB;
+    setVolumeLevel(maxdB * 100);
+    LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
+
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "");
+        params.VC_Balance = pandB;
+
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, "");
+    }
+    mVolumeStereo = volume;
+    return RetCode::SUCCESS;
+}
+
+RetCode BundleContext::setEqualizerPreset(const std::size_t presetIdx) {
+    if (presetIdx >= lvm::MAX_NUM_PRESETS) {
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+
+    std::vector<Equalizer::BandLevel> bandLevels;
+    bandLevels.reserve(lvm::MAX_NUM_BANDS);
+    for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+        bandLevels.emplace_back(
+                Equalizer::BandLevel{static_cast<int32_t>(i), lvm::kSoftPresets[presetIdx][i]});
+    }
+
+    RetCode ret = updateControlParameter(bandLevels);
+    if (RetCode::SUCCESS == ret) {
+        mCurPresetIdx = presetIdx;
+        LOG(INFO) << __func__ << " success with " << presetIdx;
+    } else {
+        LOG(ERROR) << __func__ << " failed to setPreset " << presetIdx;
+    }
+    return ret;
+}
+
+RetCode BundleContext::setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels) {
+    RETURN_VALUE_IF(bandLevels.size() > lvm::MAX_NUM_BANDS || bandLevels.empty(),
+                    RetCode::ERROR_ILLEGAL_PARAMETER, "sizeExceedMax");
+    RetCode ret = updateControlParameter(bandLevels);
+    if (RetCode::SUCCESS == ret) {
+        mCurPresetIdx = lvm::PRESET_CUSTOM;
+        LOG(INFO) << __func__ << " succeed with " << ::android::internal::ToString(bandLevels);
+    } else {
+        LOG(ERROR) << __func__ << " failed with " << ::android::internal::ToString(bandLevels);
+    }
+    return ret;
+}
+
+std::vector<Equalizer::BandLevel> BundleContext::getEqualizerBandLevels() const {
+    std::vector<Equalizer::BandLevel> bandLevels;
+    bandLevels.reserve(lvm::MAX_NUM_BANDS);
+    for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+        bandLevels.emplace_back(Equalizer::BandLevel{static_cast<int32_t>(i), mBandGaindB[i]});
+    }
+    return bandLevels;
+}
+
+std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
+    std::vector<int32_t> freqs;
+
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        /* Get the current settings */
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
+                        " getControlParamFailed");
+        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+            freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
+        }
+    }
+
+    return freqs;
+}
+
+bool BundleContext::isBandLevelIndexInRange(
+        const std::vector<Equalizer::BandLevel>& bandLevels) const {
+    const auto [min, max] =
+            std::minmax_element(bandLevels.begin(), bandLevels.end(),
+                                [](const auto& a, const auto& b) { return a.index < b.index; });
+    return min->index >= 0 && max->index < lvm::MAX_NUM_BANDS;
+}
+
+RetCode BundleContext::updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels) {
+    RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
+                    "indexOutOfRange");
+
+    std::array<int, lvm::MAX_NUM_BANDS> tempLevel = mBandGaindB;  // unlisted bands keep gain
+    for (const auto& it : bandLevels) {
+        tempLevel[it.index] = it.levelMb;
+    }
+
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+            params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
+            params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
+            params.pEQNB_BandDefinition[i].Gain = tempLevel[i];
+        }
+
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mBandGaindB = tempLevel;
+    LOG(INFO) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGaindB);
+
+    return RetCode::SUCCESS;
+}
+
+RetCode BundleContext::setBassBoostStrength(int strength) {
+    // Update Control Parameter
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+        params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mBassStrengthSaved = strength;
+    LOG(INFO) << __func__ << " success with strength " << strength;
+    return limitLevel();
+}
+
+RetCode BundleContext::setVolumeLevel(int level) {
+    if (mMuteEnabled) {
+        mLevelSaved = level / 100;
+    } else {
+        mVolume = level / 100;
+    }
+    LOG(INFO) << __func__ << " success with level " << level;
+    return limitLevel();
+}
+
+int BundleContext::getVolumeLevel() const {
+    return (mMuteEnabled ? mLevelSaved * 100 : mVolume * 100);
+}
+
+RetCode BundleContext::setVolumeMute(bool mute) {
+    mMuteEnabled = mute;
+    if (mMuteEnabled) {
+        mLevelSaved = mVolume;
+        mVolume = -96;
+    } else {
+        mVolume = mLevelSaved;
+    }
+    return limitLevel();
+}
+
+RetCode BundleContext::setVirtualizerStrength(int strength) {
+    // Update Control Parameter
+    LVM_ControlParams_t params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.CS_EffectLevel = ((strength * 32767) / 1000);
+
+        RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+
+    mVirtStrengthSaved = strength;
+    LOG(INFO) << __func__ << " success with strength " << strength;
+    return limitLevel();
+}
+
+RetCode BundleContext::setForcedDevice(
+        const ::aidl::android::media::audio::common::AudioDeviceDescription& device) {
+    RETURN_VALUE_IF(!isDeviceSupportedVirtualizer({device}), RetCode::ERROR_EFFECT_LIB_ERROR,
+                    " deviceNotSupportVirtualizer");
+    mForceDevice = device;
+    return RetCode::SUCCESS;
+}
+
+void BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+    /* General parameters */
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    params.SourceFormat = LVM_STEREO;
+    params.SpeakerType = LVM_HEADPHONES;
+
+    /* Concert Sound parameters */
+    params.VirtualizerOperatingMode = LVM_MODE_OFF;
+    params.VirtualizerType = LVM_CONCERTSOUND;
+    params.VirtualizerReverbLevel = 100;
+    params.CS_EffectLevel = LVM_CS_EFFECT_NONE;
+
+    params.EQNB_OperatingMode = LVM_EQNB_OFF;
+    params.EQNB_NBands = lvm::MAX_NUM_BANDS;
+    params.pEQNB_BandDefinition = getDefaultEqualizerBandDefs();
+
+    /* Volume Control parameters */
+    params.VC_EffectLevel = 0;
+    params.VC_Balance = 0;
+
+
+    /* Bass Enhancement parameters */
+    params.BE_OperatingMode = LVM_BE_OFF;
+    params.BE_EffectLevel = 0;
+    params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+    params.BE_HPF = LVM_BE_HPF_ON;
+
+    /* PSA Control parameters */
+    params.PSA_Enable = LVM_PSA_OFF;
+    params.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
+
+    /* TE Control parameters */
+    params.TE_OperatingMode = LVM_TE_OFF;
+    params.TE_EffectLevel = 0;
+
+    params.NrChannels = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+    params.ChMask = AUDIO_CHANNEL_OUT_STEREO;
+    params.SourceFormat = LVM_STEREO;
+}
+
+void BundleContext::initHeadroomParameter(LVM_HeadroomParams_t& params) const {
+    params.pHeadroomDefinition = getDefaultEqualizerHeadroomBanDefs();
+    params.NHeadroomBands = 2;
+    params.Headroom_OperatingMode = LVM_HEADROOM_OFF;
+}
+
+LVM_EQNB_BandDef_t *BundleContext::getDefaultEqualizerBandDefs() {
+    static LVM_EQNB_BandDef_t* BandDefs = []() {
+        static LVM_EQNB_BandDef_t tempDefs[lvm::MAX_NUM_BANDS];
+        /* N-Band Equaliser parameters */
+        for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+            tempDefs[i].Frequency = lvm::kPresetsFrequencies[i];
+            tempDefs[i].QFactor = lvm::kPresetsQFactors[i];
+            tempDefs[i].Gain = lvm::kSoftPresets[0/* normal */][i];
+        }
+        return tempDefs;
+    }();
+
+    return BandDefs;
+}
+
+LVM_HeadroomBandDef_t *BundleContext::getDefaultEqualizerHeadroomBanDefs() {
+    static LVM_HeadroomBandDef_t HeadroomBandDef[LVM_HEADROOM_MAX_NBANDS] = {
+            {
+                    .Limit_Low = 20,
+                    .Limit_High = 4999,
+                    .Headroom_Offset = 0,
+            },
+            {
+                    .Limit_Low = 5000,
+                    .Limit_High = 24000,
+                    .Headroom_Offset = 0,
+            },
+    };
+    return HeadroomBandDef;
+}
+
+std::vector<Virtualizer::ChannelAngle> BundleContext::getSpeakerAngles(
+        const Virtualizer::SpeakerAnglesPayload payload) {
+    std::vector<Virtualizer::ChannelAngle> angles;
+    auto chCount = ::aidl::android::hardware::audio::common::getChannelCount(payload.layout);
+    RETURN_VALUE_IF(!isConfigSupportedVirtualizer(chCount, payload.device), angles,
+                    "payloadNotSupported");
+
+    if (chCount == 1) {
+        angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT,
+                   .azimuthDegree = 0,
+                   .elevationDegree = 0}};
+    } else {
+        angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT,
+                   .azimuthDegree = -90,
+                   .elevationDegree = 0},
+                  {.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_RIGHT,
+                   .azimuthDegree = 90,
+                   .elevationDegree = 0}};
+    }
+    return angles;
+}
+
+IEffect::Status BundleContext::lvmProcess(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    int64_t inputFrameCount = getCommon().input.frameCount;
+    int64_t outputFrameCount = getCommon().output.frameCount;
+    RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
+    bool isDataAvailable = true;
+
+    auto frameSize = getInputFrameSize();
+    RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    if ((mEffectProcessCalled & 1 << int(mType)) != 0) {
+        const int undrainedEffects = mEffectInDrain & ~mEffectProcessCalled;
+        if ((undrainedEffects & 1 << int(lvm::BundleEffectType::EQUALIZER)) != 0) {
+            LOG(DEBUG) << "Draining EQUALIZER";
+            mSamplesToExitCountEq = 0;
+            --mNumberEffectsEnabled;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
+        }
+        if ((undrainedEffects & 1 << int(lvm::BundleEffectType::BASS_BOOST)) != 0) {
+            LOG(DEBUG) << "Draining BASS_BOOST";
+            mSamplesToExitCountBb = 0;
+            --mNumberEffectsEnabled;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
+        }
+        if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VIRTUALIZER)) != 0) {
+            LOG(DEBUG) << "Draining VIRTUALIZER";
+            mSamplesToExitCountVirt = 0;
+            --mNumberEffectsEnabled;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
+        }
+        if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VOLUME)) != 0) {
+            LOG(DEBUG) << "Draining VOLUME";
+            --mNumberEffectsEnabled;
+            mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
+        }
+    }
+    mEffectProcessCalled |= 1 << int(mType);
+    if (!mEnabled) {
+        switch (mType) {
+            case lvm::BundleEffectType::EQUALIZER:
+                if (mSamplesToExitCountEq > 0) {
+                    mSamplesToExitCountEq -= samples;
+                }
+                if (mSamplesToExitCountEq <= 0) {
+                    isDataAvailable = false;
+                    if ((mEffectInDrain & 1 << int(lvm::BundleEffectType::EQUALIZER)) != 0) {
+                        mNumberEffectsEnabled--;
+                        mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
+                    }
+                    LOG(DEBUG) << "Effect_process() this is the last frame for EQUALIZER";
+                }
+                break;
+            case lvm::BundleEffectType::BASS_BOOST:
+                if (mSamplesToExitCountBb > 0) {
+                    mSamplesToExitCountBb -= samples;
+                }
+                if (mSamplesToExitCountBb <= 0) {
+                    isDataAvailable = false;
+                    if ((mEffectInDrain & 1 << int(lvm::BundleEffectType::BASS_BOOST)) != 0) {
+                        mNumberEffectsEnabled--;
+                        mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
+                    }
+                    LOG(DEBUG) << "Effect_process() this is the last frame for BASS_BOOST";
+                }
+                break;
+            case lvm::BundleEffectType::VIRTUALIZER:
+                if (mSamplesToExitCountVirt > 0) {
+                    mSamplesToExitCountVirt -= samples;
+                }
+                if (mSamplesToExitCountVirt <= 0) {
+                    isDataAvailable = false;
+                    if ((mEffectInDrain & 1 << int(lvm::BundleEffectType::VIRTUALIZER)) != 0) {
+                        mNumberEffectsEnabled--;
+                        mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
+                    }
+                    LOG(DEBUG) << "Effect_process() this is the last frame for VIRTUALIZER";
+                }
+                break;
+            case lvm::BundleEffectType::VOLUME:
+                isDataAvailable = false;
+                if ((mEffectInDrain & 1 << int(lvm::BundleEffectType::VOLUME)) != 0) {
+                    mNumberEffectsEnabled--;
+                    mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
+                }
+                LOG(DEBUG) << "Effect_process() LVM_VOLUME Effect is not enabled";
+                break;
+        }
+    }
+    if (isDataAvailable) {
+        mNumberEffectsCalled++;
+    }
+    bool accumulate = false;
+    if (mNumberEffectsCalled >= mNumberEffectsEnabled) {
+        // We expect the # effects called to be equal to # effects enabled in sequence (including
+        // draining effects).  Warn if this is not the case due to inconsistent calls.
+        ALOGW_IF(mNumberEffectsCalled > mNumberEffectsEnabled,
+                 "%s Number of effects called %d is greater than number of effects enabled %d",
+                 __func__, mNumberEffectsCalled, mNumberEffectsEnabled);
+        mEffectProcessCalled = 0;  // reset our consistency check.
+        if (!isDataAvailable) {
+            LOG(DEBUG) << "Effect_process() processing last frame";
+        }
+        mNumberEffectsCalled = 0;
+        LVM_UINT16 frames = samples * sizeof(float) / frameSize;
+        float* outTmp = (accumulate ? getWorkBuffer() : out);
+        /* Process the samples */
+        LVM_ReturnStatus_en lvmStatus;
+        {
+            std::lock_guard lg(mMutex);
+            lvmStatus = LVM_Process(mInstance, in, outTmp, frames, 0);
+            if (lvmStatus != LVM_SUCCESS) {
+                LOG(ERROR) << __func__ << " LVM_Process failed with status " << lvmStatus;
+                return {EX_UNSUPPORTED_OPERATION, 0, 0};
+            }
+            if (accumulate) {
+                for (int i = 0; i < samples; i++) {
+                    out[i] += outTmp[i];
+                }
+            }
+        }
+    } else {
+        for (int i = 0; i < samples; i++) {
+            if (accumulate) {
+                out[i] += in[i];
+            } else {
+                out[i] = in[i];
+            }
+        }
+    }
+    LOG(DEBUG) << __func__ << " done processing";
+    return {STATUS_OK, samples, samples};
+}
+
+}  // namespace aidl::android::hardware::audio::effect
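
As a worked restatement of the fixed-point log approximation in LVC_ToDB_s32Tos16() / VolToDb() above (this is a reading of the code; the unity-at-2^24 interpretation of vol is an assumption based on the `vol << 7` normalization): with Shift the number of leading zero bits after normalization and f in [0, 1) the seven mantissa bits below the leading one,

    \mathrm{dB}_{Q11.4} \approx -96\,\mathrm{Shift} + 128 f - 32 f^{2} - 5
    \quad\Longleftrightarrow\quad
    \mathrm{dB} \approx -6\,\mathrm{Shift} + 8 f - 2 f^{2} - \tfrac{5}{16}

Each leading zero contributes roughly -6 dB (one octave of 20*log10), and 8f - 2f^2 approximates 20*log10(1 + f) within the octave (3.5 dB vs. the exact 3.52 dB at f = 0.5). VolToDb() then rounds the Q11.4 result to whole dB and clamps it to -96 dB.
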
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
new file mode 100644
index 0000000..47d5e5a
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <array>
+#include <cstddef>
+
+#include "BundleTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class BundleContext final : public EffectContext {
+  public:
+    BundleContext(int statusDepth, const Parameter::Common& common,
+                  const lvm::BundleEffectType& type)
+        : EffectContext(statusDepth, common), mType(type) {
+        LOG(DEBUG) << __func__ << " type: " << type;
+    }
+    ~BundleContext() override {
+        LOG(DEBUG) << __func__;
+        deInit();
+    }
+
+    RetCode init();
+    void deInit();
+    lvm::BundleEffectType getBundleType() const { return mType; }
+
+    RetCode enable();
+    RetCode enableOperatingMode();
+    RetCode disable();
+    RetCode disableOperatingMode();
+
+    void setSampleRate(const int sampleRate) { mSampleRate = sampleRate; }
+    int getSampleRate() const { return mSampleRate; }
+
+    void setChannelMask(const aidl::android::media::audio::common::AudioChannelLayout& chMask) {
+        mChMask = chMask;
+    }
+    aidl::android::media::audio::common::AudioChannelLayout getChannelMask() const {
+        return mChMask;
+    }
+    bool isDeviceSupportedBassBoost(
+            const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
+                    devices);
+    bool isDeviceSupportedVirtualizer(
+            const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
+                    devices);
+    bool isConfigSupportedVirtualizer(
+            size_t channelCount,
+            const aidl::android::media::audio::common::AudioDeviceDescription& device);
+
+    RetCode setOutputDevice(
+            const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices)
+            override;
+
+    RetCode setEqualizerPreset(const std::size_t presetIdx);
+    int getEqualizerPreset() const { return mCurPresetIdx; }
+    RetCode setEqualizerBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels);
+    std::vector<Equalizer::BandLevel> getEqualizerBandLevels() const;
+
+    std::vector<int32_t> getEqualizerCenterFreqs();
+
+    RetCode setBassBoostStrength(int strength);
+    int getBassBoostStrength() const { return mBassStrengthSaved; }
+
+    RetCode setVolumeLevel(int level);
+    int getVolumeLevel() const;
+
+    RetCode setVolumeMute(bool mute);
+    int getVolumeMute() const { return mMuteEnabled; }
+
+    RetCode setVirtualizerStrength(int strength);
+    int getVirtualizerStrength() const { return mVirtStrengthSaved; }
+
+    RetCode setForcedDevice(
+            const ::aidl::android::media::audio::common::AudioDeviceDescription& device);
+    aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const {
+        return mForceDevice;
+    }
+    std::vector<Virtualizer::ChannelAngle> getSpeakerAngles(
+            const Virtualizer::SpeakerAnglesPayload payload);
+
+    RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
+    Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+    IEffect::Status processEffect(float* in, float* out, int sampleToProcess);
+
+  private:
+    std::mutex mMutex;
+    const lvm::BundleEffectType mType;
+    bool mEnabled = false;
+    LVM_Handle_t mInstance GUARDED_BY(mMutex);
+
+    aidl::android::media::audio::common::AudioChannelLayout mChMask;
+
+    int mSampleRate = LVM_FS_44100;
+    int mSamplesPerSecond = 0;
+    int mSamplesToExitCountEq = 0;
+    int mSamplesToExitCountBb = 0;
+    int mSamplesToExitCountVirt = 0;
+    int mFrameCount = 0;
+
+    /* Bitmask tracking which effects are draining after being disabled.
+       The bit for an effect is 1 << int(BundleEffectType). */
+    int mEffectInDrain = 0;
+
+    /* Bitmask tracking which effects have had process() called in the current round.
+       The bit for an effect is 1 << int(BundleEffectType). */
+    int mEffectProcessCalled = 0;
+    int mNumberEffectsEnabled = 0;
+    int mNumberEffectsCalled = 0;
+    bool mFirstVolume = true;
+    // Bass
+    bool mBassTempDisabled = false;
+    int mBassStrengthSaved = 0;
+    // Equalizer
+    int mCurPresetIdx = lvm::PRESET_CUSTOM; /* Current preset being used */
+    std::array<int, lvm::MAX_NUM_BANDS> mBandGaindB;
+    // Virtualizer
+    int mVirtStrengthSaved = 0; /* Conversion between Get/Set */
+    bool mVirtualizerTempDisabled = false;
+    ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
+    // Volume
+    int mLevelSaved = 0; /* for when mute is set, level must be saved */
+    int mVolume = 0;
+    bool mMuteEnabled = false; /* mute forces -96 dB; previous level kept in mLevelSaved */
+
+    void initControlParameter(LVM_ControlParams_t& params) const;
+    void initHeadroomParameter(LVM_HeadroomParams_t& params) const;
+    RetCode limitLevel();
+    int16_t VolToDb(uint32_t vol) const;
+    LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) const;
+    RetCode updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels);
+    bool isBandLevelIndexInRange(const std::vector<Equalizer::BandLevel>& bandLevels) const;
+    static LVM_EQNB_BandDef_t* getDefaultEqualizerBandDefs();
+    static LVM_HeadroomBandDef_t* getDefaultEqualizerHeadroomBanDefs();
+};
+
+}  // namespace aidl::android::hardware::audio::effect
+
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
new file mode 100644
index 0000000..3bc889c
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -0,0 +1,222 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <array>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "effect-impl/EffectTypes.h"
+#include "LVM.h"
+
+namespace aidl::android::hardware::audio::effect {
+namespace lvm {
+
+constexpr inline size_t MAX_NUM_PRESETS = 10;
+constexpr inline size_t MAX_NUM_BANDS = 5;
+constexpr inline size_t MAX_CALL_SIZE = 256;
+constexpr inline int BASS_BOOST_CUP_LOAD_ARM9E = 150;   // Expressed in 0.1 MIPS
+constexpr inline int VIRTUALIZER_CUP_LOAD_ARM9E = 120;  // Expressed in 0.1 MIPS
+constexpr inline int EQUALIZER_CUP_LOAD_ARM9E = 220;    // Expressed in 0.1 MIPS
+constexpr inline int VOLUME_CUP_LOAD_ARM9E = 0;         // Expressed in 0.1 MIPS
+constexpr inline int BUNDLE_MEM_USAGE = 25;             // Expressed in kB
+constexpr inline int PRESET_CUSTOM = -1;
+
+static const std::vector<Equalizer::BandFrequency> kEqBandFrequency = {{0, 30000, 120000},
+                                                                       {1, 120001, 460000},
+                                                                       {2, 460001, 1800000},
+                                                                       {3, 1800001, 7000000},
+                                                                       {4, 7000001, 20000000}};
+
+/*
+Frequencies in Hz.
+Note: if these frequencies change, update the kBandEnergy* coefficients used by limitLevel().
+*/
+constexpr inline std::array<uint16_t, MAX_NUM_BANDS> kPresetsFrequencies = {60, 230, 910, 3600,
+                                                                            14000};
+
+/* Q factor multiplied by 100 */
+constexpr inline std::array<uint16_t, MAX_NUM_BANDS> kPresetsQFactors = {96, 96, 96, 96, 96};
+
+constexpr inline std::array<std::array<int16_t, MAX_NUM_BANDS>, MAX_NUM_PRESETS> kSoftPresets = {
+        {{3, 0, 0, 0, 3},    /* Normal Preset */
+         {5, 3, -2, 4, 4},   /* Classical Preset */
+         {6, 0, 2, 4, 1},    /* Dance Preset */
+         {0, 0, 0, 0, 0},    /* Flat Preset */
+         {3, 0, 0, 2, -1},   /* Folk Preset */
+         {4, 1, 9, 3, 0},    /* Heavy Metal Preset */
+         {5, 3, 0, 1, 3},    /* Hip Hop Preset */
+         {4, 2, -2, 2, 5},   /* Jazz Preset */
+         {-1, 2, 5, 1, -2},  /* Pop Preset */
+         {5, 3, -1, 3, 5}}}; /* Rock Preset */
+
+static const std::vector<Equalizer::Preset> kEqPresets = {
+        {0, "Normal"},      {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
+        {5, "Heavy Metal"}, {6, "Hip Hop"},   {7, "Jazz"},  {8, "Pop"},  {9, "Rock"}};
+
+const std::vector<Range::EqualizerRange> kEqRanges = {
+        MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
+        MAKE_RANGE(Equalizer, bandLevels,
+                   std::vector<Equalizer::BandLevel>{
+                           Equalizer::BandLevel({.index = 0, .levelMb = -15})},
+                   std::vector<Equalizer::BandLevel>{
+                           Equalizer::BandLevel({.index = MAX_NUM_BANDS - 1, .levelMb = 15})}),
+        /* capability definition */
+        MAKE_RANGE(Equalizer, bandFrequencies, kEqBandFrequency, kEqBandFrequency),
+        MAKE_RANGE(Equalizer, presets, kEqPresets, kEqPresets),
+        /* centerFreqMh is get-only; this is flagged by a range with min > max */
+        MAKE_RANGE(Equalizer, centerFreqMh, std::vector<int>({1}), std::vector<int>({}))};
+static const Capability kEqCap = {.range = kEqRanges};
+static const std::string kEqualizerEffectName = "EqualizerBundle";
+static const Descriptor kEqualizerDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEqualizer(),
+                          .uuid = getEffectImplUuidEqualizerBundle()},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL},
+                   .name = kEqualizerEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEqCap};
+
+static const int mMaxStrengthSupported = 1000;
+static const std::vector<Range::BassBoostRange> kBassBoostRanges = {
+        MAKE_RANGE(BassBoost, strengthPm, 0, mMaxStrengthSupported)};
+static const Capability kBassBoostCap = {.range = kBassBoostRanges};
+static const std::string kBassBoostEffectName = "Dynamic Bass Boost";
+static const Descriptor kBassBoostDesc = {
+        .common = {.id = {.type = getEffectTypeUuidBassBoost(),
+                          .uuid = getEffectImplUuidBassBoostBundle()},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL,
+                             .deviceIndication = true},
+                   .cpuLoad = BASS_BOOST_CUP_LOAD_ARM9E,
+                   .memoryUsage = BUNDLE_MEM_USAGE,
+                   .name = kBassBoostEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kBassBoostCap};
+
+static const std::vector<Range::VirtualizerRange> kVirtualizerRanges = {
+        MAKE_RANGE(Virtualizer, strengthPm, 0, mMaxStrengthSupported)};
+static const Capability kVirtualizerCap = {.range = kVirtualizerRanges};
+static const std::string kVirtualizerEffectName = "Virtualizer";
+
+static const Descriptor kVirtualizerDesc = {
+        .common = {.id = {.type = getEffectTypeUuidVirtualizer(),
+                          .uuid = getEffectImplUuidVirtualizerBundle()},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::LAST,
+                             .volume = Flags::Volume::CTRL,
+                             .deviceIndication = true},
+                   .cpuLoad = VIRTUALIZER_CUP_LOAD_ARM9E,
+                   .memoryUsage = BUNDLE_MEM_USAGE,
+                   .name = kVirtualizerEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kVirtualizerCap};
+
+static const std::vector<Range::VolumeRange> kVolumeRanges = {
+        MAKE_RANGE(Volume, levelDb, -9600, 0)};
+static const Capability kVolumeCap = {.range = kVolumeRanges};
+static const std::string kVolumeEffectName = "Volume";
+static const Descriptor kVolumeDesc = {
+        .common = {.id = {.type = getEffectTypeUuidVolume(),
+                          .uuid = getEffectImplUuidVolumeBundle()},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::LAST,
+                             .volume = Flags::Volume::CTRL},
+                   .cpuLoad = VOLUME_CUP_LOAD_ARM9E,
+                   .memoryUsage = BUNDLE_MEM_USAGE,
+                   .name = kVolumeEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kVolumeCap};
+
+/* The following tables were computed from the actual levels measured at the output with white
+ * noise or pink noise (IEC268-1) applied, for the EQ and BassBoost effects. They are estimates of
+ * the energy that could be present in each band.
+ * If the frequency values in kPresetsFrequencies above change, these values may need to be
+ * updated.
+ */
+constexpr inline std::array<float, MAX_NUM_BANDS> kBandEnergyCoefficient = {7.56, 9.69, 9.59, 7.37,
+                                                                            2.88};
+
+constexpr inline std::array<float, MAX_NUM_BANDS - 1> kBandEnergyCrossCoefficient = {126.0, 115.0,
+                                                                                     125.0, 104.0};
+
+constexpr inline std::array<float, MAX_NUM_BANDS> kBassBoostEnergyCrossCoefficient = {
+        221.21, 208.10, 28.16, 0.0, 0.0};
+
+constexpr inline float kBassBoostEnergyCoefficient = 9.00;
+
+constexpr inline float kVirtualizerContribution = 1.9;
+
+enum class BundleEffectType {
+    BASS_BOOST,
+    VIRTUALIZER,
+    EQUALIZER,
+    VOLUME,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const BundleEffectType& type) {
+    switch (type) {
+        case BundleEffectType::BASS_BOOST:
+            return out << "BASS_BOOST";
+        case BundleEffectType::VIRTUALIZER:
+            return out << "VIRTUALIZER";
+        case BundleEffectType::EQUALIZER:
+            return out << "EQUALIZER";
+        case BundleEffectType::VOLUME:
+            return out << "VOLUME";
+    }
+    return out << "EnumBundleEffectTypeError";
+}
+
+inline std::ostream& operator<<(std::ostream& out, const LVM_ReturnStatus_en& status) {
+    switch (status) {
+        case LVM_SUCCESS:
+            return out << "LVM_SUCCESS";
+        case LVM_ALIGNMENTERROR:
+            return out << "LVM_ALIGNMENTERROR";
+        case LVM_NULLADDRESS:
+            return out << "LVM_NULLADDRESS";
+        case LVM_OUTOFRANGE:
+            return out << "LVM_OUTOFRANGE";
+        case LVM_INVALIDNUMSAMPLES:
+            return out << "LVM_INVALIDNUMSAMPLES";
+        case LVM_WRONGAUDIOTIME:
+            return out << "LVM_WRONGAUDIOTIME";
+        case LVM_ALGORITHMDISABLED:
+            return out << "LVM_ALGORITHMDISABLED";
+        case LVM_ALGORITHMPSA:
+            return out << "LVM_ALGORITHMPSA";
+        case LVM_RETURNSTATUS_DUMMY:
+            return out << "LVM_RETURNSTATUS_DUMMY";
+    }
+    return out << "EnumLvmRetStatusError";
+}
+
+#define GOTO_IF_LVM_ERROR(status, tag, log)                                       \
+    do {                                                                          \
+        LVM_ReturnStatus_en temp = (status);                                      \
+        if (temp != LVM_SUCCESS) {                                                \
+            LOG(ERROR) << __func__ << " return status: " << temp << " " << (log); \
+            goto tag;                                                             \
+        }                                                                         \
+    } while (0)
+
+}  // namespace lvm
+}  // namespace aidl::android::hardware::audio::effect
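
For reference, the GOTO_IF_LVM_ERROR macro above follows the pattern the wrapper uses around LVM library calls. A minimal sketch of the intended usage is shown below (not part of the patch); applyLvmConfig() is a hypothetical stand-in for any call returning LVM_ReturnStatus_en, and RetCode::ERROR_EFFECT_LIB_ERROR is assumed to be one of the effect-impl RetCode values.

    // Sketch only, not part of the patch.
    LVM_ReturnStatus_en applyLvmConfig();  // hypothetical LVM call

    RetCode exampleApplyConfig() {
        // On failure the macro logs the status (via the operator<< above) and jumps to the label.
        GOTO_IF_LVM_ERROR(applyLvmConfig(), exit_error, "applyLvmConfig");
        return RetCode::SUCCESS;
    exit_error:
        return RetCode::ERROR_EFFECT_LIB_ERROR;
    }
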
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
new file mode 100644
index 0000000..cd9fb60
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -0,0 +1,467 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "BundleTypes.h"
+#define LOG_TAG "EffectBundleAidl"
+#include <Utils.h>
+#include <algorithm>
+#include <unordered_set>
+
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+#include <audio_effects/effect_bassboost.h>
+#include <audio_effects/effect_equalizer.h>
+#include <audio_effects/effect_virtualizer.h>
+
+#include "EffectBundleAidl.h"
+#include <LVM.h>
+#include <limits.h>
+
+using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostBundle;
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::EffectBundleAidl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidEqualizerBundle;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVirtualizerBundle;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVolumeBundle;
+using aidl::android::media::audio::common::AudioUuid;
+
+bool isUuidSupported(const AudioUuid* uuid) {
+    return (*uuid == getEffectImplUuidBassBoostBundle() ||
+            *uuid == getEffectImplUuidEqualizerBundle() ||
+            *uuid == getEffectImplUuidVirtualizerBundle() ||
+            *uuid == getEffectImplUuidVolumeBundle());
+}
+
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (uuid == nullptr || !isUuidSupported(uuid)) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<EffectBundleAidl>(*uuid);
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || !isUuidSupported(in_impl_uuid)) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (*in_impl_uuid == getEffectImplUuidEqualizerBundle()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kEqualizerDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidBassBoostBundle()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kBassBoostDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidVirtualizerBundle()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kVirtualizerDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidVolumeBundle()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kVolumeDesc;
+    }
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
+    LOG(DEBUG) << __func__ << " " << uuid.toString();
+    if (uuid == getEffectImplUuidEqualizerBundle()) {
+        mType = lvm::BundleEffectType::EQUALIZER;
+        mDescriptor = &lvm::kEqualizerDesc;
+        mEffectName = &lvm::kEqualizerEffectName;
+    } else if (uuid == getEffectImplUuidBassBoostBundle()) {
+        mType = lvm::BundleEffectType::BASS_BOOST;
+        mDescriptor = &lvm::kBassBoostDesc;
+        mEffectName = &lvm::kBassBoostEffectName;
+    } else if (uuid == getEffectImplUuidVirtualizerBundle()) {
+        mType = lvm::BundleEffectType::VIRTUALIZER;
+        mDescriptor = &lvm::kVirtualizerDesc;
+        mEffectName = &lvm::kVirtualizerEffectName;
+    } else if (uuid == getEffectImplUuidVolumeBundle()) {
+        mType = lvm::BundleEffectType::VOLUME;
+        mDescriptor = &lvm::kVolumeDesc;
+        mEffectName = &lvm::kVolumeEffectName;
+    } else {
+        LOG(ERROR) << __func__ << " " << uuid.toString() << " not supported!";
+    }
+}
+
+EffectBundleAidl::~EffectBundleAidl() {
+    cleanUp();
+    LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    *_aidl_return = *mDescriptor;
+    LOG(DEBUG) << __func__ << " " << _aidl_return->toString();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterCommon(const Parameter& param) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = param.getTag();
+    switch (tag) {
+        case Parameter::common:
+            RETURN_IF(mContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setCommFailed");
+            break;
+        case Parameter::deviceDescription:
+            RETURN_IF(mContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
+            break;
+        case Parameter::mode:
+            RETURN_IF(mContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setModeFailed");
+            break;
+        case Parameter::source:
+            RETURN_IF(mContext->setAudioSource(param.get<Parameter::source>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setSourceFailed");
+            break;
+        case Parameter::volumeStereo:
+            RETURN_IF(mContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
+            break;
+        default: {
+            LOG(ERROR) << __func__ << " unsupportedParameterTag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commonParamNotSupported");
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterSpecific(const Parameter::Specific& specific) {
+    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = specific.getTag();
+    switch (tag) {
+        case Parameter::Specific::equalizer:
+            return setParameterEqualizer(specific);
+        case Parameter::Specific::bassBoost:
+            return setParameterBassBoost(specific);
+        case Parameter::Specific::virtualizer:
+            return setParameterVirtualizer(specific);
+        case Parameter::Specific::volume:
+            return setParameterVolume(specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "specificParamNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterEqualizer(const Parameter::Specific& specific) {
+    auto& eq = specific.get<Parameter::Specific::equalizer>();
+    RETURN_IF(!inRange(eq, lvm::kEqRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto eqTag = eq.getTag();
+    switch (eqTag) {
+        case Equalizer::preset:
+            RETURN_IF(mContext->setEqualizerPreset(eq.get<Equalizer::preset>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed");
+            return ndk::ScopedAStatus::ok();
+        case Equalizer::bandLevels:
+            RETURN_IF(mContext->setEqualizerBandLevels(eq.get<Equalizer::bandLevels>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed");
+            return ndk::ScopedAStatus::ok();
+        default:
+            LOG(ERROR) << __func__ << " unsupported parameter " << specific.toString();
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "eqTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterBassBoost(const Parameter::Specific& specific) {
+    auto& bb = specific.get<Parameter::Specific::bassBoost>();
+    RETURN_IF(!inRange(bb, lvm::kBassBoostRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto bbTag = bb.getTag();
+    switch (bbTag) {
+        case BassBoost::strengthPm: {
+            RETURN_IF(mContext->setBassBoostStrength(bb.get<BassBoost::strengthPm>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setStrengthFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default:
+            LOG(ERROR) << __func__ << " unsupported parameter " << specific.toString();
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "bbTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterVirtualizer(const Parameter::Specific& specific) {
+    auto& vr = specific.get<Parameter::Specific::virtualizer>();
+    RETURN_IF(!inRange(vr, lvm::kVirtualizerRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto vrTag = vr.getTag();
+    switch (vrTag) {
+        case Virtualizer::strengthPm: {
+            RETURN_IF(mContext->setVirtualizerStrength(vr.get<Virtualizer::strengthPm>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setStrengthFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Virtualizer::device: {
+            RETURN_IF(mContext->setForcedDevice(vr.get<Virtualizer::device>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Virtualizer::speakerAngles:
+            FALLTHROUGH_INTENDED;
+        case Virtualizer::vendor: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(vrTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "VirtualizerTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::setParameterVolume(const Parameter::Specific& specific) {
+    auto& vol = specific.get<Parameter::Specific::volume>();
+    RETURN_IF(!inRange(vol, lvm::kVolumeRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto volTag = vol.getTag();
+    switch (volTag) {
+        case Volume::levelDb: {
+            RETURN_IF(mContext->setVolumeLevel(vol.get<Volume::levelDb>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Volume::mute:
+            RETURN_IF(mContext->setVolumeMute(vol.get<Volume::mute>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setMuteFailed");
+            return ndk::ScopedAStatus::ok();
+        default:
+            LOG(ERROR) << __func__ << " unsupported parameter " << specific.toString();
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "volTagNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterSpecific(const Parameter::Id& id,
+                                                          Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+
+    switch (tag) {
+        case Parameter::Id::equalizerTag:
+            return getParameterEqualizer(id.get<Parameter::Id::equalizerTag>(), specific);
+        case Parameter::Id::bassBoostTag:
+            return getParameterBassBoost(id.get<Parameter::Id::bassBoostTag>(), specific);
+        case Parameter::Id::virtualizerTag:
+            return getParameterVirtualizer(id.get<Parameter::Id::virtualizerTag>(), specific);
+        case Parameter::Id::volumeTag:
+            return getParameterVolume(id.get<Parameter::Id::volumeTag>(), specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "wrongIdTag");
+    }
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterEqualizer(const Equalizer::Id& id,
+                                                           Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != Equalizer::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "EqualizerTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    Equalizer eqParam;
+
+    auto tag = id.get<Equalizer::Id::commonTag>();
+    switch (tag) {
+        case Equalizer::bandLevels: {
+            eqParam.set<Equalizer::bandLevels>(mContext->getEqualizerBandLevels());
+            break;
+        }
+        case Equalizer::preset: {
+            eqParam.set<Equalizer::preset>(mContext->getEqualizerPreset());
+            break;
+        }
+        case Equalizer::bandFrequencies: {
+            eqParam.set<Equalizer::bandFrequencies>(lvm::kEqBandFrequency);
+            break;
+        }
+        case Equalizer::presets: {
+            eqParam.set<Equalizer::presets>(lvm::kEqPresets);
+            break;
+        }
+        case Equalizer::centerFreqMh: {
+            eqParam.set<Equalizer::centerFreqMh>(mContext->getEqualizerCenterFreqs());
+            break;
+        }
+        case Equalizer::vendor: {
+            LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "unsupportedTag");
+        }
+    }
+
+    specific->set<Parameter::Specific::equalizer>(eqParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterBassBoost(const BassBoost::Id& id,
+                                                           Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != BassBoost::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "BassBoostTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    BassBoost bbParam;
+
+    auto tag = id.get<BassBoost::Id::commonTag>();
+    switch (tag) {
+        case BassBoost::strengthPm: {
+            bbParam.set<BassBoost::strengthPm>(mContext->getBassBoostStrength());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "BassBoostTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::bassBoost>(bbParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterVolume(const Volume::Id& id,
+                                                        Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != Volume::Id::commonTag, EX_ILLEGAL_ARGUMENT, "VolumeTagNotSupported");
+
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    Volume volParam;
+
+    auto tag = id.get<Volume::Id::commonTag>();
+    switch (tag) {
+        case Volume::levelDb: {
+            volParam.set<Volume::levelDb>(mContext->getVolumeLevel());
+            break;
+        }
+        case Volume::mute: {
+            volParam.set<Volume::mute>(mContext->getVolumeMute());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "VolumeTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::volume>(volParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectBundleAidl::getParameterVirtualizer(const Virtualizer::Id& id,
+                                                             Parameter::Specific* specific) {
+    RETURN_IF((id.getTag() != Virtualizer::Id::commonTag) &&
+                      (id.getTag() != Virtualizer::Id::speakerAnglesPayload),
+              EX_ILLEGAL_ARGUMENT, "VirtualizerTagNotSupported");
+
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    Virtualizer vrParam;
+
+    if (id.getTag() == Virtualizer::Id::speakerAnglesPayload) {
+        auto angles = mContext->getSpeakerAngles(id.get<Virtualizer::Id::speakerAnglesPayload>());
+        Virtualizer param = Virtualizer::make<Virtualizer::speakerAngles>(angles);
+        specific->set<Parameter::Specific::virtualizer>(param);
+        return ndk::ScopedAStatus::ok();
+    }
+
+    auto tag = id.get<Virtualizer::Id::commonTag>();
+    switch (tag) {
+        case Virtualizer::strengthPm: {
+            vrParam.set<Virtualizer::strengthPm>(mContext->getVirtualizerStrength());
+            break;
+        }
+        case Virtualizer::device: {
+            vrParam.set<Virtualizer::device>(mContext->getForcedDevice());
+            break;
+        }
+        case Virtualizer::speakerAngles:
+            FALLTHROUGH_INTENDED;
+        case Virtualizer::vendor: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "VirtualizerTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::virtualizer>(vrParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> EffectBundleAidl::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exist";
+    } else {
+        // GlobalSession is a singleton
+        mContext = GlobalSession::getGlobalSession().createSession(mType, 1 /* statusFmqDepth */,
+                                                                   common);
+    }
+
+    return mContext;
+}
+
+std::shared_ptr<EffectContext> EffectBundleAidl::getContext() {
+    return mContext;
+}
+
+RetCode EffectBundleAidl::releaseContext() {
+    if (mContext) {
+        GlobalSession::getGlobalSession().releaseSession(mType, mContext->getSessionId());
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+ndk::ScopedAStatus EffectBundleAidl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, sampleToProcess);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
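
The two extern "C" entry points above are what the effect factory resolves from the shared library. The rough sketch below (illustrative only, not part of the patch) shows how they could be exercised directly, using only types already included in this file:

    // Sketch only: query the equalizer descriptor, then create an instance.
    const AudioUuid eqUuid = getEffectImplUuidEqualizerBundle();
    Descriptor desc;
    if (queryEffect(&eqUuid, &desc) == EX_NONE) {
        std::shared_ptr<IEffect> effect;
        if (createEffect(&eqUuid, &effect) == EX_NONE && effect != nullptr) {
            Descriptor fromInstance;
            effect->getDescriptor(&fromInstance);  // expected to match lvm::kEqualizerDesc
        }
    }
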
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
new file mode 100644
index 0000000..ec1abe8
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <functional>
+#include <map>
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <android-base/logging.h>
+
+#include "effect-impl/EffectImpl.h"
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+#include "GlobalSession.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectBundleAidl final : public EffectImpl {
+  public:
+    explicit EffectBundleAidl(const AudioUuid& uuid);
+    ~EffectBundleAidl() override;
+
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterCommon(const Parameter& param) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    std::shared_ptr<EffectContext> getContext() override;
+    RetCode releaseContext() override;
+
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+
+    std::string getEffectName() override { return *mEffectName; }
+
+  private:
+    std::shared_ptr<BundleContext> mContext;
+    const Descriptor* mDescriptor;
+    const std::string* mEffectName;
+    lvm::BundleEffectType mType = lvm::BundleEffectType::EQUALIZER;
+
+    IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
+
+    ndk::ScopedAStatus setParameterBassBoost(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Id& id,
+                                             Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterEqualizer(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Id& id,
+                                             Parameter::Specific* specific);
+    ndk::ScopedAStatus setParameterVolume(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterVolume(const Volume::Id& id, Parameter::Specific* specific);
+    ndk::ScopedAStatus setParameterVirtualizer(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterVirtualizer(const Virtualizer::Id& id,
+                                               Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
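
As a usage note (illustrative only, not part of the patch), reading back the current equalizer preset through getParameterSpecific() follows the union make<>/get<> pattern used in EffectBundleAidl.cpp; 'effect' below is a hypothetical EffectBundleAidl instance:

    Equalizer::Id eqId = Equalizer::Id::make<Equalizer::Id::commonTag>(Equalizer::preset);
    Parameter::Id id = Parameter::Id::make<Parameter::Id::equalizerTag>(eqId);
    Parameter::Specific out;
    // effect->getParameterSpecific(id, &out);
    // out.get<Parameter::Specific::equalizer>().get<Equalizer::preset>() then holds the preset.
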
diff --git a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
new file mode 100644
index 0000000..d31763b
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <memory>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+
+#include "BundleContext.h"
+#include "BundleTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+/**
+ * @brief Maintain all effect bundle sessions.
+ *
+ * Sessions are identified by session ID; the bundle implementation supports at most
+ * MAX_BUNDLE_SESSIONS sessions.
+ */
+class GlobalSession {
+  public:
+    static GlobalSession& getGlobalSession() {
+        static GlobalSession instance;
+        return instance;
+    }
+
+    bool isSessionIdExist(int sessionId) {
+        std::lock_guard lg(mMutex);
+        return mSessionMap.count(sessionId);
+    }
+
+    static bool findBundleTypeInList(std::vector<std::shared_ptr<BundleContext>>& list,
+                                     const lvm::BundleEffectType& type, bool remove = false) {
+        auto itor = std::find_if(list.begin(), list.end(),
+                                  [type](const std::shared_ptr<BundleContext>& bundle) {
+                                      return bundle ? bundle->getBundleType() == type : false;
+                                  });
+        if (itor == list.end()) {
+            return false;
+        }
+        if (remove && *itor) {
+            (*itor)->deInit();
+            list.erase(itor);
+        }
+        return true;
+    }
+
+    /**
+     * Create a BundleContext of the given type, wrapped in a shared_ptr. A session must not hold
+     * more than one context of each effect type.
+     */
+    std::shared_ptr<BundleContext> createSession(const lvm::BundleEffectType& type, int statusDepth,
+                                                 const Parameter::Common& common) {
+        int sessionId = common.session;
+        LOG(DEBUG) << __func__ << " " << type << " with sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
+        if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_BUNDLE_SESSIONS) {
+            LOG(ERROR) << __func__ << " exceed max bundle session";
+            return nullptr;
+        }
+
+        if (mSessionMap.count(sessionId)) {
+            if (findBundleTypeInList(mSessionMap[sessionId], type)) {
+                LOG(ERROR) << __func__ << " " << type << " already exists in session " << sessionId;
+                return nullptr;
+            }
+        }
+
+        auto& list = mSessionMap[sessionId];
+        auto context = std::make_shared<BundleContext>(statusDepth, common, type);
+        RETURN_VALUE_IF(!context, nullptr, "failedToCreateContext");
+
+        RetCode ret = context->init();
+        if (RetCode::SUCCESS != ret) {
+            LOG(ERROR) << __func__ << " context init ret " << ret;
+            return nullptr;
+        }
+        list.push_back(context);
+        return context;
+    }
+
+    void releaseSession(const lvm::BundleEffectType& type, int sessionId) {
+        LOG(DEBUG) << __func__ << " " << type << " sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
+        if (mSessionMap.count(sessionId)) {
+            auto& list = mSessionMap[sessionId];
+            if (!findBundleTypeInList(list, type, true /* remove */)) {
+                LOG(ERROR) << __func__ << " can't find " << type << "in session " << sessionId;
+                return;
+            }
+            if (list.size() == 0) {
+                mSessionMap.erase(sessionId);
+            }
+        }
+    }
+
+  private:
+    // Lock for mSessionMap access.
+    std::mutex mMutex;
+    // Max session number supported.
+    static constexpr int MAX_BUNDLE_SESSIONS = 32;
+    std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<BundleContext>>>
+            mSessionMap GUARDED_BY(mMutex);
+};
+}  // namespace aidl::android::hardware::audio::effect
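
For context (illustrative only, not part of the patch), the pairing that EffectBundleAidl::createContext()/releaseContext() performs against this singleton looks roughly as follows; 'common' stands for a Parameter::Common filled in by the framework with common.session set:

    auto& sessions = GlobalSession::getGlobalSession();
    std::shared_ptr<BundleContext> context =
            sessions.createSession(lvm::BundleEffectType::EQUALIZER, 1 /* statusDepth */, common);
    if (context != nullptr) {
        // ... process audio through the context ...
        sessions.releaseSession(lvm::BundleEffectType::EQUALIZER, context->getSessionId());
    }
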
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 1287514..fa300d2 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -100,3 +100,66 @@
         integer_overflow: true,
     },
 }
+
+cc_library_shared {
+    name: "libbundleaidl",
+    srcs: [
+        "Aidl/BundleContext.cpp",
+        "Aidl/EffectBundleAidl.cpp",
+        ":effectCommonFile",
+    ],
+    static_libs: ["libmusicbundle"],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    local_include_dirs: ["Aidl"],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
+
+cc_library_shared {
+    name: "libreverbaidl",
+    srcs: [
+        "Reverb/aidl/ReverbContext.cpp",
+        "Reverb/aidl/EffectReverb.cpp",
+        ":effectCommonFile",
+    ],
+    static_libs: ["libreverb"],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    local_include_dirs: ["Reverb/aidl"],
+    header_libs: [
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+    shared_libs: [
+        "libbase",
+        "libaudioutils",
+        "libcutils",
+        "liblog",
+    ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
new file mode 100644
index 0000000..73141b6
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -0,0 +1,399 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ReverbTypes.h"
+#define LOG_TAG "EffectReverb"
+#include <Utils.h>
+#include <algorithm>
+#include <unordered_set>
+
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+#include <audio_effects/effect_bassboost.h>
+#include <audio_effects/effect_equalizer.h>
+#include <audio_effects/effect_virtualizer.h>
+
+#include "EffectReverb.h"
+#include <limits.h>
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::EffectReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxPresetReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertPresetReverb;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+bool isReverbUuidSupported(const AudioUuid* uuid) {
+    return (*uuid == getEffectImplUuidAuxEnvReverb() ||
+            *uuid == getEffectImplUuidAuxPresetReverb() ||
+            *uuid == getEffectImplUuidInsertEnvReverb() ||
+            *uuid == getEffectImplUuidInsertPresetReverb());
+}
+
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (uuid == nullptr || !isReverbUuidSupported(uuid)) {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<EffectReverb>(*uuid);
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (*in_impl_uuid == getEffectImplUuidAuxEnvReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidInsertEnvReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidAuxPresetReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidInsertPresetReverb()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
+    } else {
+        LOG(ERROR) << __func__ << "uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectReverb::EffectReverb(const AudioUuid& uuid) {
+    LOG(DEBUG) << __func__ << " " << uuid.toString();
+    if (uuid == getEffectImplUuidAuxEnvReverb()) {
+        mType = lvm::ReverbEffectType::AUX_ENV;
+        mDescriptor = &lvm::kAuxEnvReverbDesc;
+        mEffectName = &lvm::kAuxEnvReverbEffectName;
+    } else if (uuid == getEffectImplUuidInsertEnvReverb()) {
+        mType = lvm::ReverbEffectType::INSERT_ENV;
+        mDescriptor = &lvm::kInsertEnvReverbDesc;
+        mEffectName = &lvm::kInsertEnvReverbEffectName;
+    } else if (uuid == getEffectImplUuidAuxPresetReverb()) {
+        mType = lvm::ReverbEffectType::AUX_PRESET;
+        mDescriptor = &lvm::kAuxPresetReverbDesc;
+        mEffectName = &lvm::kAuxPresetReverbEffectName;
+    } else if (uuid == getEffectImplUuidInsertPresetReverb()) {
+        mType = lvm::ReverbEffectType::INSERT_PRESET;
+        mDescriptor = &lvm::kInsertPresetReverbDesc;
+        mEffectName = &lvm::kInsertPresetReverbEffectName;
+    } else {
+        LOG(ERROR) << __func__ << " " << uuid.toString() << " not supported!";
+    }
+}
+
+EffectReverb::~EffectReverb() {
+    cleanUp();
+    LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectReverb::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    *_aidl_return = *mDescriptor;
+    LOG(DEBUG) << __func__ << " " << _aidl_return->toString();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterSpecific(const Parameter::Specific& specific) {
+    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = specific.getTag();
+    switch (tag) {
+        case Parameter::Specific::presetReverb:
+            return setParameterPresetReverb(specific);
+        case Parameter::Specific::environmentalReverb:
+            return setParameterEnvironmentalReverb(specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "specificParamNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterPresetReverb(const Parameter::Specific& specific) {
+    auto& prParam = specific.get<Parameter::Specific::presetReverb>();
+    RETURN_IF(!inRange(prParam, lvm::kPresetReverbRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = prParam.getTag();
+
+    switch (tag) {
+        case PresetReverb::preset: {
+            RETURN_IF(mContext->setPresetReverbPreset(prParam.get<PresetReverb::preset>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setPresetFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "PresetReverbTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterEnvironmentalReverb(
+        const Parameter::Specific& specific) {
+    auto& erParam = specific.get<Parameter::Specific::environmentalReverb>();
+    RETURN_IF(!inRange(erParam, lvm::kEnvReverbRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = erParam.getTag();
+
+    switch (tag) {
+        case EnvironmentalReverb::roomLevelMb: {
+            RETURN_IF(mContext->setEnvironmentalReverbRoomLevel(
+                              erParam.get<EnvironmentalReverb::roomLevelMb>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setRoomLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::roomHfLevelMb: {
+            RETURN_IF(
+                    mContext->setEnvironmentalReverbRoomHfLevel(
+                            erParam.get<EnvironmentalReverb::roomHfLevelMb>()) != RetCode::SUCCESS,
+                    EX_ILLEGAL_ARGUMENT, "setRoomHfLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::decayTimeMs: {
+            RETURN_IF(mContext->setEnvironmentalReverbDecayTime(
+                              erParam.get<EnvironmentalReverb::decayTimeMs>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDecayTimeFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::decayHfRatioPm: {
+            RETURN_IF(
+                    mContext->setEnvironmentalReverbDecayHfRatio(
+                            erParam.get<EnvironmentalReverb::decayHfRatioPm>()) != RetCode::SUCCESS,
+                    EX_ILLEGAL_ARGUMENT, "setDecayHfRatioFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::reflectionsLevelMb: {
+            RETURN_IF(mContext->setReflectionsLevel(
+                              erParam.get<EnvironmentalReverb::reflectionsLevelMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setReflectionsLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::reflectionsDelayMs: {
+            RETURN_IF(mContext->setReflectionsDelay(
+                              erParam.get<EnvironmentalReverb::reflectionsDelayMs>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setReflectionsDelayFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::levelMb: {
+            RETURN_IF(mContext->setEnvironmentalReverbLevel(
+                              erParam.get<EnvironmentalReverb::levelMb>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setLevelFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::delayMs: {
+            RETURN_IF(mContext->setEnvironmentalReverbDelay(
+                              erParam.get<EnvironmentalReverb::delayMs>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDelayFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::diffusionPm: {
+            RETURN_IF(mContext->setEnvironmentalReverbDiffusion(
+                              erParam.get<EnvironmentalReverb::diffusionPm>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDiffusionFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::densityPm: {
+            RETURN_IF(mContext->setEnvironmentalReverbDensity(
+                              erParam.get<EnvironmentalReverb::densityPm>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setDensityFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case EnvironmentalReverb::bypass: {
+            RETURN_IF(mContext->setEnvironmentalReverbBypass(
+                              erParam.get<EnvironmentalReverb::bypass>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setBypassFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterSpecific(const Parameter::Id& id,
+                                                      Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+
+    switch (tag) {
+        case Parameter::Id::presetReverbTag:
+            return getParameterPresetReverb(id.get<Parameter::Id::presetReverbTag>(), specific);
+        case Parameter::Id::environmentalReverbTag:
+            return getParameterEnvironmentalReverb(id.get<Parameter::Id::environmentalReverbTag>(),
+                                                   specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "wrongIdTag");
+    }
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterPresetReverb(const PresetReverb::Id& id,
+                                                          Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != PresetReverb::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "PresetReverbTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    PresetReverb prParam;
+    auto tag = id.get<PresetReverb::Id::commonTag>();
+    switch (tag) {
+        case PresetReverb::preset: {
+            prParam.set<PresetReverb::preset>(mContext->getPresetReverbPreset());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "PresetReverbTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::presetReverb>(prParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
+                                                                 Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != EnvironmentalReverb::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "EnvironmentalReverbTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    EnvironmentalReverb erParam;
+
+    auto tag = id.get<EnvironmentalReverb::Id::commonTag>();
+    switch (tag) {
+        case EnvironmentalReverb::roomLevelMb: {
+            erParam.set<EnvironmentalReverb::roomLevelMb>(
+                    mContext->getEnvironmentalReverbRoomLevel());
+            break;
+        }
+        case EnvironmentalReverb::roomHfLevelMb: {
+            erParam.set<EnvironmentalReverb::roomHfLevelMb>(
+                    mContext->getEnvironmentalReverbRoomHfLevel());
+            break;
+        }
+        case EnvironmentalReverb::decayTimeMs: {
+            erParam.set<EnvironmentalReverb::decayTimeMs>(
+                    mContext->getEnvironmentalReverbDecayTime());
+            break;
+        }
+        case EnvironmentalReverb::decayHfRatioPm: {
+            erParam.set<EnvironmentalReverb::decayHfRatioPm>(
+                    mContext->getEnvironmentalReverbDecayHfRatio());
+            break;
+        }
+        case EnvironmentalReverb::reflectionsLevelMb: {
+            erParam.set<EnvironmentalReverb::reflectionsLevelMb>(mContext->getReflectionsLevel());
+            break;
+        }
+        case EnvironmentalReverb::reflectionsDelayMs: {
+            erParam.set<EnvironmentalReverb::reflectionsDelayMs>(mContext->getReflectionsDelay());
+            break;
+        }
+        case EnvironmentalReverb::levelMb: {
+            erParam.set<EnvironmentalReverb::levelMb>(mContext->getEnvironmentalReverbLevel());
+            break;
+        }
+        case EnvironmentalReverb::delayMs: {
+            erParam.set<EnvironmentalReverb::delayMs>(mContext->getEnvironmentalReverbDelay());
+            break;
+        }
+        case EnvironmentalReverb::diffusionPm: {
+            erParam.set<EnvironmentalReverb::diffusionPm>(
+                    mContext->getEnvironmentalReverbDiffusion());
+            break;
+        }
+        case EnvironmentalReverb::densityPm: {
+            erParam.set<EnvironmentalReverb::densityPm>(mContext->getEnvironmentalReverbDensity());
+            break;
+        }
+        case EnvironmentalReverb::bypass: {
+            erParam.set<EnvironmentalReverb::bypass>(mContext->getEnvironmentalReverbBypass());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::environmentalReverb>(erParam);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> EffectReverb::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exist";
+    } else {
+        mContext = std::make_shared<ReverbContext>(1 /* statusFmqDepth */, common, mType);
+    }
+
+    return mContext;
+}
+
+std::shared_ptr<EffectContext> EffectReverb::getContext() {
+    return mContext;
+}
+
+RetCode EffectReverb::releaseContext() {
+    if (mContext) {
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
+ndk::ScopedAStatus EffectReverb::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, sampleToProcess);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
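
As a usage note (illustrative only, not part of the patch), a preset is delivered to this effect through the same Parameter::Specific union handled above; PresetReverb::Presets::NONE is used because it is the only preset value referenced in this patch, and 'reverb' is a hypothetical EffectReverb instance:

    PresetReverb pr = PresetReverb::make<PresetReverb::preset>(PresetReverb::Presets::NONE);
    Parameter::Specific specific =
            Parameter::Specific::make<Parameter::Specific::presetReverb>(pr);
    // reverb->setParameterSpecific(specific);
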
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
new file mode 100644
index 0000000..d7d2bbd
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "effect-impl/EffectImpl.h"
+#include "ReverbContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectReverb final : public EffectImpl {
+  public:
+    explicit EffectReverb(const AudioUuid& uuid);
+    ~EffectReverb() override;
+
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    std::shared_ptr<EffectContext> getContext() override;
+    RetCode releaseContext() override;
+
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+
+    std::string getEffectName() override { return *mEffectName; }
+
+  private:
+    std::shared_ptr<ReverbContext> mContext;
+    const Descriptor* mDescriptor;
+    const std::string* mEffectName;
+    lvm::ReverbEffectType mType;
+
+    IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
+
+    ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterPresetReverb(const PresetReverb::Id& id,
+                                                Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
+                                                       Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
new file mode 100644
index 0000000..79e67f2
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -0,0 +1,521 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+
+#define LOG_TAG "ReverbContext"
+#include <android-base/logging.h>
+#include <Utils.h>
+
+#include "ReverbContext.h"
+#include "VectorArithmetic.h"
+#include "math.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+using aidl::android::media::audio::common::AudioDeviceDescription;
+using aidl::android::media::audio::common::AudioDeviceType;
+
+#define GOTO_IF_LVREV_ERROR(status, tag, log)                                     \
+    do {                                                                          \
+        LVREV_ReturnStatus_en temp = (status);                                    \
+        if (temp != LVREV_SUCCESS) {                                              \
+            LOG(ERROR) << __func__ << " return status: " << temp << " " << (log); \
+            goto tag;                                                             \
+        }                                                                         \
+    } while (0)
+
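+// Reset all parameters to their defaults and allocate the LVREV engine instance;
+// on any LVREV failure the instance is released again via deInit().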
+RetCode ReverbContext::init() {
+    if (isPreset()) {
+        // force reloading preset at first call to process()
+        mPreset = PresetReverb::Presets::NONE;
+        mNextPreset = PresetReverb::Presets::NONE;
+    }
+
+    mVolume.left = kUnitVolume;
+    mVolume.right = kUnitVolume;
+    mPrevVolume.left = kUnitVolume;
+    mPrevVolume.right = kUnitVolume;
+    volumeMode = VOLUME_FLAT;
+
+    mSamplesToExitCount = kDefaultDecayTime * mCommon.input.base.sampleRate / 1000;
+
+    /* The saved strength is used so that a get returns exactly the strength that was set,
+     * because the original strength range of 0:1000 is mapped to 1:15; this avoids a
+     * quantization-like effect when the value is read back.
+     */
+    mRoomLevel = lvm::kMinLevel;
+    mRoomHfLevel = 0;
+    mEnabled = LVM_FALSE;
+    mDecayTime = kDefaultDecayTime;
+    mDecayHfRatio = kDefaultDamping * 20;
+    mDensity = kDefaultRoomSize * 10;
+    mDiffusion = kDefaultDensity * 10;
+    mLevel = lvm::kMinLevel;
+
+    // allocate lvm reverb instance
+    LVREV_ReturnStatus_en status = LVREV_SUCCESS;
+    {
+        std::lock_guard lg(mMutex);
+        LVREV_InstanceParams_st params = {
+                .MaxBlockSize = lvm::kMaxCallSize,
+                // Maximum source format; the actual stream may be mono during processing
+                .SourceFormat = LVM_STEREO,
+                .NumDelays = LVREV_DELAYLINES_4,
+        };
+        /* Init sets the instance handle */
+        status = LVREV_GetInstanceHandle(&mInstance, &params);
+        GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
+
+        // set control
+        LVREV_ControlParams_st controlParams;
+        initControlParameter(controlParams);
+        status = LVREV_SetControlParameters(mInstance, &controlParams);
+        GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
+    }
+
+    return RetCode::SUCCESS;
+
+deinit:
+    deInit();
+    return RetCode::ERROR_EFFECT_LIB_ERROR;
+}
+
+void ReverbContext::deInit() {
+    std::lock_guard lg(mMutex);
+    if (mInstance) {
+        LVREV_FreeInstance(mInstance);
+        mInstance = nullptr;
+    }
+}
+
+RetCode ReverbContext::enable() {
+    if (mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+    mEnabled = true;
+    mSamplesToExitCount = (mDecayTime * mCommon.input.base.sampleRate) / 1000;
+    // force no volume ramp for first buffer processed after enabling the effect
+    volumeMode = VOLUME_FLAT;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::disable() {
+    if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+    mEnabled = false;
+    return RetCode::SUCCESS;
+}
+
+bool ReverbContext::isAuxiliary() {
+    return (mType == lvm::ReverbEffectType::AUX_ENV || mType == lvm::ReverbEffectType::AUX_PRESET);
+}
+
+bool ReverbContext::isPreset() {
+    return (mType == lvm::ReverbEffectType::AUX_PRESET ||
+            mType == lvm::ReverbEffectType::INSERT_PRESET);
+}
+
+RetCode ReverbContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
+    if (volumeMode == VOLUME_OFF) {
+        // force no volume ramp for first buffer processed after getting volume control
+        volumeMode = VOLUME_FLAT;
+    }
+    mVolumeStereo = volume;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setPresetReverbPreset(const PresetReverb::Presets& preset) {
+    mNextPreset = preset;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbRoomLevel(int roomLevel) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        // The LVREV level is driven by the sum of the room level and reverb level
+        // controls, offset by the maximum reverb level.
+        int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
+        params.Level = convertLevel(combinedLevel);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mRoomLevel = roomLevel;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbRoomHfLevel(int roomHfLevel) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.LPF = convertHfLevel(roomHfLevel);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mRoomHfLevel = roomHfLevel;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDecayTime(int decayTime) {
+    int time = decayTime;
+    if (time > lvm::kMaxT60) {
+        time = lvm::kMaxT60;
+    }
+
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.T60 = (LVM_UINT16)time;
+        mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mDecayTime = time;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDecayHfRatio(int decayHfRatio) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.Damping = (LVM_INT16)(decayHfRatio / 20);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mDecayHfRatio = decayHfRatio;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbLevel(int level) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        // The LVREV level is driven by the sum of the reverb level and room level
+        // controls, offset by the maximum reverb level.
+        int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
+        params.Level = convertLevel(combinedLevel);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mLevel = level;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDelay(int delay) {
+    mDelay = delay;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDiffusion(int diffusion) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.Density = (LVM_INT16)(diffusion / 10);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mDiffusion = diffusion;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDensity(int density) {
+    // Update Control Parameter
+    LVREV_ControlParams_st params;
+    {
+        std::lock_guard lg(mMutex);
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+        params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
+
+        RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+                        RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+    }
+    mDensity = density;
+    return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbBypass(bool bypass) {
+    mBypass = bypass;
+    return RetCode::SUCCESS;
+}
+
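+// Apply the pending preset by translating it into the equivalent environmental
+// reverb parameters; reverbDelay is not applied yet (see the TODO below).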
+void ReverbContext::loadPreset() {
+    // TODO: add delay when early reflections are implemented
+    mPreset = mNextPreset;
+
+    if (mPreset != PresetReverb::Presets::NONE) {
+        const t_reverb_settings preset = mReverbPresets[mPreset];
+        setEnvironmentalReverbRoomLevel(preset.roomLevel);
+        setEnvironmentalReverbRoomHfLevel(preset.roomHFLevel);
+        setEnvironmentalReverbDecayTime(preset.decayTime);
+        setEnvironmentalReverbDecayHfRatio(preset.decayHFRatio);
+        setEnvironmentalReverbLevel(preset.reverbLevel);
+        // reverbDelay
+        setEnvironmentalReverbDiffusion(preset.diffusion);
+        setEnvironmentalReverbDensity(preset.density);
+    }
+}
+
+void ReverbContext::initControlParameter(LVREV_ControlParams_st& params) {
+    /* Set the initial process parameters */
+    /* General parameters */
+    params.OperatingMode = LVM_MODE_ON;
+    params.SampleRate = LVM_FS_44100;
+    params.SourceFormat = (::aidl::android::hardware::audio::common::getChannelCount(
+                                   mCommon.input.base.channelMask) == 1
+                                   ? LVM_MONO
+                                   : LVM_STEREO);
+
+    if (!isAuxiliary() && params.SourceFormat == LVM_MONO) {
+        params.SourceFormat = LVM_STEREO;
+    }
+
+    /* Reverb parameters */
+    params.Level = kDefaultLevel;
+    params.LPF = kDefaultLPF;
+    params.HPF = kDefaultHPF;
+    params.T60 = kDefaultDecayTime;
+    params.Density = kDefaultDensity;
+    params.Damping = kDefaultDamping;
+    params.RoomSize = kDefaultRoomSize;
+}
+
+/*
+ * Convert a reverb level from the OpenSL ES millibel range to the LVREV level index.
+ *
+ * @param level : combined room/reverb level to be applied, in millibels
+ */
+int ReverbContext::convertLevel(int level) {
+    for (int i = 0; i < kLevelMapping.size(); i++) {
+        if (level <= kLevelMapping[i]) {
+            return i;
+        }
+    }
+    return kDefaultLevel;
+}
+
+/*
+ * Convert a room HF level from the OpenSL ES millibel range to the LVREV low-pass
+ * filter setting in Hz.
+ *
+ * @param hfLevel : room HF level to be applied, in millibels
+ */
+int16_t ReverbContext::convertHfLevel(int hfLevel) {
+    for (const auto& lpfPair : kLPFMapping) {
+        if (hfLevel <= lpfPair.roomHf) {
+            return lpfPair.lpf;
+        }
+    }
+    return kDefaultLPF;
+}
+
+IEffect::Status ReverbContext::lvmProcess(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    int64_t inputFrameCount = getCommon().input.frameCount;
+    int64_t outputFrameCount = getCommon().output.frameCount;
+    RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
+    RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    std::lock_guard lg(mMutex);
+
+    int channels = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.input.base.channelMask);
+    int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
+            mCommon.output.base.channelMask);
+    int frameCount = mCommon.input.frameCount;
+
+    // Reverb only affects the first two (stereo) channels of a multichannel source.
+    if (channels < 1 || channels > LVM_MAX_CHANNELS) {
+        LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
+        return status;
+    }
+
+    std::vector<float> inFrames(samples);
+    std::vector<float> outFrames(frameCount * FCC_2);
+
+    if (isPreset() && mNextPreset != mPreset) {
+        loadPreset();
+    }
+
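+    // Auxiliary reverb receives its send signal directly in the input buffer;
+    // insert reverb builds a stereo wet-path input below, attenuated by the send level.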
+    if (isAuxiliary()) {
+        inFrames.assign(in, in + samples);
+    } else {
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
+                inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+            }
+        } else {
+            // mono input is duplicated into both channels
+            for (int i = 0; i < frameCount; i++) {
+                inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+            }
+        }
+    }
+
+    if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
+        std::fill(outFrames.begin(), outFrames.end(), 0);  // always stereo here
+    } else {
+        if (!mEnabled && mSamplesToExitCount > 0) {
+            std::fill(outFrames.begin(), outFrames.end(), 0);
+            LOG(VERBOSE) << "Zeroing " << channels << " samples per frame while draining the reverb tail";
+        }
+
+        /* Process the samples, producing a stereo output */
+        LVREV_ReturnStatus_en lvrevStatus =
+                LVREV_Process(mInstance,        /* Instance handle */
+                              inFrames.data(),  /* Input buffer */
+                              outFrames.data(), /* Output buffer */
+                              frameCount);      /* Number of samples to read */
+        if (lvrevStatus != LVREV_SUCCESS) {
+            LOG(ERROR) << __func__ << " LVREV_Process failed: " << lvrevStatus;
+            return {EX_UNSUPPORTED_OPERATION, 0, 0};
+        }
+    }
+    if (isAuxiliary()) {
+        // auxiliary reverb produces a wet-only output; nothing to mix here
+    } else {
+        // insert reverb: mix the wet stereo output with the dry input, then apply volume
+        if (channels >= FCC_2) {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                outFrames[FCC_2 * i] += in[channels * i];
+                outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                // Mix with dry input
+                outFrames[FCC_2 * i] += in[i];
+                outFrames[FCC_2 * i + 1] += in[i];
+            }
+        }
+
+        // apply volume with ramp if needed
+        if (mVolume != mPrevVolume && volumeMode == VOLUME_RAMP) {
+            float vl = mPrevVolume.left;
+            float incl = (mVolume.left - vl) / frameCount;
+            float vr = mPrevVolume.right;
+            float incr = (mVolume.right - vr) / frameCount;
+
+            for (int i = 0; i < frameCount; i++) {
+                outFrames[FCC_2 * i] *= vl;
+                outFrames[FCC_2 * i + 1] *= vr;
+
+                vl += incl;
+                vr += incr;
+            }
+            mPrevVolume = mVolume;
+        } else if (volumeMode != VOLUME_OFF) {
+            if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
+                for (int i = 0; i < frameCount; i++) {
+                    outFrames[FCC_2 * i] *= mVolume.left;
+                    outFrames[FCC_2 * i + 1] *= mVolume.right;
+                }
+            }
+            mPrevVolume = mVolume;
+            volumeMode = VOLUME_RAMP;
+        }
+    }
+
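+    // Write the stereo wet/mixed frames to the output buffer. Note that accumulate is
+    // always false here; both the copy and accumulate paths are kept below.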
+    bool accumulate = false;
+    if (outChannels > 2) {
+        // Accumulate if required
+        if (accumulate) {
+            for (int i = 0; i < frameCount; i++) {
+                out[outChannels * i] += outFrames[FCC_2 * i];
+                out[outChannels * i + 1] += outFrames[FCC_2 * i + 1];
+            }
+        } else {
+            for (int i = 0; i < frameCount; i++) {
+                out[outChannels * i] = outFrames[FCC_2 * i];
+                out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+            }
+        }
+        if (!isAuxiliary()) {
+            for (int i = 0; i < frameCount; i++) {
+                // channels and outChannels are expected to be the same here.
+                for (int j = FCC_2; j < outChannels; j++) {
+                    out[outChannels * i + j] = in[outChannels * i + j];
+                }
+            }
+        }
+    } else {
+        if (accumulate) {
+            if (outChannels == FCC_1) {
+                for (int i = 0; i < frameCount; i++) {
+                    out[i] += ((outFrames[i * FCC_2] + outFrames[i * FCC_2 + 1]) * 0.5f);
+                }
+            } else {
+                for (int i = 0; i < frameCount * FCC_2; i++) {
+                    out[i] += outFrames[i];
+                }
+            }
+        } else {
+            if (outChannels == FCC_1) {
+                From2iToMono_Float(outFrames.data(), out, frameCount);
+            } else {
+                for (int i = 0; i < frameCount * FCC_2; i++) {
+                    out[i] = outFrames[i];
+                }
+            }
+        }
+    }
+
+    LOG(DEBUG) << __func__ << " done processing";
+
+    if (!mEnabled && mSamplesToExitCount > 0) {
+        // mSamplesToExitCount may go negative here; only the "> 0" checks above matter.
+        mSamplesToExitCount -= samples;
+    }
+
+    return {STATUS_OK, samples, outChannels * frameCount};
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
new file mode 100644
index 0000000..9bb0b1a
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <unordered_map>
+
+#include "ReverbTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum VolumeMode {
+    VOLUME_OFF,
+    VOLUME_FLAT,
+    VOLUME_RAMP,
+};
+
+struct LPFPair {
+    int roomHf;
+    int lpf;
+};
+
+class ReverbContext final : public EffectContext {
+  public:
+    ReverbContext(int statusDepth, const Parameter::Common& common,
+                  const lvm::ReverbEffectType& type)
+        : EffectContext(statusDepth, common), mType(type) {
+        LOG(DEBUG) << __func__ << " " << type;
+        init();
+    }
+    ~ReverbContext() override {
+        LOG(DEBUG) << __func__;
+        deInit();
+    }
+
+    RetCode init();
+    void deInit();
+
+    RetCode enable();
+    RetCode disable();
+
+    bool isAuxiliary();
+    bool isPreset();
+
+    RetCode setPresetReverbPreset(const PresetReverb::Presets& preset);
+    PresetReverb::Presets getPresetReverbPreset() const { return mNextPreset; }
+
+    RetCode setEnvironmentalReverbRoomLevel(int roomLevel);
+    int getEnvironmentalReverbRoomLevel() const { return mRoomLevel; }
+    RetCode setEnvironmentalReverbRoomHfLevel(int roomHfLevel);
+    int getEnvironmentalReverbRoomHfLevel() const { return mRoomHfLevel; }
+    RetCode setEnvironmentalReverbDecayTime(int decayTime);
+    int getEnvironmentalReverbDecayTime() const { return mDecayTime; }
+    RetCode setEnvironmentalReverbDecayHfRatio(int decayHfRatio);
+    int getEnvironmentalReverbDecayHfRatio() const { return mDecayHfRatio; }
+    RetCode setEnvironmentalReverbLevel(int level);
+    int getEnvironmentalReverbLevel() const { return mLevel; }
+    RetCode setEnvironmentalReverbDelay(int delay);
+    int getEnvironmentalReverbDelay() const { return mDelay; }
+    RetCode setEnvironmentalReverbDiffusion(int diffusion);
+    int getEnvironmentalReverbDiffusion() const { return mDiffusion; }
+    RetCode setEnvironmentalReverbDensity(int density);
+    int getEnvironmentalReverbDensity() const { return mDensity; }
+    RetCode setEnvironmentalReverbBypass(bool bypass);
+    bool getEnvironmentalReverbBypass() const { return mBypass; }
+
+    RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
+    Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
+
+    RetCode setReflectionsDelay(int delay) {
+        mReflectionsDelayMs = delay;
+        return RetCode::SUCCESS;
+    }
+    int getReflectionsDelay() const { return mReflectionsDelayMs; }
+
+    RetCode setReflectionsLevel(int level) {
+        mReflectionsLevelMb = level;
+        return RetCode::SUCCESS;
+    }
+    int getReflectionsLevel() const { return mReflectionsLevelMb; }
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    static constexpr inline float kUnitVolume = 1;
+    static constexpr inline float kSendLevel = 0.75f;
+    static constexpr inline int kDefaultLevel = 0;
+    static constexpr inline int kDefaultLPF = 23999;      /* Default low pass filter, in Hz */
+    static constexpr inline int kDefaultHPF = 50;         /* Default high pass filter, in Hz */
+    static constexpr inline int kDefaultDecayTime = 1490; /* Default Decay time, in ms */
+    static constexpr inline int kDefaultDensity = 100;    /* Default Echo density */
+    static constexpr inline int kDefaultDamping = 21;
+    static constexpr inline int kDefaultRoomSize = 100;
+
+    static inline const std::vector<LPFPair> kLPFMapping = {
+            // Limit range to 50 for LVREV parameter range
+            {-10000, 50}, {-5000, 50},  {-4000, 50},  {-3000, 158}, {-2000, 502}, {-1000, 1666},
+            {-900, 1897}, {-800, 2169}, {-700, 2496}, {-600, 2895}, {-500, 3400}, {-400, 4066},
+            {-300, 5011}, {-200, 6537}, {-100, 9826}, {-99, 9881},  {-98, 9937},  {-97, 9994},
+            {-96, 10052}, {-95, 10111}, {-94, 10171}, {-93, 10231}, {-92, 10293}, {-91, 10356},
+            {-90, 10419}, {-89, 10484}, {-88, 10549}, {-87, 10616}, {-86, 10684}, {-85, 10753},
+            {-84, 10823}, {-83, 10895}, {-82, 10968}, {-81, 11042}, {-80, 11117}, {-79, 11194},
+            {-78, 11272}, {-77, 11352}, {-76, 11433}, {-75, 11516}, {-74, 11600}, {-73, 11686},
+            {-72, 11774}, {-71, 11864}, {-70, 11955}, {-69, 12049}, {-68, 12144}, {-67, 12242},
+            {-66, 12341}, {-65, 12443}, {-64, 12548}, {-63, 12654}, {-62, 12763}, {-61, 12875},
+            {-60, 12990}, {-59, 13107}, {-58, 13227}, {-57, 13351}, {-56, 13477}, {-55, 13607},
+            {-54, 13741}, {-53, 13878}, {-52, 14019}, {-51, 14164}, {-50, 14313}, {-49, 14467},
+            {-48, 14626}, {-47, 14789}, {-46, 14958}, {-45, 15132}, {-44, 15312}, {-43, 15498},
+            {-42, 15691}, {-41, 15890}, {-40, 16097}, {-39, 16311}, {-38, 16534}, {-37, 16766},
+            {-36, 17007}, {-35, 17259}, {-34, 17521}, {-33, 17795}, {-32, 18081}, {-31, 18381},
+            {-30, 18696}, {-29, 19027}, {-28, 19375}, {-27, 19742}, {-26, 20129}, {-25, 20540},
+            {-24, 20976}, {-23, 21439}, {-22, 21934}, {-21, 22463}, {-20, 23031}, {-19, 23643},
+            {-18, 23999}};
+
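+    // Millibel level thresholds; convertLevel() returns the index of the first entry
+    // that is >= the requested combined level, and that index is used as the LVREV level.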
+    static inline const std::vector<int> kLevelMapping = {
+            -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, -2000, -1918,
+            -1842,  -1773, -1708, -1648, -1592, -1540, -1490, -1443, -1398, -1356, -1316, -1277,
+            -1240,  -1205, -1171, -1138, -1106, -1076, -1046, -1018, -990,  -963,  -938,  -912,
+            -888,   -864,  -841,  -818,  -796,  -775,  -754,  -734,  -714,  -694,  -675,  -656,
+            -638,   -620,  -603,  -585,  -568,  -552,  -536,  -520,  -504,  -489,  -474,  -459,
+            -444,   -430,  -416,  -402,  -388,  -375,  -361,  -348,  -335,  -323,  -310,  -298,
+            -286,   -274,  -262,  -250,  -239,  -228,  -216,  -205,  -194,  -184,  -173,  -162,
+            -152,   -142,  -132,  -121,  -112,  -102,  -92,   -82,   -73,   -64,   -54,   -45,
+            -36,    -27,   -18,   -9,    0};
+
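+    // Preset parameter tables in t_reverb_settings field order, applied by loadPreset().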
+    static inline std::unordered_map<PresetReverb::Presets, t_reverb_settings> mReverbPresets = {
+            {PresetReverb::Presets::NONE, {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}},
+            {PresetReverb::Presets::SMALLROOM,
+             {-400, -600, 1100, 830, -400, 5, 500, 10, 1000, 1000}},
+            {PresetReverb::Presets::MEDIUMROOM,
+             {-400, -600, 1300, 830, -1000, 20, -200, 20, 1000, 1000}},
+            {PresetReverb::Presets::LARGEROOM,
+             {-400, -600, 1500, 830, -1600, 5, -1000, 40, 1000, 1000}},
+            {PresetReverb::Presets::MEDIUMHALL,
+             {-400, -600, 1800, 700, -1300, 15, -800, 30, 1000, 1000}},
+            {PresetReverb::Presets::LARGEHALL,
+             {-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}},
+            {PresetReverb::Presets::PLATE, {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}}};
+
+    std::mutex mMutex;
+    const lvm::ReverbEffectType mType;
+    bool mEnabled = false;
+    LVREV_Handle_t mInstance GUARDED_BY(mMutex);
+
+    int mRoomLevel = 0;
+    int mRoomHfLevel = 0;
+    int mDecayTime = 0;
+    int mDecayHfRatio = 0;
+    int mLevel = 0;
+    int mDelay = 0;
+    int mDiffusion = 0;
+    int mDensity = 0;
+    bool mBypass = false;
+    int mReflectionsLevelMb = 0;
+    int mReflectionsDelayMs = 0;
+
+    PresetReverb::Presets mPreset;
+    PresetReverb::Presets mNextPreset;
+
+    int mSamplesToExitCount;
+
+    Parameter::VolumeStereo mVolume;
+    Parameter::VolumeStereo mPrevVolume;
+    VolumeMode volumeMode;
+
+    void initControlParameter(LVREV_ControlParams_st& params);
+    int16_t convertHfLevel(int hfLevel);
+    int convertLevel(int level);
+    void loadPreset();
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
new file mode 100644
index 0000000..37f9287
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <android/binder_enums.h>
+#include <audio_effects/effect_environmentalreverb.h>
+#include <audio_effects/effect_presetreverb.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "effect-impl/EffectTypes.h"
+// from Reverb/lib
+#include "LVREV.h"
+
+namespace aidl::android::hardware::audio::effect {
+namespace lvm {
+
+constexpr inline int kMaxCallSize = 256;
+constexpr inline int kMinLevel = -6000;
+constexpr inline int kMaxT60 = 7000; /* Maximum decay time */
+constexpr inline int kMaxReverbLevel = 2000;
+constexpr inline int kMaxFrameSize = 2560;
+constexpr inline int kCpuLoadARM9E = 470;                      // Expressed in 0.1 MIPS
+constexpr inline int kMemUsage = (71 + (kMaxFrameSize >> 7));  // Expressed in kB
+
+static const std::vector<Range::EnvironmentalReverbRange> kEnvReverbRanges = {
+        MAKE_RANGE(EnvironmentalReverb, roomLevelMb, lvm::kMinLevel, 0),
+        MAKE_RANGE(EnvironmentalReverb, roomHfLevelMb, -4000, 0),
+        MAKE_RANGE(EnvironmentalReverb, decayTimeMs, 0, lvm::kMaxT60),
+        MAKE_RANGE(EnvironmentalReverb, decayHfRatioPm, 100, 2000),
+        MAKE_RANGE(EnvironmentalReverb, levelMb, lvm::kMinLevel, 0),
+        MAKE_RANGE(EnvironmentalReverb, delayMs, 0, 65),
+        MAKE_RANGE(EnvironmentalReverb, diffusionPm, 0, 1000),
+        MAKE_RANGE(EnvironmentalReverb, densityPm, 0, 1000)};
+static const Capability kEnvReverbCap = {
+        .range = Range::make<Range::environmentalReverb>(kEnvReverbRanges)};
+
+// NXP SW auxiliary environmental reverb
+static const std::string kAuxEnvReverbEffectName = "Auxiliary Environmental Reverb";
+static const Descriptor kAuxEnvReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+                          .uuid = getEffectImplUuidAuxEnvReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::AUXILIARY},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kAuxEnvReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEnvReverbCap};
+
+// NXP SW insert environmental reverb
+static const std::string kInsertEnvReverbEffectName = "Insert Environmental Reverb";
+static const Descriptor kInsertEnvReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+                          .uuid = getEffectImplUuidInsertEnvReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kInsertEnvReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kEnvReverbCap};
+
+static const std::vector<PresetReverb::Presets> kSupportedPresets{
+        ndk::enum_range<PresetReverb::Presets>().begin(),
+        ndk::enum_range<PresetReverb::Presets>().end()};
+static const std::vector<Range::PresetReverbRange> kPresetReverbRanges = {
+        MAKE_RANGE(PresetReverb, supportedPresets, kSupportedPresets, kSupportedPresets)};
+static const Capability kPresetReverbCap = {
+        .range = Range::make<Range::presetReverb>(kPresetReverbRanges)};
+
+// NXP SW auxiliary preset reverb
+static const std::string kAuxPresetReverbEffectName = "Auxiliary Preset Reverb";
+static const Descriptor kAuxPresetReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+                          .uuid = getEffectImplUuidAuxPresetReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::AUXILIARY},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kAuxPresetReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kPresetReverbCap};
+
+// NXP SW insert preset reverb
+static const std::string kInsertPresetReverbEffectName = "Insert Preset Reverb";
+static const Descriptor kInsertPresetReverbDesc = {
+        .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+                          .uuid = getEffectImplUuidInsertPresetReverb(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::FIRST,
+                             .volume = Flags::Volume::CTRL},
+                   .cpuLoad = kCpuLoadARM9E,
+                   .memoryUsage = kMemUsage,
+                   .name = kInsertPresetReverbEffectName,
+                   .implementor = "NXP Software Ltd."},
+        .capability = kPresetReverbCap};
+
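+// The four variants of the LVM reverb wrapper: auxiliary vs. insert attach mode,
+// environmental vs. preset parameter interface.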
+enum class ReverbEffectType {
+    AUX_ENV,
+    INSERT_ENV,
+    AUX_PRESET,
+    INSERT_PRESET,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const ReverbEffectType& type) {
+    switch (type) {
+        case ReverbEffectType::AUX_ENV:
+            return out << kAuxEnvReverbEffectName;
+        case ReverbEffectType::INSERT_ENV:
+            return out << kInsertEnvReverbEffectName;
+        case ReverbEffectType::AUX_PRESET:
+            return out << kAuxPresetReverbEffectName;
+        case ReverbEffectType::INSERT_PRESET:
+            return out << kInsertPresetReverbEffectName;
+    }
+    return out << "EnumReverbEffectTypeError";
+}
+
+inline std::ostream& operator<<(std::ostream& out, const LVREV_ReturnStatus_en& status) {
+    switch (status) {
+        case LVREV_SUCCESS:
+            return out << "LVREV_SUCCESS";
+        case LVREV_NULLADDRESS:
+            return out << "LVREV_NULLADDRESS";
+        case LVREV_OUTOFRANGE:
+            return out << "LVREV_OUTOFRANGE";
+        case LVREV_INVALIDNUMSAMPLES:
+            return out << "LVREV_INVALIDNUMSAMPLES";
+        case LVREV_RETURNSTATUS_DUMMY:
+            return out << "LVREV_RETURNSTATUS_DUMMY";
+    }
+    return out << "EnumLvrevRetStatusError";
+}
+
+}  // namespace lvm
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index c6e036a..d018c47 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -58,3 +58,39 @@
         "libwebrtc_absl_headers",
     ],
 }
+
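+// AIDL implementation of the pre-processing effects (AEC, AGC V1/V2, NS), installed
+// under soundfx/ and visible only to //hardware/interfaces/audio/aidl/default.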
+cc_library_shared {
+    name: "libpreprocessingaidl",
+    srcs: [
+        "aidl/PreProcessingContext.cpp",
+        "aidl/EffectPreProcessing.cpp",
+        ":effectCommonFile",
+    ],
+    defaults: [
+        "aidlaudioservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+    ],
+    local_include_dirs: ["aidl"],
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libaudioutils",
+    ],
+    static_libs: [
+        "webrtc_audio_processing",
+    ],
+    header_libs: [
+        "libwebrtc_absl_headers",
+        "libaudioeffects",
+        "libhardware_headers",
+    ],
+    cflags: [
+        "-Wthread-safety",
+        "-Wno-unused-parameter",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 61a2bf5..59f1fc3 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -102,7 +102,8 @@
     uint32_t state;                // current state (enum preproc_session_state)
     int id;                        // audio session ID
     int io;                        // handle of input stream this session is on
-    webrtc::AudioProcessing* apm;  // handle on webRTC audio processing module (APM)
+    rtc::scoped_refptr<webrtc::AudioProcessing>
+            apm;  // handle on webRTC audio processing module (APM)
     // Audio Processing module builder
     webrtc::AudioProcessingBuilder ap_builder;
     // frameCount represents the size of the buffers used for processing, and must represent 10ms.
@@ -260,9 +261,6 @@
     ALOGV("Agc2Init");
     effect->session->config = effect->session->apm->GetConfig();
     effect->session->config.gain_controller2.fixed_digital.gain_db = 0.f;
-    effect->session->config.gain_controller2.adaptive_digital.level_estimator =
-            effect->session->config.gain_controller2.kRms;
-    effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db = 2.f;
     effect->session->apm->ApplyConfig(effect->session->config);
     return 0;
 }
@@ -332,24 +330,19 @@
             ALOGV("Agc2GetParameter() target level %f dB", *(float*)pValue);
             break;
         case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
-            *(uint32_t*)pValue = (uint32_t)(
-                    effect->session->config.gain_controller2.adaptive_digital.level_estimator);
-            ALOGV("Agc2GetParameter() level estimator %d",
-                  *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+            // WebRTC now supports only the RMS level estimator
+            *(uint32_t*)pValue = (uint32_t)(0);
+            ALOGV("Agc2GetParameter() level estimator RMS");
             break;
         case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
-            *(float*)pValue = (float)(effect->session->config.gain_controller2.adaptive_digital
-                                              .extra_saturation_margin_db);
+            *(float*)pValue = (float)(2.0);
             ALOGV("Agc2GetParameter() extra saturation margin %f dB", *(float*)pValue);
             break;
         case AGC2_PARAM_PROPERTIES:
             pProperties->fixedDigitalGain =
                     (float)(effect->session->config.gain_controller2.fixed_digital.gain_db);
-            pProperties->level_estimator = (uint32_t)(
-                    effect->session->config.gain_controller2.adaptive_digital.level_estimator);
-            pProperties->extraSaturationMargin =
-                    (float)(effect->session->config.gain_controller2.adaptive_digital
-                                    .extra_saturation_margin_db);
+            pProperties->level_estimator = 0;
+            pProperties->extraSaturationMargin = 2.0;
             break;
         default:
             ALOGW("Agc2GetParameter() unknown param %d", param);
@@ -438,16 +431,19 @@
             effect->session->config.gain_controller2.fixed_digital.gain_db = valueFloat;
             break;
         case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR:
-            ALOGV("Agc2SetParameter() level estimator %d",
-                  *(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
-            effect->session->config.gain_controller2.adaptive_digital.level_estimator =
-                    (*(webrtc::AudioProcessing::Config::GainController2::LevelEstimator*)pValue);
+            ALOGV("Agc2SetParameter() level estimator %d", *(uint32_t*)pValue);
+            if (*(uint32_t*)pValue != 0) {
+              // only RMS is supported
+              status = -EINVAL;
+            }
             break;
         case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN:
             valueFloat = (float)(*(int32_t*)pValue);
             ALOGV("Agc2SetParameter() extra saturation margin %f dB", valueFloat);
-            effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
-                    valueFloat;
+            if (valueFloat != 2.0) {
+              // extra_saturation_margin_db is no longer configurable in webrtc
+              status = -EINVAL;
+            }
             break;
         case AGC2_PARAM_PROPERTIES:
             ALOGV("Agc2SetParameter() properties gain %f, level %d margin %f",
@@ -455,11 +451,9 @@
                   pProperties->extraSaturationMargin);
             effect->session->config.gain_controller2.fixed_digital.gain_db =
                     pProperties->fixedDigitalGain;
-            effect->session->config.gain_controller2.adaptive_digital.level_estimator =
-                    (webrtc::AudioProcessing::Config::GainController2::LevelEstimator)
-                            pProperties->level_estimator;
-            effect->session->config.gain_controller2.adaptive_digital.extra_saturation_margin_db =
-                    pProperties->extraSaturationMargin;
+            if (pProperties->level_estimator != 0 || pProperties->extraSaturationMargin != 2.0) {
+              status = -EINVAL;
+            }
             break;
         default:
             ALOGW("Agc2SetParameter() unknown param %08x value %08x", param, *(uint32_t*)pValue);
@@ -879,8 +873,8 @@
 
 error:
     if (session->createdMsk == 0) {
-        delete session->apm;
-        session->apm = NULL;
+        // scoped_refptr handles the reference counting here
+        session->apm = nullptr;
     }
     return status;
 }
@@ -889,8 +883,8 @@
     ALOGW_IF(Effect_Release(fx) != 0, " Effect_Release() failed for proc ID %d", fx->procId);
     session->createdMsk &= ~(1 << fx->procId);
     if (session->createdMsk == 0) {
-        delete session->apm;
-        session->apm = NULL;
+        // scoped_refptr handles the reference counting here
+        session->apm = nullptr;
         session->id = 0;
     }
 
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
new file mode 100644
index 0000000..e8ae8b3
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -0,0 +1,456 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectPreProcessing"
+#include <algorithm>
+#include <unordered_set>
+
+#include <Utils.h>
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+
+#include "EffectPreProcessing.h"
+
+using aidl::android::hardware::audio::effect::getEffectImplUuidAcousticEchoCancelerSw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV1Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV2Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidNoiseSuppressionSw;
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::EffectPreProcessing;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+bool isPreProcessingUuidSupported(const AudioUuid& uuid) {
+    return uuid == getEffectImplUuidAcousticEchoCancelerSw() ||
+           uuid == getEffectImplUuidAutomaticGainControlV1Sw() ||
+           uuid == getEffectImplUuidAutomaticGainControlV2Sw() ||
+           uuid == getEffectImplUuidNoiseSuppressionSw();
+}
+
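+// Entry points looked up by the AIDL effect factory when this library is loaded;
+// they create an EffectPreProcessing instance or return its static descriptor.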
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!uuid || !isPreProcessingUuidSupported(*uuid)) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<EffectPreProcessing>(*uuid);
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || !isPreProcessingUuidSupported(*in_impl_uuid)) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (*in_impl_uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAcousticEchoCancelerDesc;
+    } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV1Desc;
+    } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV2Desc;
+    } else if (*in_impl_uuid == getEffectImplUuidNoiseSuppressionSw()) {
+        *_aidl_return = aidl::android::hardware::audio::effect::kNoiseSuppressionDesc;
+    }
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
+    LOG(DEBUG) << __func__ << " " << uuid.toString();
+    if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
+        mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
+        mDescriptor = &kAcousticEchoCancelerDesc;
+        mEffectName = &kAcousticEchoCancelerEffectName;
+    } else if (uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
+        mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1;
+        mDescriptor = &kAutomaticGainControlV1Desc;
+        mEffectName = &kAutomaticGainControlV1EffectName;
+    } else if (uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
+        mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2;
+        mDescriptor = &kAutomaticGainControlV2Desc;
+        mEffectName = &kAutomaticGainControlV2EffectName;
+    } else if (uuid == getEffectImplUuidNoiseSuppressionSw()) {
+        mType = PreProcessingEffectType::NOISE_SUPPRESSION;
+        mDescriptor = &kNoiseSuppressionDesc;
+        mEffectName = &kNoiseSuppressionEffectName;
+    } else {
+        LOG(ERROR) << __func__ << " " << uuid.toString() << " not supported!";
+    }
+}
+
+EffectPreProcessing::~EffectPreProcessing() {
+    cleanUp();
+    LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    *_aidl_return = *mDescriptor;
+    LOG(DEBUG) << __func__ << " " << _aidl_return->toString();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterSpecific(const Parameter::Specific& specific) {
+    LOG(DEBUG) << __func__ << " specific " << specific.toString();
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto tag = specific.getTag();
+    switch (tag) {
+        case Parameter::Specific::acousticEchoCanceler:
+            return setParameterAcousticEchoCanceler(specific);
+        case Parameter::Specific::automaticGainControlV1:
+            return setParameterAutomaticGainControlV1(specific);
+        case Parameter::Specific::automaticGainControlV2:
+            return setParameterAutomaticGainControlV2(specific);
+        case Parameter::Specific::noiseSuppression:
+            return setParameterNoiseSuppression(specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "specificParamNotSupported");
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAcousticEchoCanceler(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::acousticEchoCanceler>();
+    RETURN_IF(!inRange(param, kAcousticEchoCancelerRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AcousticEchoCanceler::echoDelayUs: {
+            RETURN_IF(mContext->setAcousticEchoCancelerEchoDelay(
+                              param.get<AcousticEchoCanceler::echoDelayUs>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "echoDelayNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AcousticEchoCanceler::mobileMode: {
+            RETURN_IF(mContext->setAcousticEchoCancelerMobileMode(
+                              param.get<AcousticEchoCanceler::mobileMode>()) != RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "SettingMobileModeNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAutomaticGainControlV1(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::automaticGainControlV1>();
+    RETURN_IF(!inRange(param, kAutomaticGainControlV1Ranges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AutomaticGainControlV1::targetPeakLevelDbFs: {
+            RETURN_IF(mContext->setAutomaticGainControlV1TargetPeakLevel(
+                              param.get<AutomaticGainControlV1::targetPeakLevelDbFs>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "targetPeakLevelNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV1::maxCompressionGainDb: {
+            RETURN_IF(mContext->setAutomaticGainControlV1MaxCompressionGain(
+                              param.get<AutomaticGainControlV1::maxCompressionGainDb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "maxCompressionGainNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV1::enableLimiter: {
+            RETURN_IF(
+                    mContext->setAutomaticGainControlV1EnableLimiter(
+                            param.get<AutomaticGainControlV1::enableLimiter>()) != RetCode::SUCCESS,
+                    EX_ILLEGAL_ARGUMENT, "enableLimiterNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterAutomaticGainControlV2(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::automaticGainControlV2>();
+    RETURN_IF(!inRange(param, kAutomaticGainControlV2Ranges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case AutomaticGainControlV2::fixedDigitalGainMb: {
+            RETURN_IF(mContext->setAutomaticGainControlV2DigitalGain(
+                              param.get<AutomaticGainControlV2::fixedDigitalGainMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "digitalGainNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV2::levelEstimator: {
+            RETURN_IF(mContext->setAutomaticGainControlV2LevelEstimator(
+                              param.get<AutomaticGainControlV2::levelEstimator>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "levelEstimatorNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        case AutomaticGainControlV2::saturationMarginMb: {
+            RETURN_IF(mContext->setAutomaticGainControlV2SaturationMargin(
+                              param.get<AutomaticGainControlV2::saturationMarginMb>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "saturationMarginNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::setParameterNoiseSuppression(
+        const Parameter::Specific& specific) {
+    auto& param = specific.get<Parameter::Specific::noiseSuppression>();
+    auto tag = param.getTag();
+
+    switch (tag) {
+        case NoiseSuppression::level: {
+            RETURN_IF(mContext->setNoiseSuppressionLevel(param.get<NoiseSuppression::level>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "levelNotSupported");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterSpecific(const Parameter::Id& id,
+                                                             Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+
+    switch (tag) {
+        case Parameter::Id::acousticEchoCancelerTag:
+            return getParameterAcousticEchoCanceler(
+                    id.get<Parameter::Id::acousticEchoCancelerTag>(), specific);
+        case Parameter::Id::automaticGainControlV1Tag:
+            return getParameterAutomaticGainControlV1(
+                    id.get<Parameter::Id::automaticGainControlV1Tag>(), specific);
+        case Parameter::Id::automaticGainControlV2Tag:
+            return getParameterAutomaticGainControlV2(
+                    id.get<Parameter::Id::automaticGainControlV2Tag>(), specific);
+        case Parameter::Id::noiseSuppressionTag:
+            return getParameterNoiseSuppression(id.get<Parameter::Id::noiseSuppressionTag>(),
+                                                specific);
+        default:
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "wrongIdTag");
+    }
+}
+
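+// Illustrative only: a client typically selects a single field through the effect-specific Id,
+// e.g.
+//   AcousticEchoCanceler::Id specificId;
+//   specificId.set<AcousticEchoCanceler::Id::commonTag>(AcousticEchoCanceler::echoDelayUs);
+//   Parameter::Id id = Parameter::Id::make<Parameter::Id::acousticEchoCancelerTag>(specificId);
+// and passes it to getParameterSpecific() above; the handlers below unpack exactly this shape.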
+ndk::ScopedAStatus EffectPreProcessing::getParameterAcousticEchoCanceler(
+        const AcousticEchoCanceler::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AcousticEchoCanceler::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AcousticEchoCancelerTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AcousticEchoCanceler param;
+    auto tag = id.get<AcousticEchoCanceler::Id::commonTag>();
+    switch (tag) {
+        case AcousticEchoCanceler::echoDelayUs: {
+            param.set<AcousticEchoCanceler::echoDelayUs>(
+                    mContext->getAcousticEchoCancelerEchoDelay());
+            break;
+        }
+        case AcousticEchoCanceler::mobileMode: {
+            param.set<AcousticEchoCanceler::mobileMode>(
+                    mContext->getAcousticEchoCancelerMobileMode());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::acousticEchoCanceler>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterAutomaticGainControlV1(
+        const AutomaticGainControlV1::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AutomaticGainControlV1::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AutomaticGainControlV1TagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AutomaticGainControlV1 param;
+
+    auto tag = id.get<AutomaticGainControlV1::Id::commonTag>();
+    switch (tag) {
+        case AutomaticGainControlV1::targetPeakLevelDbFs: {
+            param.set<AutomaticGainControlV1::targetPeakLevelDbFs>(
+                    mContext->getAutomaticGainControlV1TargetPeakLevel());
+            break;
+        }
+        case AutomaticGainControlV1::maxCompressionGainDb: {
+            param.set<AutomaticGainControlV1::maxCompressionGainDb>(
+                    mContext->getAutomaticGainControlV1MaxCompressionGain());
+            break;
+        }
+        case AutomaticGainControlV1::enableLimiter: {
+            param.set<AutomaticGainControlV1::enableLimiter>(
+                    mContext->getAutomaticGainControlV1EnableLimiter());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::automaticGainControlV1>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterAutomaticGainControlV2(
+        const AutomaticGainControlV2::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != AutomaticGainControlV2::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "AutomaticGainControlV2TagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    AutomaticGainControlV2 param;
+
+    auto tag = id.get<AutomaticGainControlV2::Id::commonTag>();
+    switch (tag) {
+        case AutomaticGainControlV2::fixedDigitalGainMb: {
+            param.set<AutomaticGainControlV2::fixedDigitalGainMb>(
+                    mContext->getAutomaticGainControlV2DigitalGain());
+            break;
+        }
+        case AutomaticGainControlV2::levelEstimator: {
+            param.set<AutomaticGainControlV2::levelEstimator>(
+                    mContext->getAutomaticGainControlV2LevelEstimator());
+            break;
+        }
+        case AutomaticGainControlV2::saturationMarginMb: {
+            param.set<AutomaticGainControlV2::saturationMarginMb>(
+                    mContext->getAutomaticGainControlV2SaturationMargin());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::automaticGainControlV2>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectPreProcessing::getParameterNoiseSuppression(
+        const NoiseSuppression::Id& id, Parameter::Specific* specific) {
+    RETURN_IF(id.getTag() != NoiseSuppression::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+              "NoiseSuppressionTagNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    NoiseSuppression param;
+
+    auto tag = id.get<NoiseSuppression::Id::commonTag>();
+    switch (tag) {
+        case NoiseSuppression::level: {
+            param.set<NoiseSuppression::level>(mContext->getNoiseSuppressionLevel());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::noiseSuppression>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
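+// Contexts are owned by the process-wide PreProcessingSession singleton and are keyed by the
+// audio session id carried in Parameter::Common; createContext() reuses the existing context if
+// this effect instance already created one.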
+std::shared_ptr<EffectContext> EffectPreProcessing::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exists";
+    } else {
+        // PreProcessingSession is a singleton
+        mContext = PreProcessingSession::getPreProcessingSession().createSession(
+                mType, 1 /* statusFmqDepth */, common);
+    }
+
+    return mContext;
+}
+
+std::shared_ptr<EffectContext> EffectPreProcessing::getContext() {
+    return mContext;
+}
+
+RetCode EffectPreProcessing::releaseContext() {
+    if (mContext) {
+        PreProcessingSession::getPreProcessingSession().releaseSession(mType,
+                                                                       mContext->getSessionId());
+        mContext.reset();
+    }
+    return RetCode::SUCCESS;
+}
+
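+// START enables processing in the underlying WebRTC module, STOP disables it, and RESET disables
+// it and additionally clears the context buffers.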
+ndk::ScopedAStatus EffectPreProcessing::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->lvmProcess(in, out, samples);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
new file mode 100644
index 0000000..fad848a
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "PreProcessingContext.h"
+#include "PreProcessingSession.h"
+#include "effect-impl/EffectImpl.h"
+
+namespace aidl::android::hardware::audio::effect {
+
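+// AIDL wrapper for the WebRTC-based pre-processing effects (acoustic echo cancellation, AGC1,
+// AGC2 and noise suppression). The concrete effect type, descriptor and name are selected from
+// the implementation UUID passed to the constructor.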
+class EffectPreProcessing final : public EffectImpl {
+  public:
+    explicit EffectPreProcessing(const AudioUuid& uuid);
+    ~EffectPreProcessing() override;
+
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    std::shared_ptr<EffectContext> getContext() override;
+    RetCode releaseContext() override;
+
+    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+
+    std::string getEffectName() override { return *mEffectName; }
+
+  private:
+    std::shared_ptr<PreProcessingContext> mContext;
+    const Descriptor* mDescriptor;
+    const std::string* mEffectName;
+    PreProcessingEffectType mType;
+
+    ndk::ScopedAStatus setParameterAcousticEchoCanceler(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Id& id,
+                                                        Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterAutomaticGainControlV1(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Id& id,
+                                                          Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterAutomaticGainControlV2(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Id& id,
+                                                          Parameter::Specific* specific);
+
+    ndk::ScopedAStatus setParameterNoiseSuppression(const Parameter::Specific& specific);
+    ndk::ScopedAStatus getParameterNoiseSuppression(const NoiseSuppression::Id& id,
+                                                    Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
new file mode 100644
index 0000000..c1e4eda
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#define LOG_TAG "PreProcessingContext"
+#include <Utils.h>
+
+#include "PreProcessingContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+using aidl::android::media::audio::common::AudioDeviceDescription;
+using aidl::android::media::audio::common::AudioDeviceType;
+
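+// Create the WebRTC AudioProcessing module for this context and apply the per-effect defaults
+// (mobile-mode AEC, AGC1 target level / compression gain / limiter, unity AGC2 fixed gain,
+// moderate noise suppression).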
+RetCode PreProcessingContext::init(const Parameter::Common& common) {
+    std::lock_guard lg(mMutex);
+    webrtc::AudioProcessingBuilder apBuilder;
+    mAudioProcessingModule = apBuilder.Create();
+    if (mAudioProcessingModule == nullptr) {
+        LOG(ERROR) << "init could not get apm engine";
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+
+    updateConfigs(common);
+
+    mEnabledMsk = 0;
+    mProcessedMsk = 0;
+    mRevEnabledMsk = 0;
+    mRevProcessedMsk = 0;
+
+    auto config = mAudioProcessingModule->GetConfig();
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.mobile_mode = true;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.target_level_dbfs = kAgcDefaultTargetLevel;
+            config.gain_controller1.compression_gain_db = kAgcDefaultCompGain;
+            config.gain_controller1.enable_limiter = kAgcDefaultLimiter;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.fixed_digital.gain_db = 0.f;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.level = kNsDefaultLevel;
+            break;
+    }
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::deInit() {
+    std::lock_guard lg(mMutex);
+    mAudioProcessingModule = nullptr;
+    mState = PRE_PROC_STATE_UNINITIALIZED;
+    return RetCode::SUCCESS;
+}
+
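+// The mEnabledMsk / mProcessedMsk bit fields use one bit per PreProcessingEffectType; enable()
+// and disable() flip the bit for this context's type and toggle the matching feature in the
+// WebRTC config. AEC additionally tracks the reverse (far-end) stream masks.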
+RetCode PreProcessingContext::enable() {
+    if (mState != PRE_PROC_STATE_INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    int typeMsk = (1 << int(mType));
+    std::lock_guard lg(mMutex);
+    // Check if effect is already enabled.
+    if ((mEnabledMsk & typeMsk) == typeMsk) {
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+    mEnabledMsk |= typeMsk;
+    auto config = mAudioProcessingModule->GetConfig();
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.enabled = true;
+            // AEC has reverse stream
+            mRevEnabledMsk |= typeMsk;
+            mRevProcessedMsk = 0;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.enabled = true;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.enabled = true;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.enabled = true;
+            break;
+    }
+    mProcessedMsk = 0;
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::disable() {
+    if (mState != PRE_PROC_STATE_ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    int typeMsk = (1 << int(mType));
+    std::lock_guard lg(mMutex);
+    // Check if effect is already disabled.
+    if ((mEnabledMsk & typeMsk) != typeMsk) {
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+    mEnabledMsk &= ~typeMsk;
+    auto config = mAudioProcessingModule->GetConfig();
+    switch (mType) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            config.echo_canceller.enabled = false;
+            // AEC has reverse stream
+            mRevEnabledMsk &= ~typeMsk;
+            mRevProcessedMsk = 0;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            config.gain_controller1.enabled = false;
+            break;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            config.gain_controller2.enabled = false;
+            break;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            config.noise_suppression.enabled = false;
+            break;
+    }
+    mProcessedMsk = 0;
+    mAudioProcessingModule->ApplyConfig(config);
+    mState = PRE_PROC_STATE_INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+RetCode PreProcessingContext::setCommon(const Parameter::Common& common) {
+    mCommon = common;
+    updateConfigs(common);
+    return RetCode::SUCCESS;
+}
+
+void PreProcessingContext::updateConfigs(const Parameter::Common& common) {
+    mInputConfig.set_sample_rate_hz(common.input.base.sampleRate);
+    mInputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+                    common.input.base.channelMask));
+    mOutputConfig.set_sample_rate_hz(common.input.base.sampleRate);
+    mOutputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+                    common.output.base.channelMask));
+}
+
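+// The AIDL parameter carries the echo delay in microseconds while WebRTC expects milliseconds,
+// hence the division by 1000 below.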
+RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
+    mEchoDelayUs = echoDelayUs;
+    std::lock_guard lg(mMutex);
+    mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAcousticEchoCancelerEchoDelay() const {
+    return mEchoDelayUs;
+}
+
+RetCode PreProcessingContext::setAcousticEchoCancelerMobileMode(bool mobileMode) {
+    mMobileMode = mobileMode;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.echo_canceller.mobile_mode = mobileMode;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+bool PreProcessingContext::getAcousticEchoCancelerMobileMode() const {
+    return mMobileMode;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel) {
+    mTargetPeakLevel = targetPeakLevel;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.target_level_dbfs = -(mTargetPeakLevel / 100);
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV1TargetPeakLevel() const {
+    return mTargetPeakLevel;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain) {
+    mMaxCompressionGain = maxCompressionGain;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.compression_gain_db = mMaxCompressionGain / 100;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV1MaxCompressionGain() const {
+    return mMaxCompressionGain;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV1EnableLimiter(bool enableLimiter) {
+    mEnableLimiter = enableLimiter;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller1.enable_limiter = mEnableLimiter;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+bool PreProcessingContext::getAutomaticGainControlV1EnableLimiter() const {
+    return mEnableLimiter;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2DigitalGain(int gain) {
+    mDigitalGain = gain;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.gain_controller2.fixed_digital.gain_db = mDigitalGain;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV2DigitalGain() const {
+    return mDigitalGain;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2LevelEstimator(
+        AutomaticGainControlV2::LevelEstimator levelEstimator) {
+    mLevelEstimator = levelEstimator;
+    return RetCode::SUCCESS;
+}
+
+AutomaticGainControlV2::LevelEstimator
+PreProcessingContext::getAutomaticGainControlV2LevelEstimator() const {
+    return mLevelEstimator;
+}
+
+RetCode PreProcessingContext::setAutomaticGainControlV2SaturationMargin(int saturationMargin) {
+    mSaturationMargin = saturationMargin;
+    return RetCode::SUCCESS;
+}
+
+int PreProcessingContext::getAutomaticGainControlV2SaturationMargin() const {
+    return mSaturationMargin;
+}
+
+RetCode PreProcessingContext::setNoiseSuppressionLevel(NoiseSuppression::Level level) {
+    mLevel = level;
+    std::lock_guard lg(mMutex);
+    auto config = mAudioProcessingModule->GetConfig();
+    config.noise_suppression.level =
+            (webrtc::AudioProcessing::Config::NoiseSuppression::Level)level;
+    mAudioProcessingModule->ApplyConfig(config);
+    return RetCode::SUCCESS;
+}
+
+NoiseSuppression::Level PreProcessingContext::getNoiseSuppressionLevel() const {
+    return mLevel;
+}
+
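+// Process one block of audio. The stream is only pushed through the WebRTC module once every
+// enabled effect type has marked the current round as processed (mProcessedMsk covers
+// mEnabledMsk); the same scheme is applied to the reverse stream used by AEC.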
+IEffect::Status PreProcessingContext::lvmProcess(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!in, status, "nullInput");
+    RETURN_VALUE_IF(!out, status, "nullOutput");
+    status = {EX_ILLEGAL_STATE, 0, 0};
+    int64_t inputFrameCount = getCommon().input.frameCount;
+    int64_t outputFrameCount = getCommon().output.frameCount;
+    RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
+    RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
+
+    LOG(DEBUG) << __func__ << " start processing";
+    std::lock_guard lg(mMutex);
+
+    mProcessedMsk |= (1 << int(mType));
+
+    // The webrtc implementation clears was_stream_delay_set after every ProcessStream() call.
+    mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
+
+    if ((mProcessedMsk & mEnabledMsk) == mEnabledMsk) {
+        mProcessedMsk = 0;
+        int processStatus = mAudioProcessingModule->ProcessStream(
+                (const int16_t* const)in, mInputConfig, mOutputConfig, (int16_t* const)out);
+        if (processStatus != 0) {
+            LOG(ERROR) << "Process stream failed with error " << processStatus;
+            return status;
+        }
+    }
+
+    mRevProcessedMsk |= (1 << int(mType));
+
+    if ((mRevProcessedMsk & mRevEnabledMsk) == mRevEnabledMsk) {
+        mRevProcessedMsk = 0;
+        int revProcessStatus = mAudioProcessingModule->ProcessReverseStream(
+                (const int16_t* const)in, mInputConfig, mInputConfig, (int16_t* const)out);
+        if (revProcessStatus != 0) {
+            LOG(ERROR) << "Process reverse stream failed with error " << revProcessStatus;
+            return status;
+        }
+    }
+
+    return {STATUS_OK, samples, samples};
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
new file mode 100644
index 0000000..9ba1bbe
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <audio_processing.h>
+#include <unordered_map>
+
+#include "PreProcessingTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum PreProcEffectState {
+    PRE_PROC_STATE_UNINITIALIZED,
+    PRE_PROC_STATE_INITIALIZED,
+    PRE_PROC_STATE_ACTIVE,
+};
+
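+// Per-effect-instance context holding a WebRTC AudioProcessing module plus the cached parameter
+// values exposed through the AIDL getters; all module access is guarded by mMutex.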
+class PreProcessingContext final : public EffectContext {
+  public:
+    PreProcessingContext(int statusDepth, const Parameter::Common& common,
+                         const PreProcessingEffectType& type)
+        : EffectContext(statusDepth, common), mType(type) {
+        LOG(DEBUG) << __func__ << type;
+        mState = PRE_PROC_STATE_UNINITIALIZED;
+    }
+    ~PreProcessingContext() override { LOG(DEBUG) << __func__; }
+
+    RetCode init(const Parameter::Common& common);
+    RetCode deInit();
+
+    PreProcessingEffectType getPreProcessingType() const { return mType; }
+
+    RetCode enable();
+    RetCode disable();
+
+    RetCode setCommon(const Parameter::Common& common) override;
+    void updateConfigs(const Parameter::Common& common);
+
+    RetCode setAcousticEchoCancelerEchoDelay(int echoDelayUs);
+    int getAcousticEchoCancelerEchoDelay() const;
+    RetCode setAcousticEchoCancelerMobileMode(bool mobileMode);
+    bool getAcousticEchoCancelerMobileMode() const;
+
+    RetCode setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel);
+    int getAutomaticGainControlV1TargetPeakLevel() const;
+    RetCode setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain);
+    int getAutomaticGainControlV1MaxCompressionGain() const;
+    RetCode setAutomaticGainControlV1EnableLimiter(bool enableLimiter);
+    bool getAutomaticGainControlV1EnableLimiter() const;
+
+    RetCode setAutomaticGainControlV2DigitalGain(int gain);
+    int getAutomaticGainControlV2DigitalGain() const;
+    RetCode setAutomaticGainControlV2LevelEstimator(
+            AutomaticGainControlV2::LevelEstimator levelEstimator);
+    AutomaticGainControlV2::LevelEstimator getAutomaticGainControlV2LevelEstimator() const;
+    RetCode setAutomaticGainControlV2SaturationMargin(int saturationMargin);
+    int getAutomaticGainControlV2SaturationMargin() const;
+
+    RetCode setNoiseSuppressionLevel(NoiseSuppression::Level level);
+    NoiseSuppression::Level getNoiseSuppressionLevel() const;
+
+    IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+  private:
+    static constexpr inline int kAgcDefaultTargetLevel = 3;
+    static constexpr inline int kAgcDefaultCompGain = 9;
+    static constexpr inline bool kAgcDefaultLimiter = true;
+    static constexpr inline webrtc::AudioProcessing::Config::NoiseSuppression::Level
+            kNsDefaultLevel = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
+
+    std::mutex mMutex;
+    const PreProcessingEffectType mType;
+    PreProcEffectState mState;  // current state
+
+    // handle on WebRTC audio processing module (APM)
+    rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule GUARDED_BY(mMutex);
+
+    int mEnabledMsk GUARDED_BY(mMutex);       // bit field containing IDs of enabled pre processors
+    int mProcessedMsk GUARDED_BY(mMutex);     // bit field containing IDs of pre processors already
+                                              // processed in current round
+    int mRevEnabledMsk GUARDED_BY(mMutex);    // bit field containing IDs of enabled pre processors
+                                              // with reverse channel
+    int mRevProcessedMsk GUARDED_BY(mMutex);  // bit field containing IDs of pre processors with
+                                              // reverse channel already processed in current round
+
+    webrtc::StreamConfig mInputConfig;   // input stream configuration
+    webrtc::StreamConfig mOutputConfig;  // output stream configuration
+
+    // Acoustic Echo Canceler
+    int mEchoDelayUs = 0;
+    bool mMobileMode = false;
+
+    // Automatic Gain Control V1
+    int mTargetPeakLevel = 0;
+    int mMaxCompressionGain = 0;
+    bool mEnableLimiter = false;
+
+    // Automatic Gain Control V2
+    int mDigitalGain = 0;
+    AutomaticGainControlV2::LevelEstimator mLevelEstimator =
+            AutomaticGainControlV2::LevelEstimator::RMS;
+    int mSaturationMargin = 2;
+
+    // NoiseSuppression
+    NoiseSuppression::Level mLevel = NoiseSuppression::Level::LOW;
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingSession.h b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
new file mode 100644
index 0000000..877292f
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <memory>
+#include <unordered_map>
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+
+#include "PreProcessingContext.h"
+#include "PreProcessingTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+/**
+ * @brief Maintain all effect pre-processing sessions.
+ *
+ * Sessions are identified by session ID; at most MAX_PRE_PROC_SESSIONS sessions are supported by
+ * the pre-processing implementation.
+ */
+class PreProcessingSession {
+  public:
+    static PreProcessingSession& getPreProcessingSession() {
+        static PreProcessingSession instance;
+        return instance;
+    }
+
+    static bool findPreProcessingTypeInList(
+            std::vector<std::shared_ptr<PreProcessingContext>>& list,
+            const PreProcessingEffectType& type, bool remove = false) {
+        auto itor = std::find_if(list.begin(), list.end(),
+                                 [type](const std::shared_ptr<PreProcessingContext>& bundle) {
+                                     return bundle->getPreProcessingType() == type;
+                                 });
+        if (itor == list.end()) {
+            return false;
+        }
+        if (remove) {
+            (*itor)->deInit();
+            list.erase(itor);
+        }
+        return true;
+    }
+
+    /**
+     * Create a PreProcessingContext of the given type, wrapped in a shared_ptr. A session must
+     * not hold more than one context of each type.
+     */
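+    // Illustrative usage (local names are hypothetical):
+    //   auto& session = PreProcessingSession::getPreProcessingSession();
+    //   auto context = session.createSession(PreProcessingEffectType::NOISE_SUPPRESSION,
+    //                                        1 /* statusDepth */, common);
+    //   ...
+    //   session.releaseSession(PreProcessingEffectType::NOISE_SUPPRESSION, common.session);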
+    std::shared_ptr<PreProcessingContext> createSession(const PreProcessingEffectType& type,
+                                                        int statusDepth,
+                                                        const Parameter::Common& common) {
+        int sessionId = common.session;
+        LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
+        if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_PRE_PROC_SESSIONS) {
+            LOG(ERROR) << __func__ << " exceeded the maximum number of pre-processing sessions";
+            return nullptr;
+        }
+
+        if (mSessionMap.count(sessionId)) {
+            if (findPreProcessingTypeInList(mSessionMap[sessionId], type)) {
+                LOG(ERROR) << __func__ << type << " already exist in session " << sessionId;
+                return nullptr;
+            }
+        }
+
+        auto& list = mSessionMap[sessionId];
+        auto context = std::make_shared<PreProcessingContext>(statusDepth, common, type);
+        RETURN_VALUE_IF(!context, nullptr, "failedToCreateContext");
+
+        RetCode ret = context->init(common);
+        if (RetCode::SUCCESS != ret) {
+            LOG(ERROR) << __func__ << " context init ret " << ret;
+            return nullptr;
+        }
+        list.push_back(context);
+        return context;
+    }
+
+    void releaseSession(const PreProcessingEffectType& type, int sessionId) {
+        LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
+        std::lock_guard lg(mMutex);
+        if (mSessionMap.count(sessionId)) {
+            auto& list = mSessionMap[sessionId];
+            if (!findPreProcessingTypeInList(list, type, true /* remove */)) {
+                LOG(ERROR) << __func__ << " can't find " << type << " in session " << sessionId;
+                return;
+            }
+            if (list.empty()) {
+                mSessionMap.erase(sessionId);
+            }
+        }
+    }
+
+  private:
+    // Lock for mSessionMap access.
+    std::mutex mMutex;
+    // Max session number supported.
+    static constexpr int MAX_PRE_PROC_SESSIONS = 8;
+    std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<PreProcessingContext>>>
+            mSessionMap GUARDED_BY(mMutex);
+};
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingTypes.h b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
new file mode 100644
index 0000000..4c2b8ba
--- /dev/null
+++ b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <optional>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_agc.h>
+#include <audio_effects/effect_agc2.h>
+#include <audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "effect-impl/EffectTypes.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+// Acoustic Echo Cancellation
+static const std::string kAcousticEchoCancelerEffectName = "Acoustic Echo Canceler";
+static const std::vector<Range::AcousticEchoCancelerRange> kAcousticEchoCancelerRanges = {
+        MAKE_RANGE(AcousticEchoCanceler, AcousticEchoCanceler::echoDelayUs, 0, 500)};
+static const Capability kAcousticEchoCancelerCap = {.range = kAcousticEchoCancelerRanges};
+static const Descriptor kAcousticEchoCancelerDesc = {
+        .common = {.id = {.type = getEffectTypeUuidAcousticEchoCanceler(),
+                          .uuid = getEffectImplUuidAcousticEchoCancelerSw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAcousticEchoCancelerEffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAcousticEchoCancelerCap};
+
+// Automatic Gain Control 1
+static const std::string kAutomaticGainControlV1EffectName = "Automatic Gain Control V1";
+static const std::vector<Range::AutomaticGainControlV1Range> kAutomaticGainControlV1Ranges = {
+        MAKE_RANGE(AutomaticGainControlV1, AutomaticGainControlV1::targetPeakLevelDbFs, -3100, 0),
+        MAKE_RANGE(AutomaticGainControlV1, AutomaticGainControlV1::maxCompressionGainDb, 0, 9000)};
+static const Capability kAutomaticGainControlV1Cap = {.range = kAutomaticGainControlV1Ranges};
+static const Descriptor kAutomaticGainControlV1Desc = {
+        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV1(),
+                          .uuid = getEffectImplUuidAutomaticGainControlV1Sw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAutomaticGainControlV1EffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAutomaticGainControlV1Cap};
+
+// Automatic Gain Control 2
+static const std::string kAutomaticGainControlV2EffectName = "Automatic Gain Control V2";
+static const std::vector<Range::AutomaticGainControlV2Range> kAutomaticGainControlV2Ranges = {
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::fixedDigitalGainMb, 0, 90),
+        // extra_saturation_margin_db is no longer configurable in webrtc
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::saturationMarginMb, 2, 2),
+        // WebRTC only supports RMS level estimator now
+        MAKE_RANGE(AutomaticGainControlV2, AutomaticGainControlV2::levelEstimator,
+                   AutomaticGainControlV2::LevelEstimator::RMS,
+                   AutomaticGainControlV2::LevelEstimator::RMS)};
+static const Capability kAutomaticGainControlV2Cap = {.range = kAutomaticGainControlV2Ranges};
+static const Descriptor kAutomaticGainControlV2Desc = {
+        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV2(),
+                          .uuid = getEffectImplUuidAutomaticGainControlV2Sw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kAutomaticGainControlV2EffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = kAutomaticGainControlV2Cap};
+
+// Noise suppression
+static const std::string kNoiseSuppressionEffectName = "Noise Suppression";
+static const Descriptor kNoiseSuppressionDesc = {
+        .common = {.id = {.type = getEffectTypeUuidNoiseSuppression(),
+                          .uuid = getEffectImplUuidNoiseSuppressionSw(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
+                   .name = kNoiseSuppressionEffectName,
+                   .implementor = "The Android Open Source Project"}};
+
+enum class PreProcessingEffectType {
+    ACOUSTIC_ECHO_CANCELLATION,
+    AUTOMATIC_GAIN_CONTROL_V1,
+    AUTOMATIC_GAIN_CONTROL_V2,
+    NOISE_SUPPRESSION,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const PreProcessingEffectType& type) {
+    switch (type) {
+        case PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION:
+            return out << kAcousticEchoCancelerEffectName;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1:
+            return out << kAutomaticGainControlV1EffectName;
+        case PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2:
+            return out << kAutomaticGainControlV2EffectName;
+        case PreProcessingEffectType::NOISE_SUPPRESSION:
+            return out << kNoiseSuppressionEffectName;
+    }
+    return out << "EnumPreProcessingEffectTypeError";
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
index 07006a1..8fdb864 100644
--- a/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/EffectPreprocessingTest.cpp
@@ -143,10 +143,6 @@
         const AGC2Params* agc2Params = &params->agc2Params;
         ASSERT_NO_FATAL_FAILURE(
                 effect.setParam(AGC2_PARAM_FIXED_DIGITAL_GAIN, agc2Params->fixedDigitalGain));
-        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
-                                                agc2Params->adaptDigiLevelEstimator));
-        ASSERT_NO_FATAL_FAILURE(effect.setParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
-                                                agc2Params->extraSaturationMargin));
     } else if (isAECEffect(uuid)) {
         const AECParams* aecParams = &params->aecParams;
         ASSERT_NO_FATAL_FAILURE(effect.setParam(AEC_PARAM_ECHO_DELAY, aecParams->echoDelay));
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index 3bd93f8..03400d7 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -400,20 +400,6 @@
             ALOGE("Invalid AGC2 Fixed Digital Gain. Error %d\n", status);
             return EXIT_FAILURE;
         }
-        if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR,
-                                               (uint32_t)preProcCfgParams.agc2Level,
-                                               effectHandle[PREPROC_AGC2]);
-            status != 0) {
-            ALOGE("Invalid AGC2 Level Estimator. Error %d\n", status);
-            return EXIT_FAILURE;
-        }
-        if (int status = preProcSetConfigParam(AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN,
-                                               (float)preProcCfgParams.agc2SaturationMargin,
-                                               effectHandle[PREPROC_AGC2]);
-            status != 0) {
-            ALOGE("Invalid AGC2 Saturation Margin. Error %d\n", status);
-            return EXIT_FAILURE;
-        }
     }
     if (effectEn[PREPROC_NS]) {
         if (int status = preProcSetConfigParam(NS_PARAM_LEVEL, (uint32_t)preProcCfgParams.nsLevel,
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
new file mode 100644
index 0000000..ab7e468
--- /dev/null
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -0,0 +1,21 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+    name: "spatializer_benchmark",
+    vendor: true,
+    srcs: ["spatializer_benchmark.cpp"],
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+    header_libs: [
+        "libhardware_headers",
+    ],
+}
diff --git a/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
new file mode 100644
index 0000000..e8ac480
--- /dev/null
+++ b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <dlfcn.h>
+#include <random>
+#include <vector>
+
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+
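+// Resolve the spatializer's legacy effect-library interface at static-initialization time by
+// dlopen'ing libspatialaudio.so; the benchmark aborts early if the symbol cannot be found.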
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = [] {
+    audio_effect_library_t symbol{};
+    void* effectLib = dlopen("libspatialaudio.so", RTLD_NOW);
+    if (effectLib) {
+        audio_effect_library_t* effectInterface =
+                (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+        if (effectInterface == nullptr) {
+            ALOGE("dlsym failed: %s", dlerror());
+            exit(-1);
+        }
+        symbol = (audio_effect_library_t)(*effectInterface);
+    } else {
+        ALOGE("dlopen failed: %s", dlerror());
+        exit(-1);
+    }
+    return symbol;
+}();
+
+// channel masks
+constexpr int kInputChMask = AUDIO_CHANNEL_OUT_5POINT1;
+
+// sampleRates
+constexpr size_t kSampleRates[] = {
+        44100,
+        48000,
+        96000,
+};
+constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+// duration in ms
+constexpr size_t kDurations[] = {2, 5, 10};
+constexpr size_t kNumDurations = std::size(kDurations);
+
+// effect uuids
+constexpr effect_uuid_t kEffectUuid = {
+        0xcc4677de, 0xff72, 0x11eb, 0x9a03, {0x02, 0x42, 0xac, 0x13, 0x00, 0x03}};
+
+constexpr float kMinAmplitude = -1.0f;
+constexpr float kMaxAmplitude = 1.0f;
+
+/*******************************************************************
+ * A test result running on Pixel 5 for comparison.
+ * The first parameter indicates the sample rate.
+ * 0: 44100, 1: 48000, 2: 96000
+ * The second parameter indicates the duration in ms.
+ * 0: 2, 1: 5, 2: 10
+ * -------------------------------------------------------------
+ * Benchmark                   Time             CPU   Iterations
+ * -------------------------------------------------------------
+ * BM_SPATIALIZER/0/0     739848 ns       738497 ns          934
+ * BM_SPATIALIZER/0/1    1250503 ns      1248337 ns          480
+ * BM_SPATIALIZER/0/2    2094092 ns      2090092 ns          310
+ * BM_SPATIALIZER/1/0     783114 ns       781626 ns          683
+ * BM_SPATIALIZER/1/1    1332951 ns      1330473 ns          452
+ * BM_SPATIALIZER/1/2    2258313 ns      2254022 ns          289
+ * BM_SPATIALIZER/2/0    1210332 ns      1207957 ns          477
+ * BM_SPATIALIZER/2/1    2356259 ns      2351764 ns          269
+ * BM_SPATIALIZER/2/2    4267814 ns      4259567 ns          155
+ *******************************************************************/
+
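+// Create the spatializer through the legacy libeffects entry point, configure 5.1 float input
+// and stereo output, and measure process() over the requested duration and sample rate.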
+static void BM_SPATIALIZER(benchmark::State& state) {
+    const size_t sampleRate = kSampleRates[state.range(0)];
+    const size_t durationMs = kDurations[state.range(1)];
+    const size_t frameCount = durationMs * sampleRate / 1000;
+    const size_t inputChannelCount = audio_channel_count_from_out_mask(kInputChMask);
+    const size_t outputChannelCount = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(kInputChMask);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+    std::vector<float> input(frameCount * inputChannelCount);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    effect_handle_t effectHandle = nullptr;
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kEffectUuid, 1 /* sessionId */,
+                                                                 1 /* ioId */, &effectHandle);
+        status != 0) {
+        ALOGE("create_effect returned an error = %d\n", status);
+        return;
+    }
+
+    effect_config_t config{};
+    config.inputCfg.samplingRate = config.outputCfg.samplingRate = sampleRate;
+    config.inputCfg.channels = kInputChMask;
+    config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+    config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+    int reply = 0;
+    uint32_t replySize = sizeof(reply);
+    if (int status = (*effectHandle)
+                             ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+                                       &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    if (int status = (*effectHandle)
+                             ->command(effectHandle, EFFECT_CMD_ENABLE, sizeof(effect_config_t),
+                                       &config, &replySize, &reply);
+        status != 0) {
+        ALOGE("command returned an error = %d\n", status);
+        return;
+    }
+
+    // Run the test
+    std::vector<float> output(frameCount * outputChannelCount);
+    for (auto _ : state) {
+        benchmark::DoNotOptimize(input.data());
+        benchmark::DoNotOptimize(output.data());
+
+        audio_buffer_t inBuffer = {.frameCount = frameCount, .f32 = input.data()};
+        audio_buffer_t outBuffer = {.frameCount = frameCount, .f32 = output.data()};
+        (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(frameCount);
+
+    if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+        ALOGE("release_effect returned an error = %d\n", status);
+        return;
+    }
+}
+
+static void SPATIALIZERArgs(benchmark::internal::Benchmark* b) {
+    for (int i = 0; i < kNumSampleRates; i++) {
+        for (int j = 0; j < kNumDurations; ++j) {
+            b->Args({i, j});
+        }
+    }
+}
+
+BENCHMARK(BM_SPATIALIZER)->Apply(SPATIALIZERArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
new file mode 100644
index 0000000..704e873
--- /dev/null
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -0,0 +1,21 @@
+// Build the unit tests for spatializer effect
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "SpatializerTest",
+    defaults: [
+      "libeffects-test-defaults",
+    ],
+    host_supported: false,
+    srcs: [
+        "SpatializerTest.cpp",
+    ],
+}
diff --git a/media/libeffects/spatializer/tests/SpatializerTest.cpp b/media/libeffects/spatializer/tests/SpatializerTest.cpp
new file mode 100644
index 0000000..110fbb1
--- /dev/null
+++ b/media/libeffects/spatializer/tests/SpatializerTest.cpp
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SpatializerTest"
+
+#include <system/audio_effects/effect_spatializer.h>
+#include "EffectTestHelper.h"
+
+using namespace android;
+
+// Rely on dlopen/dlsym to resolve the effect library interface at static-initialization time.
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = [] {
+    audio_effect_library_t symbol{};
+    void* effectLib = dlopen("libspatialaudio.so", RTLD_NOW);
+    if (effectLib) {
+        audio_effect_library_t* effectInterface =
+                (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+        if (effectInterface == nullptr) {
+            ALOGE("dlsym failed: %s", dlerror());
+            exit(-1);
+        }
+        symbol = (audio_effect_library_t)(*effectInterface);
+    } else {
+        ALOGE("dlopen failed: %s", dlerror());
+        exit(-1);
+    }
+    return symbol;
+}();
+
+// channel masks
+constexpr audio_channel_mask_t kSpatializerChMasks[] = {
+        AUDIO_CHANNEL_OUT_5POINT1,
+};
+constexpr size_t kNumSpatializerChMasks = std::size(kSpatializerChMasks);
+
+// sampleRates
+// TODO(b/234170025): Add all sampling rates once they are handled by spatializer
+constexpr int kSpatializerSampleRates[] = {44100, 48000, 96000};
+constexpr size_t kNumSpatializerSampleRates = std::size(kSpatializerSampleRates);
+
+// frame counts
+// TODO(b/234620538): Add sizes smaller than 80 once they are handled by spatializer
+constexpr size_t kSpatializerFrameCounts[] = {4800, 1920, 480, 80};
+constexpr size_t kNumSpatializerFrameCounts = std::size(kSpatializerFrameCounts);
+
+// effect uuids
+constexpr effect_uuid_t kSpatializerEffectUuids[] = {
+        {0xcc4677de, 0xff72, 0x11eb, 0x9a03, {0x02, 0x42, 0xac, 0x13, 0x00, 0x03}},
+};
+const size_t kNumSpatializerEffectUuids = std::size(kSpatializerEffectUuids);
+
+constexpr float kMinAmplitude = -1.0f;
+constexpr float kMaxAmplitude = 1.0f;
+constexpr float kSNRThreshold = 100.0f;
+constexpr size_t kNumBufferSplits = 2;
+
+using SingleEffectTestParam = std::tuple<int, int, int, int, int>;
+
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+  public:
+    SingleEffectTest()
+        : mInputChMask(kSpatializerChMasks[std::get<0>(GetParam())]),
+          mInputChannelCount(audio_channel_count_from_out_mask(mInputChMask)),
+          mOutputChMask(AUDIO_CHANNEL_OUT_STEREO),
+          mOutputChannelCount(audio_channel_count_from_out_mask(mOutputChMask)),
+          mSampleRate(kSpatializerSampleRates[std::get<1>(GetParam())]),
+          mFrameCount(kSpatializerFrameCounts[std::get<2>(GetParam())]),
+          mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+          mTotalFrameCount(mFrameCount * mLoopCount),
+          mUuid(&kSpatializerEffectUuids[std::get<4>(GetParam())]) {}
+    void SetUp() override {
+        ASSERT_EQ(AUDIO_EFFECT_LIBRARY_TAG, AUDIO_EFFECT_LIBRARY_INFO_SYM.tag)
+                << "Invalid effect tag";
+    }
+    const size_t mInputChMask;
+    const size_t mInputChannelCount;
+    const size_t mOutputChMask;
+    const size_t mOutputChannelCount;
+    const size_t mSampleRate;
+    const size_t mFrameCount;
+    const size_t mLoopCount;
+    const size_t mTotalFrameCount;
+    const effect_uuid_t* mUuid;
+};
+
+// Test basic spatializer functionality (does not crash) for various combinations of sampling
+// rates, channel masks and frame counts.
+TEST_P(SingleEffectTest, SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << "chMask: " << mInputChMask << " sampleRate: " << mSampleRate);
+
+    EffectTestHelper effect(mUuid, mInputChMask, mOutputChMask, mSampleRate, mFrameCount,
+                            mLoopCount);
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::vector<float> input(mTotalFrameCount * mInputChannelCount);
+    std::vector<float> output(mTotalFrameCount * mOutputChannelCount);
+    std::minstd_rand gen(mInputChMask);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(SpatializerTest, SingleEffectTest,
+                         ::testing::Combine(::testing::Range(0, (int)kNumSpatializerChMasks),
+                                            ::testing::Range(0, (int)kNumSpatializerSampleRates),
+                                            ::testing::Range(0, (int)kNumSpatializerFrameCounts),
+                                            ::testing::Range(0,
+                                                             (int)EffectTestHelper::kNumLoopCounts),
+                                            ::testing::Range(0, (int)kNumSpatializerEffectUuids)));
+
+using SingleEffectComparisonTestParam = std::tuple<int, int, int>;
+
+class SingleEffectComparisonTest
+    : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+  public:
+    SingleEffectComparisonTest()
+        : mInputChMask(kSpatializerChMasks[std::get<0>(GetParam())]),
+          mInputChannelCount(audio_channel_count_from_out_mask(mInputChMask)),
+          mOutputChMask(AUDIO_CHANNEL_OUT_STEREO),
+          mOutputChannelCount(audio_channel_count_from_out_mask(mOutputChMask)),
+          mSampleRate(kSpatializerSampleRates[std::get<1>(GetParam())]),
+          mUuid(&kSpatializerEffectUuids[std::get<2>(GetParam())]) {}
+
+    const size_t mInputChMask;
+    const size_t mInputChannelCount;
+    const size_t mOutputChMask;
+    const size_t mOutputChannelCount;
+    const size_t mSampleRate;
+    const effect_uuid_t* mUuid;
+};
+
+// Ensure that the effect produces similar output when the input is fed in a single call
+// or in multiple calls with the buffer split into smaller parts.
+
+// TODO(b/234619903): This is currently disabled because the spatializer output has
+// an algorithm delay that varies with frame count, which makes it tricky to
+// compare outputs from two runs with different frame counts.
+// Feed valid input to the spatializer and dump the output to verify that the spatializer
+// is correctly initialized; once that is verified, enable the following test.
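+// The comparison relies on computeSnr() from EffectTestHelper.h: if the two processing
+// paths are equivalent, the SNR between their outputs should exceed kSNRThreshold.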
+TEST_P(SingleEffectComparisonTest, DISABLED_SimpleProcess) {
+    SCOPED_TRACE(testing::Message()
+                 << "chMask: " << mInputChMask << " sampleRate: " << mSampleRate);
+    int testDurationMs = 20; // 20 ms
+    int testFrameCount = (mSampleRate * testDurationMs) / 1000;
+    int totalFrameCount = testFrameCount * kNumBufferSplits;
+    size_t totalInSamples = totalFrameCount * mInputChannelCount;
+    size_t totalOutSamples = totalFrameCount * mOutputChannelCount;
+    std::vector<float> input(totalInSamples);
+    std::vector<float> outRef(totalOutSamples);
+    std::vector<float> outTest(totalOutSamples);
+
+    // Initialize input buffer with deterministic pseudo-random values
+    std::minstd_rand gen(mInputChMask);
+    std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+    for (auto& in : input) {
+        in = dis(gen);
+    }
+
+    EffectTestHelper refEffect(mUuid, mInputChMask, mOutputChMask, mSampleRate, totalFrameCount, 1);
+    ASSERT_NO_FATAL_FAILURE(refEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(refEffect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(refEffect.process(input.data(), outRef.data()));
+    ASSERT_NO_FATAL_FAILURE(refEffect.releaseEffect());
+
+    EffectTestHelper testEffect(mUuid, mInputChMask, mOutputChMask, mSampleRate,
+                                totalFrameCount / kNumBufferSplits, kNumBufferSplits);
+    ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+    ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+    ASSERT_NO_FATAL_FAILURE(testEffect.process(input.data(), outTest.data()));
+    ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+    float snr = computeSnr(outTest.data(), outRef.data(), totalOutSamples);
+    ASSERT_GT(snr, kSNRThreshold) << "SNR between reference and test output " << snr
+                                  << " is lower than required " << kSNRThreshold;
+}
+
+INSTANTIATE_TEST_SUITE_P(SpatializerTest, SingleEffectComparisonTest,
+                         ::testing::Combine(::testing::Range(0, (int)kNumSpatializerChMasks),
+                                            ::testing::Range(0, (int)kNumSpatializerSampleRates),
+                                            ::testing::Range(0, (int)kNumSpatializerEffectUuids)));
+
+// This test checks whether get/set of Spatializer effect params behaves as documented. It does
+// not validate the functionality of the configured params; it only checks the return status of
+// the API calls.
+TEST(ParameterTests, CheckParameterSupport) {
+    EffectTestHelper effect(&kSpatializerEffectUuids[0], kSpatializerChMasks[0],
+                            AUDIO_CHANNEL_OUT_STEREO, kSpatializerSampleRates[0],
+                            kSpatializerFrameCounts[0], EffectTestHelper::kLoopCounts[0]);
+    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+
+    // capture list of channel masks supported
+    std::vector<audio_channel_mask_t> channelMasks;
+    int status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS, channelMasks);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, channelMasks.size());
+        EXPECT_EQ(AUDIO_CHANNEL_OUT_5POINT1, channelMasks[0]);
+    }
+
+    // capture list of spatialization levels supported
+    std::vector<int8_t> spatializationLevels;
+    status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_LEVELS, spatializationLevels);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, spatializationLevels.size());
+        EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, spatializationLevels[0]);
+    }
+
+    // capture list of spatialization modes supported
+    std::vector<int8_t> spatializationModes;
+    status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
+                                   spatializationModes);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, spatializationModes.size());
+        EXPECT_EQ(SPATIALIZATION_MODE_BINAURAL, spatializationModes[0]);
+    }
+
+    // check if head tracking is supported
+    std::vector<int8_t> headTracking;
+    status = effect.getParam<false>(SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED, headTracking);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, headTracking.size());
+        EXPECT_EQ(true, headTracking[0]);
+    }
+
+    // verify spatialization level setting
+    std::vector<int8_t> level;
+    status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, level.size());
+        EXPECT_EQ(SPATIALIZATION_LEVEL_NONE, level[0]);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+    status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, level.size());
+        EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, level[0]);
+    }
+
+    // try setting unsupported parameters
+    level.clear();
+    level.push_back(SPATIALIZATION_LEVEL_MCHAN_BED_PLUS_OBJECTS);
+    ASSERT_EQ(1, level.size());
+    EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_LEVEL, level));
+
+    // Ensure that unsupported level isn't set by above setParam
+    status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+    EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+    if (!status) {
+        EXPECT_EQ(1, level.size());
+        EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, level[0]);
+    }
+
+    std::vector<float> hingeAngle = {3.1415f};
+    ASSERT_EQ(1, hingeAngle.size());
+    EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_HINGE_ANGLE, hingeAngle));
+
+    std::vector<int8_t> headTrackingMode = {2};  // RELATIVE_WORLD
+    ASSERT_EQ(1, headTrackingMode.size());
+    EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_HEADTRACKING_MODE, headTrackingMode));
+
+    // try setting supported parameters
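+    // SPATIALIZER_PARAM_HEAD_TO_STAGE takes a 6-float vector (presumably a pose encoded as
+    // 3 translation + 3 rotation components).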
+    std::vector<float> vectorFloat = {0.1, 0.2, 0.15, 0.04, 2.23, 3.14};
+    ASSERT_EQ(6, vectorFloat.size());
+    EXPECT_EQ(0, effect.setParam(SPATIALIZER_PARAM_HEAD_TO_STAGE, vectorFloat));
+
+    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGD("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/libeffects/tests/common/Android.bp b/media/libeffects/tests/common/Android.bp
new file mode 100644
index 0000000..73179fb
--- /dev/null
+++ b/media/libeffects/tests/common/Android.bp
@@ -0,0 +1,45 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+    name: "libeffects-test-helper-srcs",
+    srcs: [
+        "EffectTestHelper.cpp",
+    ],
+}
+
+cc_library_headers {
+    name: "libeffects-test-helper-headers",
+    vendor: true,
+    host_supported: true,
+    export_include_dirs: [
+        ".",
+    ],
+}
+
+cc_defaults {
+    name: "libeffects-test-defaults",
+    vendor: true,
+    gtest: true,
+    host_supported: true,
+    test_suites: ["device-tests"],
+    static_libs: [
+        "libaudioutils",
+    ],
+    srcs: [
+        ":libeffects-test-helper-srcs",
+    ],
+    header_libs: [
+        "libeffects-test-helper-headers",
+        "libhardware_headers",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+}
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/tests/common/EffectTestHelper.cpp
similarity index 97%
rename from media/libeffects/lvm/tests/EffectTestHelper.cpp
rename to media/libeffects/tests/common/EffectTestHelper.cpp
index ec727c7..db085ba 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.cpp
+++ b/media/libeffects/tests/common/EffectTestHelper.cpp
@@ -15,7 +15,6 @@
  */
 
 #include "EffectTestHelper.h"
-extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
 
 namespace android {
 
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/tests/common/EffectTestHelper.h
similarity index 69%
rename from media/libeffects/lvm/tests/EffectTestHelper.h
rename to media/libeffects/tests/common/EffectTestHelper.h
index bcee84e..c99e27a 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.h
+++ b/media/libeffects/tests/common/EffectTestHelper.h
@@ -21,6 +21,7 @@
 #include <audio_utils/primitives.h>
 #include <climits>
 #include <cstdlib>
+#include <dlfcn.h>
 #include <gtest/gtest.h>
 #include <hardware/audio_effect.h>
 #include <log/log.h>
@@ -29,7 +30,9 @@
 #include <system/audio.h>
 #include <vector>
 
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
 namespace android {
+
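+// Computes the signal-to-noise ratio of a test buffer against a reference buffer
+// (compared against kSNRThreshold in the effect tests).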
 template <typename T>
 static float computeSnr(const T* ref, const T* tst, size_t count) {
     double signal{};
@@ -82,6 +85,7 @@
     void createEffect();
     void releaseEffect();
     void setConfig();
+
     template <typename VALUE_DTYPE>
     void setParam(uint32_t type, VALUE_DTYPE const value) {
         int reply = 0;
@@ -101,6 +105,76 @@
         ASSERT_EQ(status, 0) << "set_param returned an error " << status;
         ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
     };
+
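+    // Reads a parameter via EFFECT_CMD_GET_PARAM. The command buffer holds an effect_param_t
+    // header followed by the 32-bit parameter id; the reply holds the header, the id, and
+    // (for MULTI_VALUES) a leading count followed by up to kMaxEffectParamValues values of T.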
+    template <bool MULTI_VALUES, typename T>
+    int32_t getParam(uint32_t type, std::vector<T>& values) {
+        const int kMaxEffectParamValues = 10;
+        uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1];
+        uint32_t reply[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + 1 + kMaxEffectParamValues];
+
+        effect_param_t* p = (effect_param_t*)cmd;
+        p->psize = sizeof(uint32_t);
+        if (MULTI_VALUES) {
+            p->vsize = (kMaxEffectParamValues + 1) * sizeof(T);
+        } else {
+            p->vsize = sizeof(T);
+        }
+        *(uint32_t*)p->data = type;
+        uint32_t replySize = sizeof(effect_param_t) + p->psize + p->vsize;
+
+        int32_t status = (*mEffectHandle)
+                                 ->command(mEffectHandle, EFFECT_CMD_GET_PARAM,
+                                           sizeof(effect_param_t) + sizeof(uint32_t), cmd,
+                                           &replySize, reply);
+        if (status) {
+            return status;
+        }
+        if (p->status) {
+            return p->status;
+        }
+        if (replySize <
+            sizeof(effect_param_t) + sizeof(uint32_t) + (MULTI_VALUES ? 2 : 1) * sizeof(T)) {
+            return -EINVAL;
+        }
+
+        T* params = (T*)((uint8_t*)reply + sizeof(effect_param_t) + sizeof(uint32_t));
+        int numParams = 1;
+        if (MULTI_VALUES) {
+            numParams = (int)*params++;
+        }
+        if (numParams > kMaxEffectParamValues) {
+            return -EINVAL;
+        }
+        values.clear();
+        std::copy(&params[0], &params[numParams], back_inserter(values));
+        return 0;
+    }
+
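+    // Writes a parameter via EFFECT_CMD_SET_PARAM. The command buffer holds an effect_param_t
+    // header, the 32-bit parameter id (psize bytes), and the values (vsize bytes); a non-zero
+    // reply from the effect is returned as the error code.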
+    template <typename T>
+    int setParam(uint32_t type, const std::vector<T>& values) {
+        int reply = 0;
+        uint32_t replySize = sizeof(reply);
+
+        uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + values.size()];
+        effect_param_t* p = (effect_param_t*)cmd;
+        p->psize = sizeof(uint32_t);
+        p->vsize = sizeof(T) * values.size();
+        *(uint32_t*)p->data = type;
+        memcpy((uint32_t*)p->data + 1, values.data(), sizeof(T) * values.size());
+
+        int status = (*mEffectHandle)
+                             ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+                                       sizeof(effect_param_t) + p->psize + p->vsize, p, &replySize,
+                                       &reply);
+        if (status) {
+            return status;
+        }
+        if (reply) {
+            return reply;
+        }
+        return 0;
+    }
+
     void process(float* input, float* output);
 
     // Corresponds to SNR for 1 bit difference between two int16_t signals
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index 8dd6789..cf782f7 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -18,34 +18,60 @@
     ],
 }
 
-cc_library_shared {
-    name: "libvisualizer",
-
+cc_defaults {
+    name: "visualizer_defaults",
     vendor: true,
-
-    srcs: [
-        "EffectVisualizer.cpp",
-    ],
-
     cflags: [
-        "-O2",
-        "-fvisibility=hidden",
-
-        "-DBUILD_FLOAT",
+        "-DBUILD_FLOAT", // TODO: remove BUILD_FLOAT and SUPPORT_MC in lvm libs
         "-DSUPPORT_MC",
-
         "-Wall",
         "-Werror",
     ],
-
     shared_libs: [
         "liblog",
     ],
-
-    relative_install_path: "soundfx",
-
     header_libs: [
         "libaudioeffects",
         "libaudioutils_headers",
     ],
 }
+
+cc_library_shared {
+    name: "libvisualizer",
+    defaults: [
+        "visualizer_defaults",
+    ],
+    srcs: [
+        "EffectVisualizer.cpp",
+    ],
+    relative_install_path: "soundfx",
+    cflags: [
+        "-O2",
+        "-fvisibility=hidden",
+    ],
+}
+
+cc_library_shared {
+    name: "libvisualizeraidl",
+    srcs: [
+        "aidl/Visualizer.cpp",
+        "aidl/VisualizerContext.cpp",
+        ":effectCommonFile",
+    ],
+    defaults: [
+        "aidlaudioeffectservice_defaults",
+        "latest_android_hardware_audio_effect_ndk_shared",
+        "latest_android_media_audio_common_types_ndk_shared",
+        "visualizer_defaults",
+    ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    shared_libs: [
+        "libcutils",
+    ],
+    relative_install_path: "soundfx",
+    visibility: [
+        "//hardware/interfaces/audio/aidl/default",
+    ],
+}
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
new file mode 100644
index 0000000..53bfb41
--- /dev/null
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AHAL_VisualizerLibEffects"
+
+#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "Visualizer.h"
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVisualizer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::VisualizerImpl;
+using aidl::android::media::audio::common::AudioUuid;
+
+extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
+                                           std::shared_ptr<IEffect>* instanceSpp) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    if (instanceSpp) {
+        *instanceSpp = ndk::SharedRefBase::make<VisualizerImpl>();
+        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+        return EX_NONE;
+    } else {
+        LOG(ERROR) << __func__ << " invalid input parameter!";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
+        LOG(ERROR) << __func__ << " uuid not supported";
+        return EX_ILLEGAL_ARGUMENT;
+    }
+    *_aidl_return = VisualizerImpl::kDescriptor;
+    return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+const std::string VisualizerImpl::kEffectName = "Visualizer";
+const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
+        MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
+        MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+        /* get-only parameters; an invalid range (min > max) indicates that set is not supported */
+        MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.peak = 1, .rms = 1}),
+                   Visualizer::Measurement({.peak = 0, .rms = 0})),
+        MAKE_RANGE(Visualizer, captureSampleBuffer, std::vector<uint8_t>({1}),
+                   std::vector<uint8_t>({0}))};
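+// With min > max these ranges can never be satisfied, so setParameterSpecific() rejects
+// attempts to set the get-only measurement and captureSampleBuffer parameters via inRange().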
+const Capability VisualizerImpl::kCapability = {
+        .range = Range::make<Range::visualizer>(VisualizerImpl::kRanges)};
+const Descriptor VisualizerImpl::kDescriptor = {
+        .common = {.id = {.type = getEffectTypeUuidVisualizer(),
+                          .uuid = getEffectImplUuidVisualizer(),
+                          .proxy = std::nullopt},
+                   .flags = {.type = Flags::Type::INSERT,
+                             .insert = Flags::Insert::LAST,
+                             .volume = Flags::Volume::CTRL},
+                   .name = VisualizerImpl::kEffectName,
+                   .implementor = "The Android Open Source Project"},
+        .capability = VisualizerImpl::kCapability};
+
+ndk::ScopedAStatus VisualizerImpl::getDescriptor(Descriptor* _aidl_return) {
+    RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    LOG(DEBUG) << __func__ << kDescriptor.toString();
+    *_aidl_return = kDescriptor;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus VisualizerImpl::commandImpl(CommandId command) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+    switch (command) {
+        case CommandId::START:
+            mContext->enable();
+            break;
+        case CommandId::STOP:
+            mContext->disable();
+            break;
+        case CommandId::RESET:
+            mContext->disable();
+            mContext->resetBuffer();
+            break;
+        default:
+            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "commandIdNotSupported");
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus VisualizerImpl::setParameterSpecific(const Parameter::Specific& specific) {
+    RETURN_IF(Parameter::Specific::visualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
+              "EffectNotSupported");
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    auto& param = specific.get<Parameter::Specific::visualizer>();
+    RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
+    const auto tag = param.getTag();
+    switch (tag) {
+        case Visualizer::captureSamples: {
+            RETURN_IF(mContext->setCaptureSamples(param.get<Visualizer::captureSamples>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setCaptureSizeFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Visualizer::scalingMode: {
+            RETURN_IF(mContext->setScalingMode(param.get<Visualizer::scalingMode>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setScalingModeFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Visualizer::measurementMode: {
+            RETURN_IF(mContext->setMeasurementMode(param.get<Visualizer::measurementMode>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setMeasurementModeFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        case Visualizer::latencyMs: {
+            RETURN_IF(mContext->setDownstreamLatency(param.get<Visualizer::latencyMs>()) !=
+                              RetCode::SUCCESS,
+                      EX_ILLEGAL_ARGUMENT, "setLatencyFailed");
+            return ndk::ScopedAStatus::ok();
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "VisualizerTagNotSupported");
+        }
+    }
+}
+
+ndk::ScopedAStatus VisualizerImpl::getParameterSpecific(const Parameter::Id& id,
+                                                        Parameter::Specific* specific) {
+    RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+    auto tag = id.getTag();
+    RETURN_IF(Parameter::Id::visualizerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
+    auto specificId = id.get<Parameter::Id::visualizerTag>();
+    auto specificTag = specificId.getTag();
+    switch (specificTag) {
+        case Visualizer::Id::commonTag: {
+            return getParameterVisualizer(specificId.get<Visualizer::Id::commonTag>(), specific);
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(specificTag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+                                                                    "VisualizerTagNotSupported");
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus VisualizerImpl::getParameterVisualizer(const Visualizer::Tag& tag,
+                                                          Parameter::Specific* specific) {
+    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+    Visualizer param;
+    switch (tag) {
+        case Visualizer::captureSamples: {
+            param.set<Visualizer::captureSamples>(mContext->getCaptureSamples());
+            break;
+        }
+        case Visualizer::scalingMode: {
+            param.set<Visualizer::scalingMode>(mContext->getScalingMode());
+            break;
+        }
+        case Visualizer::measurementMode: {
+            param.set<Visualizer::measurementMode>(mContext->getMeasurementMode());
+            break;
+        }
+        case Visualizer::measurement: {
+            param.set<Visualizer::measurement>(mContext->getMeasure());
+            break;
+        }
+        case Visualizer::captureSampleBuffer: {
+            param.set<Visualizer::captureSampleBuffer>(mContext->capture());
+            break;
+        }
+        case Visualizer::latencyMs: {
+            param.set<Visualizer::latencyMs>(mContext->getDownstreamLatency());
+            break;
+        }
+        default: {
+            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+                    EX_ILLEGAL_ARGUMENT, "VisualizerTagNotSupported");
+        }
+    }
+
+    specific->set<Parameter::Specific::visualizer>(param);
+    return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> VisualizerImpl::createContext(const Parameter::Common& common) {
+    if (mContext) {
+        LOG(DEBUG) << __func__ << " context already exists";
+        return mContext;
+    }
+
+    mContext = std::make_shared<VisualizerContext>(1 /* statusFmqDepth */, common);
+    mContext->initParams(common);
+    return mContext;
+}
+
+RetCode VisualizerImpl::releaseContext() {
+    if (mContext) {
+        mContext->disable();
+        mContext->resetBuffer();
+    }
+    return RetCode::SUCCESS;
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status VisualizerImpl::effectProcessImpl(float* in, float* out, int samples) {
+    IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+    RETURN_VALUE_IF(!mContext, status, "nullContext");
+    return mContext->process(in, out, samples);
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
new file mode 100644
index 0000000..ec725db
--- /dev/null
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "effect-impl/EffectImpl.h"
+
+#include "VisualizerContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class VisualizerImpl final : public EffectImpl {
+  public:
+    static const std::string kEffectName;
+    static const Capability kCapability;
+    static const Descriptor kDescriptor;
+    VisualizerImpl() { LOG(DEBUG) << __func__; }
+    ~VisualizerImpl() {
+        cleanUp();
+        LOG(DEBUG) << __func__;
+    }
+
+    ndk::ScopedAStatus commandImpl(CommandId command) override;
+    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+                                            Parameter::Specific* specific) override;
+    IEffect::Status effectProcessImpl(float* in, float* out, int process) override;
+    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+    RetCode releaseContext() override;
+
+    std::shared_ptr<EffectContext> getContext() override { return mContext; }
+    std::string getEffectName() override { return kEffectName; }
+
+  private:
+    static const std::vector<Range::VisualizerRange> kRanges;
+    std::shared_ptr<VisualizerContext> mContext;
+    ndk::ScopedAStatus getParameterVisualizer(const Visualizer::Tag& tag,
+                                                    Parameter::Specific* specific);
+};
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
new file mode 100644
index 0000000..5d0d08d
--- /dev/null
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -0,0 +1,333 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "VisualizerContext.h"
+
+#include <algorithm>
+#include <math.h>
+#include <time.h>
+
+#include <android/binder_status.h>
+#include <audio_utils/primitives.h>
+#include <system/audio.h>
+#include <Utils.h>
+
+#ifndef BUILD_FLOAT
+        #error AIDL Visualizer only supports 32-bit float; make sure to add the cflag -DBUILD_FLOAT
+#endif
+
+using aidl::android::hardware::audio::common::getChannelCount;
+
+namespace aidl::android::hardware::audio::effect {
+
+VisualizerContext::VisualizerContext(int statusDepth, const Parameter::Common& common)
+    : EffectContext(statusDepth, common) {
+}
+
+VisualizerContext::~VisualizerContext() {
+    std::lock_guard lg(mMutex);
+    LOG(DEBUG) << __func__;
+    mState = State::UNINITIALIZED;
+}
+
+RetCode VisualizerContext::initParams(const Parameter::Common& common) {
+    std::lock_guard lg(mMutex);
+    LOG(DEBUG) << __func__;
+    if (common.input != common.output) {
+        LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString()
+                   << " and output: " << common.output.toString();
+        return RetCode::ERROR_ILLEGAL_PARAMETER;
+    }
+
+    mState = State::INITIALIZED;
+    auto channelCount = getChannelCount(common.input.base.channelMask);
+#ifdef SUPPORT_MC
+    if (channelCount < 1 || channelCount > FCC_LIMIT) return RetCode::ERROR_ILLEGAL_PARAMETER;
+#else
+    if (channelCount != FCC_2) return RetCode::ERROR_ILLEGAL_PARAMETER;
+#endif
+    mChannelCount = channelCount;
+    mCommon = common;
+    return RetCode::SUCCESS;
+}
+
+RetCode VisualizerContext::enable() {
+    std::lock_guard lg(mMutex);
+    if (mState != State::INITIALIZED) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = State::ACTIVE;
+    return RetCode::SUCCESS;
+}
+
+RetCode VisualizerContext::disable() {
+    std::lock_guard lg(mMutex);
+    if (mState != State::ACTIVE) {
+        return RetCode::ERROR_EFFECT_LIB_ERROR;
+    }
+    mState = State::INITIALIZED;
+    return RetCode::SUCCESS;
+}
+
+void VisualizerContext::reset() {
+    std::lock_guard lg(mMutex);
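+    // 0x80 is the midpoint of the unsigned 8-bit PCM capture format, i.e. silence.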
+    std::fill_n(mCaptureBuf.begin(), kMaxCaptureBufSize, 0x80);
+}
+
+RetCode VisualizerContext::setCaptureSamples(int samples) {
+    std::lock_guard lg(mMutex);
+    mCaptureSamples = samples;
+    return RetCode::SUCCESS;
+}
+int VisualizerContext::getCaptureSamples() {
+    std::lock_guard lg(mMutex);
+    return mCaptureSamples;
+}
+
+RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) {
+    std::lock_guard lg(mMutex);
+    mMeasurementMode = mode;
+    return RetCode::SUCCESS;
+}
+Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() {
+    std::lock_guard lg(mMutex);
+    return mMeasurementMode;
+}
+
+RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) {
+    std::lock_guard lg(mMutex);
+    mScalingMode = mode;
+    return RetCode::SUCCESS;
+}
+Visualizer::ScalingMode VisualizerContext::getScalingMode() {
+    std::lock_guard lg(mMutex);
+    return mScalingMode;
+}
+
+RetCode VisualizerContext::setDownstreamLatency(int latency) {
+    std::lock_guard lg(mMutex);
+    mDownstreamLatency = latency;
+    return RetCode::SUCCESS;
+}
+
+int VisualizerContext::getDownstreamLatency() {
+    std::lock_guard lg(mMutex);
+    return mDownstreamLatency;
+}
+
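+// Returns the elapsed time in ms since the capture buffer was last updated, based on
+// CLOCK_MONOTONIC; a zero tv_sec in mBufferUpdateTime means "never updated" and yields 0.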
+uint32_t VisualizerContext::getDeltaTimeMsFromUpdatedTime_l() {
+    uint32_t deltaMs = 0;
+    if (mBufferUpdateTime.tv_sec != 0) {
+        struct timespec ts;
+        if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
+            time_t secs = ts.tv_sec - mBufferUpdateTime.tv_sec;
+            long nsec = ts.tv_nsec - mBufferUpdateTime.tv_nsec;
+            if (nsec < 0) {
+                --secs;
+                nsec += 1000000000;
+            }
+            deltaMs = secs * 1000 + nsec / 1000000;
+        }
+    }
+    return deltaMs;
+}
+
+Visualizer::Measurement VisualizerContext::getMeasure() {
+    uint16_t peakU16 = 0;
+    float sumRmsSquared = 0.0f;
+    uint8_t nbValidMeasurements = 0;
+
+    {
+        std::lock_guard lg(mMutex);
+        // reset measurements if last measurement was too long ago (which implies stored
+        // measurements aren't relevant anymore and shouldn't bias the new one)
+        const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
+        if (delayMs > kDiscardMeasurementsTimeMs) {
+            LOG(INFO) << __func__ << " Discarding " << delayMs << " ms old measurements";
+            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
+                mPastMeasurements[i].mIsValid = false;
+                mPastMeasurements[i].mPeakU16 = 0;
+                mPastMeasurements[i].mRmsSquared = 0;
+            }
+            mMeasurementBufferIdx = 0;
+        } else {
+            // only use actual measurements, otherwise the first RMS measurements, taken before
+            // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS buffers have been played, would always be
+            // artificially low
+            for (uint32_t i = 0; i < mMeasurementWindowSizeInBuffers; i++) {
+                if (mPastMeasurements[i].mIsValid) {
+                    if (mPastMeasurements[i].mPeakU16 > peakU16) {
+                        peakU16 = mPastMeasurements[i].mPeakU16;
+                    }
+                    sumRmsSquared += mPastMeasurements[i].mRmsSquared;
+                    nbValidMeasurements++;
+                }
+            }
+        }
+    }
+
+    float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements);
+    Visualizer::Measurement measure;
+    // convert from I16 sample values to mB and write results
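+    // (2000 * log10(x) is 100 * 20*log10(x), i.e. decibels expressed in millibels;
+    // -9600 mB is used as the silence floor.)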
+    measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
+    measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
+    LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
+              << " (" << measure.rms << "mB)";
+    return measure;
+}
+
+std::vector<uint8_t> VisualizerContext::capture() {
+    std::vector<uint8_t> result;
+    std::lock_guard lg(mMutex);
+    // CTS android.media.audio.cts.VisualizerTest expects silence data when the effect is not running
+    // RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
+    if (mState != State::ACTIVE) {
+        result.resize(mCaptureSamples);
+        memset(result.data(), 0x80, mCaptureSamples);
+        return result;
+    }
+
+    const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();
+    // if the audio framework has stopped playing audio although the effect is still active, we
+    // must clear the capture buffer to return silence
+    if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
+        (deltaMs > kMaxStallTimeMs)) {
+        LOG(INFO) << __func__ << " capture going to idle";
+        mBufferUpdateTime.tv_sec = 0;
+        return result;
+    }
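+    // Compensate for downstream latency: shift the capture point back so the returned
+    // samples roughly line up with the audio currently being heard.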
+    int32_t latencyMs = mDownstreamLatency;
+    latencyMs -= deltaMs;
+    if (latencyMs < 0) {
+        latencyMs = 0;
+    }
+    uint32_t deltaSamples = mCaptureSamples + mCommon.input.base.sampleRate * latencyMs / 1000;
+
+    // A large sample rate, latency, or capture size could cause overflow;
+    // do not offset by more than the size of the buffer.
+    if (deltaSamples > kMaxCaptureBufSize) {
+        android_errorWriteLog(0x534e4554, "31781965");
+        deltaSamples = kMaxCaptureBufSize;
+    }
+
+    int32_t capturePoint;
+    // equivalent to: capturePoint = (int32_t)mCaptureIdx - deltaSamples, but overflow-safe:
+    __builtin_sub_overflow((int32_t) mCaptureIdx, deltaSamples, &capturePoint);
+    // a negative capturePoint means we wrap the buffer.
+    if (capturePoint < 0) {
+        uint32_t size = -capturePoint;
+        if (size > mCaptureSamples) {
+            size = mCaptureSamples;
+        }
+        result.insert(result.end(), &mCaptureBuf[kMaxCaptureBufSize + capturePoint],
+                        &mCaptureBuf[kMaxCaptureBufSize + capturePoint + size]);
+        mCaptureSamples -= size;
+        capturePoint = 0;
+    }
+    result.insert(result.end(), &mCaptureBuf[capturePoint],
+                    &mCaptureBuf[capturePoint + mCaptureSamples]);
+    mLastCaptureIdx = mCaptureIdx;
+    return result;
+}
+
+IEffect::Status VisualizerContext::process(float* in, float* out, int samples) {
+    IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
+    RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");
+
+    std::lock_guard lg(mMutex);
+    result.status = STATUS_INVALID_OPERATION;
+    RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
+    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
+    // perform measurements if needed
+    if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
+        // find the peak and RMS squared for the new buffer
+        float rmsSqAcc = 0;
+        float maxSample = 0.f;
+        for (size_t inIdx = 0; inIdx < (unsigned)samples; ++inIdx) {
+            maxSample = fmax(maxSample, fabs(in[inIdx]));
+            rmsSqAcc += in[inIdx] * in[inIdx];
+        }
+        maxSample *= 1 << 15;  // scale to the int16_t range (1 << 15 represents full-scale positive)
+        rmsSqAcc *= 1 << 30;   // scale to the squared int16_t range, i.e. (1 << 15)^2
+        mPastMeasurements[mMeasurementBufferIdx] = {
+                .mPeakU16 = (uint16_t)maxSample,
+                .mRmsSquared = rmsSqAcc / samples,
+                .mIsValid = true };
+        if (++mMeasurementBufferIdx >= mMeasurementWindowSizeInBuffers) {
+            mMeasurementBufferIdx = 0;
+        }
+    }
+
+    float fscale;  // multiplicative scale
+    if (mScalingMode == Visualizer::ScalingMode::NORMALIZED) {
+        // derive capture scaling factor from peak value in current buffer
+        // this gives more interesting captures for display.
+        float maxSample = 0.f;
+        for (size_t inIdx = 0; inIdx < (unsigned)samples; ) {
+            // we reconstruct the actual summed value to ensure proper normalization
+            // for multichannel outputs (channels > 2 may often be 0).
+            float smp = 0.f;
+            for (int i = 0; i < mChannelCount; ++i) {
+                smp += in[inIdx++];
+            }
+            maxSample = fmax(maxSample, fabs(smp));
+        }
+        if (maxSample > 0.f) {
+            fscale = 0.99f / maxSample;
+            int exp; // unused
+            const float significand = frexp(fscale, &exp);
+            if (significand == 0.5f) {
+                fscale *= 255.f / 256.f; // avoid returning unaltered PCM signal
+            }
+        } else {
+            // scale doesn't matter, the values are all 0.
+            fscale = 1.f;
+        }
+    } else {
+        assert(mScalingMode == Visualizer::ScalingMode::AS_PLAYED);
+        // Note: if channels are uncorrelated, 1/sqrt(N) could be used at the risk of clipping.
+        fscale = 1.f / mChannelCount;  // account for summing all the channels together.
+    }
+
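+    // Downmix each frame to mono, apply fscale, and store it as an unsigned 8-bit sample
+    // in the circular capture buffer, wrapping at kMaxCaptureBufSize.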
+    uint32_t captIdx;
+    uint32_t inIdx;
+    for (inIdx = 0, captIdx = mCaptureIdx; inIdx < (unsigned)samples; captIdx++) {
+        // wrap
+        if (captIdx >= kMaxCaptureBufSize) {
+            captIdx = 0;
+        }
+
+        float smp = 0.f;
+        for (uint32_t i = 0; i < mChannelCount; ++i) {
+            smp += in[inIdx++];
+        }
+        mCaptureBuf[captIdx] = clamp8_from_float(smp * fscale);
+    }
+
+    // the following two should really be atomic, though it probably doesn't
+    // matter much for visualization purposes
+    mCaptureIdx = captIdx;
+    // update last buffer update time stamp
+    if (clock_gettime(CLOCK_MONOTONIC, &mBufferUpdateTime) < 0) {
+        mBufferUpdateTime.tv_sec = 0;
+    }
+
+    // TODO: handle access_mode
+    memcpy(out, in, samples * sizeof(float));
+    return {STATUS_OK, samples, samples};
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
new file mode 100644
index 0000000..958035f
--- /dev/null
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_effects/effect_dynamicsprocessing.h>
+
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class VisualizerContext final : public EffectContext {
+  public:
+    static const uint32_t kMaxCaptureBufSize = 65536;
+    static const uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
+
+    VisualizerContext(int statusDepth, const Parameter::Common& common);
+    ~VisualizerContext();
+
+    RetCode initParams(const Parameter::Common& common);
+
+    RetCode enable();
+    RetCode disable();
+    // keep all parameters and reset buffer.
+    void reset();
+
+    RetCode setCaptureSamples(int captureSize);
+    int getCaptureSamples();
+    RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
+    Visualizer::MeasurementMode getMeasurementMode();
+    RetCode setScalingMode(Visualizer::ScalingMode mode);
+    Visualizer::ScalingMode getScalingMode();
+    RetCode setDownstreamLatency(int latency);
+    int getDownstreamLatency();
+
+    IEffect::Status process(float* in, float* out, int samples);
+    // Gets the current measurements, measured by process() and consumed by getParameter()
+    Visualizer::Measurement getMeasure();
+    // Gets the latest PCM capture, data captured by process() and consumed by getParameter()
+    std::vector<uint8_t> capture();
+
+    struct BufferStats {
+        bool mIsValid;
+        uint16_t mPeakU16; // the positive peak of the absolute value of the samples in a buffer
+        float mRmsSquared; // the average square of the samples in a buffer
+    };
+
+    enum State {
+        UNINITIALIZED,
+        INITIALIZED,
+        ACTIVE,
+    };
+
+  private:
+    // maximum time since last capture buffer update before resetting capture buffer. This means
+    // that the framework has stopped playing audio and we must start returning silence
+    static const uint32_t kMaxStallTimeMs = 1000;
+    // discard measurements older than this number of ms
+    static const uint32_t kDiscardMeasurementsTimeMs = 2000;
+    // maximum number of buffers for which we keep track of the measurements
+    // note: buffer index is stored in uint8_t
+    static const uint32_t kMeasurementWindowMaxSizeInBuffers = 25;
+
+    // serialize process() and parameter setting
+    std::mutex mMutex;
+    Parameter::Common mCommon GUARDED_BY(mMutex);
+    State mState GUARDED_BY(mMutex) = State::UNINITIALIZED;
+    uint32_t mCaptureIdx GUARDED_BY(mMutex) = 0;
+    uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
+    Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
+    struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
+    // capture buf with 8 bits mono PCM samples
+    std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
+    uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
+    uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+
+    // to avoid recomputing it every time a buffer is processed
+    uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
+    Visualizer::MeasurementMode mMeasurementMode GUARDED_BY(mMutex) =
+            Visualizer::MeasurementMode::NONE;
+    uint8_t mMeasurementWindowSizeInBuffers = kMeasurementWindowMaxSizeInBuffers;
+    uint8_t mMeasurementBufferIdx GUARDED_BY(mMutex) = 0;
+    std::array<BufferStats, kMeasurementWindowMaxSizeInBuffers> mPastMeasurements;
+    void init_params();
+
+    uint32_t getDeltaTimeMsFromUpdatedTime_l() REQUIRES(mMutex);
+};
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/media/liberror/Android.bp b/media/liberror/Android.bp
index f54d354..5e94b0a 100644
--- a/media/liberror/Android.bp
+++ b/media/liberror/Android.bp
@@ -25,7 +25,7 @@
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth",
+        "com.android.btservices",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -51,7 +51,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth",
+        "com.android.btservices",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9d63f9b..9955862 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -16,11 +16,17 @@
       "Pose.cpp",
       "PoseBias.cpp",
       "PoseDriftCompensator.cpp",
+      "PosePredictor.cpp",
       "PoseRateLimiter.cpp",
       "QuaternionUtil.cpp",
       "ScreenHeadFusion.cpp",
       "StillnessDetector.cpp",
       "Twist.cpp",
+      "VectorRecorder.cpp",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
     ],
     export_include_dirs: [
         "include",
@@ -31,6 +37,15 @@
     export_header_lib_headers: [
         "libeigen",
     ],
+    cflags: [
+        "-Wthread-safety",
+    ],
+    product_variables: {
+        debuggable: {
+            // enable experiments only in userdebug and eng builds
+            cflags: ["-DENABLE_VERIFICATION"],
+        },
+    },
 }
 
 cc_library {
@@ -39,6 +54,7 @@
       "SensorPoseProvider.cpp",
     ],
     shared_libs: [
+        "libbase",
         "libheadtracking",
         "liblog",
         "libsensor",
@@ -71,6 +87,7 @@
         "Pose-test.cpp",
         "PoseBias-test.cpp",
         "PoseDriftCompensator-test.cpp",
+        "PosePredictor.cpp",
         "PoseRateLimiter-test.cpp",
         "QuaternionUtil-test.cpp",
         "ScreenHeadFusion-test.cpp",
@@ -78,6 +95,8 @@
         "Twist-test.cpp",
     ],
     shared_libs: [
+        "libaudioutils",
+        "libbase", // StringAppendF
         "libheadtracking",
     ],
 }
diff --git a/media/libheadtracking/HeadTrackingProcessor-test.cpp b/media/libheadtracking/HeadTrackingProcessor-test.cpp
index 299192f..5190f52 100644
--- a/media/libheadtracking/HeadTrackingProcessor-test.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor-test.cpp
@@ -15,10 +15,10 @@
  */
 
 #include "media/HeadTrackingProcessor.h"
+#include "media/QuaternionUtil.h"
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
@@ -82,6 +82,8 @@
     std::unique_ptr<HeadTrackingProcessor> processor = createHeadTrackingProcessor(
             Options{.predictionDuration = 2.f}, HeadTrackingMode::WORLD_RELATIVE);
 
+    processor->setPosePredictorType(PosePredictorType::TWIST);
+
     // Establish a baseline for the drift compensators.
     processor->setWorldToHeadPose(0, Pose3f(), Twist3f());
     processor->setWorldToScreenPose(0, Pose3f());
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index 71fae8a..54d08d2 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -13,12 +13,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <inttypes.h>
 
+#include <android-base/stringprintf.h>
+#include <audio_utils/SimpleLog.h>
 #include "media/HeadTrackingProcessor.h"
+#include "media/QuaternionUtil.h"
 
 #include "ModeSelector.h"
 #include "PoseBias.h"
-#include "QuaternionUtil.h"
+#include "PosePredictor.h"
 #include "ScreenHeadFusion.h"
 #include "StillnessDetector.h"
 
@@ -26,6 +30,7 @@
 namespace media {
 namespace {
 
+using android::base::StringAppendF;
 using Eigen::Quaternionf;
 using Eigen::Vector3f;
 
@@ -55,8 +60,8 @@
 
     void setWorldToHeadPose(int64_t timestamp, const Pose3f& worldToHead,
                             const Twist3f& headTwist) override {
-        Pose3f predictedWorldToHead =
-                worldToHead * integrate(headTwist, mOptions.predictionDuration);
+        const Pose3f predictedWorldToHead = mPosePredictor.predict(
+                timestamp, worldToHead, headTwist, mOptions.predictionDuration);
         mHeadPoseBias.setInput(predictedWorldToHead);
         mHeadStillnessDetector.setInput(timestamp, predictedWorldToHead);
         mWorldToHeadTimestamp = timestamp;
@@ -84,14 +89,16 @@
     }
 
     void calculate(int64_t timestamp) override {
-        // Handle the screen first, since it might trigger a recentering of the head.
+        bool screenStable = true;
+
+        // Handle the screen first, since it might trigger a recentering of the head.
         if (mWorldToScreenTimestamp.has_value()) {
             const Pose3f worldToLogicalScreen = mScreenPoseBias.getOutput();
-            bool screenStable = mScreenStillnessDetector.calculate(timestamp);
+            screenStable = mScreenStillnessDetector.calculate(timestamp);
             mModeSelector.setScreenStable(mWorldToScreenTimestamp.value(), screenStable);
             // Whenever the screen is unstable, recenter the head pose.
             if (!screenStable) {
-                recenter(true, false);
+                recenter(true, false, "calculate: screen movement");
             }
             mScreenHeadFusion.setWorldToScreenPose(mWorldToScreenTimestamp.value(),
                                                    worldToLogicalScreen);
@@ -101,8 +108,9 @@
         if (mWorldToHeadTimestamp.has_value()) {
             Pose3f worldToHead = mHeadPoseBias.getOutput();
             // Auto-recenter.
-            if (mHeadStillnessDetector.calculate(timestamp)) {
-                recenter(true, false);
+            bool headStable = mHeadStillnessDetector.calculate(timestamp);
+            if (headStable || !screenStable) {
+                recenter(true, false, "calculate: head movement");
                 worldToHead = mHeadPoseBias.getOutput();
             }
 
@@ -132,14 +140,16 @@
 
     HeadTrackingMode getActualMode() const override { return mModeSelector.getActualMode(); }
 
-    void recenter(bool recenterHead, bool recenterScreen) override {
+    void recenter(bool recenterHead, bool recenterScreen, std::string source) override {
         if (recenterHead) {
             mHeadPoseBias.recenter();
             mHeadStillnessDetector.reset();
+            mLocalLog.log("recenter Head from %s", source.c_str());
         }
         if (recenterScreen) {
             mScreenPoseBias.recenter();
             mScreenStillnessDetector.reset();
+            mLocalLog.log("recenter Screen from %s", source.c_str());
         }
 
         // If a sensor being recentered is included in the current mode, apply rate limiting to
@@ -152,6 +162,41 @@
         }
     }
 
+    void setPosePredictorType(PosePredictorType type) override {
+        mPosePredictor.setPosePredictorType(type);
+    }
+
+    std::string toString_l(unsigned level) const override {
+        std::string prefixSpace(level, ' ');
+        std::string ss = prefixSpace + "HeadTrackingProcessor:\n";
+        StringAppendF(&ss, "%s maxTranslationalVelocity: %f meter/second\n", prefixSpace.c_str(),
+                      mOptions.maxTranslationalVelocity);
+        StringAppendF(&ss, "%s maxRotationalVelocity: %f rad/second\n", prefixSpace.c_str(),
+                      mOptions.maxRotationalVelocity);
+        StringAppendF(&ss, "%s freshnessTimeout: %0.4f ms\n", prefixSpace.c_str(),
+                      media::nsToFloatMs(mOptions.freshnessTimeout));
+        StringAppendF(&ss, "%s predictionDuration: %0.4f ms\n", prefixSpace.c_str(),
+                      media::nsToFloatMs(mOptions.predictionDuration));
+        StringAppendF(&ss, "%s autoRecenterWindowDuration: %0.4f ms\n", prefixSpace.c_str(),
+                      media::nsToFloatMs(mOptions.autoRecenterWindowDuration));
+        StringAppendF(&ss, "%s autoRecenterTranslationalThreshold: %f meter\n", prefixSpace.c_str(),
+                      mOptions.autoRecenterTranslationalThreshold);
+        StringAppendF(&ss, "%s autoRecenterRotationalThreshold: %f radians\n", prefixSpace.c_str(),
+                      mOptions.autoRecenterRotationalThreshold);
+        StringAppendF(&ss, "%s screenStillnessWindowDuration: %0.4f ms\n", prefixSpace.c_str(),
+                      media::nsToFloatMs(mOptions.screenStillnessWindowDuration));
+        StringAppendF(&ss, "%s screenStillnessTranslationalThreshold: %f meter\n",
+                      prefixSpace.c_str(), mOptions.screenStillnessTranslationalThreshold);
+        StringAppendF(&ss, "%s screenStillnessRotationalThreshold: %f radians\n",
+                      prefixSpace.c_str(), mOptions.screenStillnessRotationalThreshold);
+        ss += mModeSelector.toString(level + 1);
+        ss += mRateLimiter.toString(level + 1);
+        ss += mPosePredictor.toString(level + 1);
+        ss.append(prefixSpace + "ReCenterHistory:\n");
+        ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
+        return ss;
+    }
+
   private:
     const Options mOptions;
     float mPhysicalToLogicalAngle = 0;
@@ -168,6 +213,9 @@
     ScreenHeadFusion mScreenHeadFusion;
     ModeSelector mModeSelector;
     PoseRateLimiter mRateLimiter;
+    PosePredictor mPosePredictor;
+    static constexpr std::size_t mMaxLocalLogLine = 10;
+    SimpleLog mLocalLog{mMaxLocalLogLine};
 };
 
 }  // namespace
@@ -177,5 +225,38 @@
     return std::make_unique<HeadTrackingProcessorImpl>(options, initialMode);
 }
 
+std::string toString(HeadTrackingMode mode) {
+    switch (mode) {
+        case HeadTrackingMode::STATIC:
+            return "STATIC";
+        case HeadTrackingMode::WORLD_RELATIVE:
+            return "WORLD_RELATIVE";
+        case HeadTrackingMode::SCREEN_RELATIVE:
+            return "SCREEN_RELATIVE";
+    }
+    return "EnumNotImplemented";
+}
+
+std::string toString(PosePredictorType posePredictorType) {
+    switch (posePredictorType) {
+        case PosePredictorType::AUTO: return "AUTO";
+        case PosePredictorType::LAST: return "LAST";
+        case PosePredictorType::TWIST: return "TWIST";
+        case PosePredictorType::LEAST_SQUARES: return "LEAST_SQUARES";
+    }
+    return "UNKNOWN" + std::to_string((int)posePredictorType);
+}
+
+bool isValidPosePredictorType(PosePredictorType posePredictorType) {
+    switch (posePredictorType) {
+        case PosePredictorType::AUTO:
+        case PosePredictorType::LAST:
+        case PosePredictorType::TWIST:
+        case PosePredictorType::LEAST_SQUARES:
+            return true;
+    }
+    return false;
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/ModeSelector-test.cpp b/media/libheadtracking/ModeSelector-test.cpp
index a136e6b..6925908 100644
--- a/media/libheadtracking/ModeSelector-test.cpp
+++ b/media/libheadtracking/ModeSelector-test.cpp
@@ -18,7 +18,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/ModeSelector.cpp b/media/libheadtracking/ModeSelector.cpp
index cb3a27f..7ee21b3 100644
--- a/media/libheadtracking/ModeSelector.cpp
+++ b/media/libheadtracking/ModeSelector.cpp
@@ -13,11 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <android-base/stringprintf.h>
 
 #include "ModeSelector.h"
 
 namespace android {
 namespace media {
+using android::base::StringAppendF;
 
 ModeSelector::ModeSelector(const Options& options, HeadTrackingMode initialMode)
     : mOptions(options), mDesiredMode(initialMode), mActualMode(initialMode) {}
@@ -47,12 +49,15 @@
 }
 
 void ModeSelector::calculateActualMode(int64_t timestamp) {
-    bool isValidScreenToHead = mScreenToHead.has_value() &&
-                               timestamp - mScreenToHeadTimestamp < mOptions.freshnessTimeout;
-    bool isValidWorldToHead = mWorldToHead.has_value() &&
-                              timestamp - mWorldToHeadTimestamp < mOptions.freshnessTimeout;
-    bool isValidScreenStable = mScreenStable.has_value() &&
-                              timestamp - mScreenStableTimestamp < mOptions.freshnessTimeout;
+    int64_t screenToHeadGap = timestamp - mScreenToHeadTimestamp;
+    int64_t worldToHeadGap = timestamp - mWorldToHeadTimestamp;
+    int64_t screenStableGap = timestamp - mScreenStableTimestamp;
+    bool isValidScreenToHead =
+            mScreenToHead.has_value() && screenToHeadGap < mOptions.freshnessTimeout;
+    bool isValidWorldToHead =
+            mWorldToHead.has_value() && worldToHeadGap < mOptions.freshnessTimeout;
+    bool isValidScreenStable =
+            mScreenStable.has_value() && screenStableGap < mOptions.freshnessTimeout;
 
     HeadTrackingMode mode = mDesiredMode;
 
@@ -70,7 +75,17 @@
         }
     }
 
-    mActualMode = mode;
+    if (mode != mActualMode) {
+        mLocalLog.log(
+                "HT mode change from %s to %s, this ts %0.4f ms, lastTs+gap [ScreenToHead %0.4f + "
+                "%0.4f, WorldToHead %0.4f + %0.4f, ScreenStable %0.4f + %0.4f] ms",
+                media::toString(mActualMode).c_str(), media::toString(mode).c_str(),
+                media::nsToFloatMs(timestamp), media::nsToFloatMs(mScreenToHeadTimestamp),
+                media::nsToFloatMs(screenToHeadGap), media::nsToFloatMs(mWorldToHeadTimestamp),
+                media::nsToFloatMs(worldToHeadGap), media::nsToFloatMs(mScreenStableTimestamp),
+                media::nsToFloatMs(screenStableGap));
+        mActualMode = mode;
+    }
 }
 
 void ModeSelector::calculate(int64_t timestamp) {
@@ -99,5 +114,17 @@
     return mActualMode;
 }
 
+std::string ModeSelector::toString(unsigned level) const {
+    std::string prefixSpace(level, ' ');
+    std::string ss(prefixSpace);
+    ss.append("ModeSelector: ScreenToStage ")
+        .append(mScreenToStage.toString())
+        .append("\n")
+        .append(prefixSpace)
+        .append("Mode change history:\n")
+        .append(mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine));
+    return ss;
+}
+
 }  // namespace media
 }  // namespace android
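
As a side note on the freshness checks above, the pattern is simply "has a value and the gap since its timestamp is below the timeout". A tiny standalone sketch of that check (names hypothetical, not the ModeSelector API):

#include <cstdint>
#include <optional>

// All values are in nanoseconds, matching the int64_t timestamps used above.
bool isFresh(const std::optional<int64_t>& lastUpdateNs, int64_t nowNs,
             int64_t freshnessTimeoutNs) {
    return lastUpdateNs.has_value() && (nowNs - *lastUpdateNs) < freshnessTimeoutNs;
}
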
diff --git a/media/libheadtracking/ModeSelector.h b/media/libheadtracking/ModeSelector.h
index e537040..2475a5b 100644
--- a/media/libheadtracking/ModeSelector.h
+++ b/media/libheadtracking/ModeSelector.h
@@ -16,6 +16,7 @@
 #pragma once
 
 #include <optional>
+#include <audio_utils/SimpleLog.h>
 
 #include "media/HeadTrackingMode.h"
 #include "media/Pose.h"
@@ -114,6 +115,8 @@
      */
     HeadTrackingMode getActualMode() const;
 
+    std::string toString(unsigned level) const;
+
   private:
     const Options mOptions;
 
@@ -129,6 +132,9 @@
     HeadTrackingMode mActualMode;
     Pose3f mHeadToStage;
 
+    static constexpr std::size_t sMaxLocalLogLine = 10;
+    SimpleLog mLocalLog{sMaxLocalLogLine};
+
     void calculateActualMode(int64_t timestamp);
 };
 
diff --git a/media/libheadtracking/Pose-test.cpp b/media/libheadtracking/Pose-test.cpp
index a9e18ce..29dba29 100644
--- a/media/libheadtracking/Pose-test.cpp
+++ b/media/libheadtracking/Pose-test.cpp
@@ -18,7 +18,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using android::media::Pose3f;
diff --git a/media/libheadtracking/Pose.cpp b/media/libheadtracking/Pose.cpp
index ae39512..e03725b 100644
--- a/media/libheadtracking/Pose.cpp
+++ b/media/libheadtracking/Pose.cpp
@@ -13,14 +13,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <android-base/stringprintf.h>
 
 #include "media/Pose.h"
+#include "media/QuaternionUtil.h"
 #include "media/Twist.h"
-#include "QuaternionUtil.h"
 
 namespace android {
 namespace media {
 
+using android::base::StringAppendF;
 using Eigen::Vector3f;
 
 std::optional<Pose3f> Pose3f::fromVector(const std::vector<float>& vec) {
@@ -35,6 +37,19 @@
     return {mTranslation[0], mTranslation[1], mTranslation[2], rot[0], rot[1], rot[2]};
 }
 
+std::string Pose3f::toString() const {
+    const auto& vec = this->toVector();
+    std::string ss = "[";
+    for (auto f = vec.begin(); f != vec.end(); ++f) {
+        if (f != vec.begin()) {
+            ss.append(", ");
+        }
+        StringAppendF(&ss, "%0.2f", *f);
+    }
+    ss.append("]");
+    return ss;
+}
+
 std::tuple<Pose3f, bool> moveWithRateLimit(const Pose3f& from, const Pose3f& to, float t,
                                            float maxTranslationalVelocity,
                                            float maxRotationalVelocity) {
diff --git a/media/libheadtracking/PoseBias-test.cpp b/media/libheadtracking/PoseBias-test.cpp
index 9f42a2c..659dda0 100644
--- a/media/libheadtracking/PoseBias-test.cpp
+++ b/media/libheadtracking/PoseBias-test.cpp
@@ -17,7 +17,8 @@
 #include <gtest/gtest.h>
 
 #include "PoseBias.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/PoseDriftCompensator-test.cpp b/media/libheadtracking/PoseDriftCompensator-test.cpp
index df0a05f..521e3eb 100644
--- a/media/libheadtracking/PoseDriftCompensator-test.cpp
+++ b/media/libheadtracking/PoseDriftCompensator-test.cpp
@@ -18,7 +18,8 @@
 #include <cmath>
 
 #include "PoseDriftCompensator.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/PoseDriftCompensator.cpp b/media/libheadtracking/PoseDriftCompensator.cpp
index 0e90cad..2775790 100644
--- a/media/libheadtracking/PoseDriftCompensator.cpp
+++ b/media/libheadtracking/PoseDriftCompensator.cpp
@@ -18,7 +18,7 @@
 
 #include <cmath>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
diff --git a/media/libheadtracking/PosePredictor.cpp b/media/libheadtracking/PosePredictor.cpp
new file mode 100644
index 0000000..5209d54
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.cpp
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PosePredictor.h"
+
+namespace android::media {
+
+namespace {
+#ifdef ENABLE_VERIFICATION
+constexpr bool kEnableVerification = true;
+constexpr std::array<int, 3> kLookAheadMs{ 50, 100, 200 };
+#else
+constexpr bool kEnableVerification = false;
+constexpr std::array<int, 0> kLookAheadMs{};
+#endif
+
+} // namespace
+
+void LeastSquaresPredictor::add(int64_t atNs, const Pose3f& pose, const Twist3f& twist)
+{
+    (void)twist;
+    mLastAtNs = atNs;
+    mLastPose = pose;
+    const auto q = pose.rotation();
+    const double datNs = static_cast<double>(atNs);
+    mRw.add({datNs, q.w()});
+    mRx.add({datNs, q.x()});
+    mRy.add({datNs, q.y()});
+    mRz.add({datNs, q.z()});
+}
+
+Pose3f LeastSquaresPredictor::predict(int64_t atNs) const
+{
+    if (mRw.getN() < kMinimumSamplesForPrediction) return mLastPose;
+
+    /*
+     * Using parametric form, we have q(t) = { w(t), x(t), y(t), z(t) }.
+     * We compute the least squares prediction of w, x, y, z.
+     */
+    const double dLookahead = static_cast<double>(atNs);
+    Eigen::Quaternionf lsq(
+        mRw.getYFromX(dLookahead),
+        mRx.getYFromX(dLookahead),
+        mRy.getYFromX(dLookahead),
+        mRz.getYFromX(dLookahead));
+
+     /*
+      * We cheat here, since the result lsq is the least squares prediction
+      * in H (arbitrary quaternion), not the least squares prediction in
+      * SO(3) (unit quaternion).
+      *
+      * In other words, the result for lsq is most likely not a unit quaternion.
+      * To solve this, we normalize, thereby selecting the closest unit quaternion
+      * in SO(3) to the prediction in H.
+      */
+    lsq.normalize();
+    return Pose3f(lsq);
+}
+
+void LeastSquaresPredictor::reset() {
+    mLastAtNs = {};
+    mLastPose = {};
+    mRw.reset();
+    mRx.reset();
+    mRy.reset();
+    mRz.reset();
+}
+
+std::string LeastSquaresPredictor::toString(size_t index) const {
+    std::string s(index, ' ');
+    s.append("LeastSquaresPredictor using alpha: ")
+        .append(std::to_string(mAlpha))
+        .append(" last pose: ")
+        .append(mLastPose.toString())
+        .append("\n");
+    return s;
+}
+
+// Formatting
+static inline std::vector<size_t> createDelimiterIdx(size_t predictors, size_t lookaheads) {
+    if (lookaheads == 0) return {};
+    --lookaheads;
+    std::vector<size_t> delimiterIdx(lookaheads);
+    for (size_t i = 0; i < lookaheads; ++i) {
+        delimiterIdx[i] = (i + 1) * predictors;
+    }
+    return delimiterIdx;
+}
+
+PosePredictor::PosePredictor()
+    : mPredictors{
+            // First predictors must match switch in getCurrentPredictor()
+            std::make_shared<LastPredictor>(),
+            std::make_shared<TwistPredictor>(),
+            std::make_shared<LeastSquaresPredictor>(),
+            // After this, additional predictors can be placed here for comparison, such as
+            // std::make_shared<LeastSquaresPredictor>(0.25),
+        }
+    , mLookaheadMs(kLookAheadMs.begin(), kLookAheadMs.end())
+    , mVerifiers(std::size(mLookaheadMs) * std::size(mPredictors))
+    , mDelimiterIdx(createDelimiterIdx(std::size(mPredictors), std::size(mLookaheadMs)))
+    , mPredictionRecorder(
+        std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        mDelimiterIdx)
+    , mPredictionDurableRecorder(
+        std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        mDelimiterIdx)
+    {
+}
+
+Pose3f PosePredictor::predict(
+        int64_t timestampNs, const Pose3f& pose, const Twist3f& twist, float predictionDurationNs)
+{
+    if (timestampNs - mLastTimestampNs > kMaximumSampleIntervalBeforeResetNs) {
+        for (const auto& predictor : mPredictors) {
+            predictor->reset();
+        }
+        ++mResets;
+    }
+    mLastTimestampNs = timestampNs;
+
+    auto selectedPredictor = getCurrentPredictor();
+    if constexpr (kEnableVerification) {
+        // Update all Predictors
+        for (const auto& predictor : mPredictors) {
+            predictor->add(timestampNs, pose, twist);
+        }
+
+        // Update Verifiers and calculate errors
+        std::vector<float> error(std::size(mVerifiers));
+        for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+            constexpr float RADIAN_TO_DEGREES = 180 / M_PI;
+            const int64_t atNs =
+                    timestampNs + mLookaheadMs[i] * PosePredictorVerifier::kMillisToNanos;
+
+            for (size_t j = 0; j < mPredictors.size(); ++j) {
+                const size_t idx = i * std::size(mPredictors) + j;
+                mVerifiers[idx].verifyActualPose(timestampNs, pose);
+                mVerifiers[idx].addPredictedPose(atNs, mPredictors[j]->predict(atNs));
+                error[idx] =  RADIAN_TO_DEGREES * mVerifiers[idx].lastError();
+            }
+        }
+        // Record errors
+        mPredictionRecorder.record(error);
+        mPredictionDurableRecorder.record(error);
+    } else /* constexpr */ {
+        selectedPredictor->add(timestampNs, pose, twist);
+    }
+
+    // Deliver prediction
+    const int64_t predictionTimeNs = timestampNs + (int64_t)predictionDurationNs;
+    return selectedPredictor->predict(predictionTimeNs);
+}
+
+void PosePredictor::setPosePredictorType(PosePredictorType type) {
+    if (!isValidPosePredictorType(type)) return;
+    if (type == mSetType) return;
+    mSetType = type;
+    if (type == android::media::PosePredictorType::AUTO) {
+        type = android::media::PosePredictorType::LEAST_SQUARES;
+    }
+    if (type != mCurrentType) {
+        mCurrentType = type;
+        if constexpr (!kEnableVerification) {
+            // Verification keeps all predictors up-to-date.
+            // If we don't enable verification, we must reset the current predictor.
+            getCurrentPredictor()->reset();
+        }
+    }
+}
+
+std::string PosePredictor::toString(size_t index) const {
+    std::string prefixSpace(index, ' ');
+    std::string ss(prefixSpace);
+    ss.append("PosePredictor:\n")
+        .append(prefixSpace)
+        .append(" Current Prediction Type: ")
+        .append(android::media::toString(mCurrentType))
+        .append("\n")
+        .append(prefixSpace)
+        .append(" Resets: ")
+        .append(std::to_string(mResets))
+        .append("\n")
+        .append(getCurrentPredictor()->toString(index + 1));
+    if constexpr (kEnableVerification) {
+        // dump verification
+        ss.append(prefixSpace)
+            .append(" Prediction abs error (L1) degrees [ type (");
+        for (size_t i = 0; i < mPredictors.size(); ++i) {
+            if (i > 0) ss.append(" , ");
+            ss.append(mPredictors[i]->name());
+        }
+        ss.append(" ) x ( ");
+        for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+            if (i > 0) ss.append(" : ");
+            ss.append(std::to_string(mLookaheadMs[i]));
+        }
+        std::vector<float> cumulativeAverageErrors(std::size(mVerifiers));
+        for (size_t i = 0; i < cumulativeAverageErrors.size(); ++i) {
+            cumulativeAverageErrors[i] = mVerifiers[i].cumulativeAverageError();
+        }
+        ss.append(" ) ms ]\n")
+            .append(prefixSpace)
+            .append("  Cumulative Average Error:\n")
+            .append(prefixSpace)
+            .append("   ")
+            .append(VectorRecorder::toString(cumulativeAverageErrors, mDelimiterIdx, "%.3g"))
+            .append("\n")
+            .append(prefixSpace)
+            .append("  PerMinuteHistory:\n")
+            .append(mPredictionDurableRecorder.toString(index + 3))
+            .append(prefixSpace)
+            .append("  PerSecondHistory:\n")
+            .append(mPredictionRecorder.toString(index + 3));
+    }
+    return ss;
+}
+
+std::shared_ptr<PredictorBase> PosePredictor::getCurrentPredictor() const {
+    // We don't use a map here; we look up directly.
+    switch (mCurrentType) {
+    default:
+    case android::media::PosePredictorType::LAST:
+        return mPredictors[0];
+    case android::media::PosePredictorType::TWIST:
+        return mPredictors[1];
+    case android::media::PosePredictorType::AUTO: // shouldn't occur here.
+    case android::media::PosePredictorType::LEAST_SQUARES:
+        return mPredictors[2];
+    }
+}
+
+} // namespace android::media
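
To illustrate the "fit in H, then normalize back to SO(3)" step documented in LeastSquaresPredictor::predict() above, here is a standalone sketch. It is not the production path: the real code uses audio_utils::LinearLeastSquaresFit with exponential weighting, while this sketch uses a plain unweighted linear fit per quaternion component, which is enough to show why the result must be renormalized.

#include <Eigen/Geometry>

#include <array>
#include <cstdio>
#include <vector>

// Ordinary least squares for y = a + b * t, evaluated at tEval.
static float fitAndEvaluate(const std::vector<double>& t, const std::vector<double>& y,
                            double tEval) {
    const double n = static_cast<double>(t.size());
    double st = 0, sy = 0, stt = 0, sty = 0;
    for (size_t i = 0; i < t.size(); ++i) {
        st += t[i]; sy += y[i]; stt += t[i] * t[i]; sty += t[i] * y[i];
    }
    const double b = (n * sty - st * sy) / (n * stt - st * st);
    const double a = (sy - b * st) / n;
    return static_cast<float>(a + b * tEval);
}

int main() {
    // Sample a rotation about Z at a constant rate, one sample every 10 ms.
    std::vector<double> t;
    std::array<std::vector<double>, 4> comp;  // histories of w, x, y, z
    for (int i = 0; i < 8; ++i) {
        const double timeSec = i * 0.010;
        const Eigen::Quaternionf q(Eigen::AngleAxisf(
                static_cast<float>(0.5 * timeSec), Eigen::Vector3f::UnitZ()));
        t.push_back(timeSec);
        comp[0].push_back(q.w()); comp[1].push_back(q.x());
        comp[2].push_back(q.y()); comp[3].push_back(q.z());
    }

    // Predict 50 ms past the last sample: fit each component independently, then
    // normalize, which selects the closest unit quaternion to the (generally
    // non-unit) prediction made in H.
    const double tAhead = t.back() + 0.050;
    Eigen::Quaternionf predicted(fitAndEvaluate(t, comp[0], tAhead),
                                 fitAndEvaluate(t, comp[1], tAhead),
                                 fitAndEvaluate(t, comp[2], tAhead),
                                 fitAndEvaluate(t, comp[3], tAhead));
    std::printf("norm before normalization: %f\n", predicted.norm());
    predicted.normalize();

    const Eigen::Quaternionf actual(Eigen::AngleAxisf(
            static_cast<float>(0.5 * tAhead), Eigen::Vector3f::UnitZ()));
    std::printf("angular error: %f radians\n", predicted.angularDistance(actual));
    return 0;
}
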
diff --git a/media/libheadtracking/PosePredictor.h b/media/libheadtracking/PosePredictor.h
new file mode 100644
index 0000000..53211e3
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PosePredictorVerifier.h"
+#include <memory>
+#include <audio_utils/Statistics.h>
+#include <media/PosePredictorType.h>
+#include <media/Twist.h>
+#include <media/VectorRecorder.h>
+
+namespace android::media {
+
+// Interface for generic pose predictors
+class PredictorBase {
+public:
+    virtual ~PredictorBase() = default;
+    virtual void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) = 0;
+    virtual Pose3f predict(int64_t atNs) const = 0;
+    virtual void reset() = 0;
+    virtual std::string name() const = 0;
+    virtual std::string toString(size_t index) const = 0;
+};
+
+/**
+ * LastPredictor uses the last sample Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class LastPredictor : public PredictorBase {
+public:
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+        (void)atNs;
+        (void)twist;
+        mLastPose = pose;
+    }
+
+    Pose3f predict(int64_t atNs) const override {
+        (void)atNs;
+        return mLastPose;
+    }
+
+    void reset() override {
+        mLastPose = {};
+    }
+
+    std::string name() const override {
+        return "LAST";
+    }
+
+    std::string toString(size_t index) const override {
+        std::string s(index, ' ');
+        s.append("LastPredictor using last pose: ")
+            .append(mLastPose.toString())
+            .append("\n");
+        return s;
+    }
+
+private:
+    Pose3f mLastPose;
+};
+
+/**
+ * TwistPredictor uses the last sample Twist and Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class TwistPredictor : public PredictorBase {
+public:
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+        mLastAtNs = atNs;
+        mLastPose = pose;
+        mLastTwist = twist;
+    }
+
+    Pose3f predict(int64_t atNs) const override {
+        return mLastPose * integrate(mLastTwist, atNs - mLastAtNs);
+    }
+
+    void reset() override {
+        mLastAtNs = {};
+        mLastPose = {};
+        mLastTwist = {};
+    }
+
+    std::string name() const override {
+        return "TWIST";
+    }
+
+    std::string toString(size_t index) const override {
+        std::string s(index, ' ');
+        s.append("TwistPredictor using last pose: ")
+            .append(mLastPose.toString())
+            .append(" last twist: ")
+            .append(mLastTwist.toString())
+            .append("\n");
+        return s;
+    }
+
+private:
+    int64_t mLastAtNs{};
+    Pose3f mLastPose;
+    Twist3f mLastTwist;
+};
+
+
+/**
+ * LeastSquaresPredictor uses the Pose history for prediction.
+ *
+ * An exponentially weighted least squares fit is used.
+ *
+ * This class is not thread-safe.
+ */
+class LeastSquaresPredictor : public PredictorBase {
+public:
+    // alpha is the exponential decay.
+    LeastSquaresPredictor(double alpha = kDefaultAlphaEstimator)
+        : mAlpha(alpha)
+        , mRw(alpha)
+        , mRx(alpha)
+        , mRy(alpha)
+        , mRz(alpha)
+        {}
+
+    void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override;
+    Pose3f predict(int64_t atNs) const override;
+    void reset() override;
+    std::string name() const override {
+        return "LEAST_SQUARES(" + std::to_string(mAlpha) + ")";
+    }
+    std::string toString(size_t index) const override;
+
+private:
+    const double mAlpha;
+    int64_t mLastAtNs{};
+    Pose3f mLastPose;
+    static constexpr double kDefaultAlphaEstimator = 0.2;
+    static constexpr size_t kMinimumSamplesForPrediction = 4;
+    audio_utils::LinearLeastSquaresFit<double> mRw;
+    audio_utils::LinearLeastSquaresFit<double> mRx;
+    audio_utils::LinearLeastSquaresFit<double> mRy;
+    audio_utils::LinearLeastSquaresFit<double> mRz;
+};
+
+/*
+ * PosePredictor predicts the pose given sensor input at a time in the future.
+ *
+ * This class is not thread safe.
+ */
+class PosePredictor {
+public:
+    PosePredictor();
+
+    Pose3f predict(int64_t timestampNs, const Pose3f& pose, const Twist3f& twist,
+            float predictionDurationNs);
+
+    void setPosePredictorType(PosePredictorType type);
+
+    // convert predictions to a printable string
+    std::string toString(size_t index) const;
+
+private:
+    static constexpr int64_t kMaximumSampleIntervalBeforeResetNs =
+            300'000'000;
+
+    // Predictors
+    const std::vector<std::shared_ptr<PredictorBase>> mPredictors;
+
+    // Verification: one verifier is created per (predictor, lookahead) pair for comparison.
+    const std::vector<int> mLookaheadMs;
+
+    std::vector<PosePredictorVerifier> mVerifiers;
+
+    const std::vector<size_t> mDelimiterIdx;
+
+    // Recorders
+    media::VectorRecorder mPredictionRecorder{
+        std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        mDelimiterIdx};
+    media::VectorRecorder mPredictionDurableRecorder{
+        std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        mDelimiterIdx};
+
+    // Status
+
+    // SetType is the externally set predictor type.  It may include AUTO.
+    PosePredictorType mSetType = PosePredictorType::LEAST_SQUARES;
+
+    // CurrentType is the actual predictor type used by this class.
+    // It does not include AUTO because that metatype means the class
+    // chooses the best predictor type based on sensor statistics.
+    PosePredictorType mCurrentType = PosePredictorType::LEAST_SQUARES;
+
+    int64_t mResets{};
+    int64_t mLastTimestampNs{};
+
+    // Returns current predictor
+    std::shared_ptr<PredictorBase> getCurrentPredictor() const;
+};
+
+}  // namespace android::media
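
A minimal usage sketch for the class declared above (illustration only; it assumes this header and the media/ includes it relies on are on the include path, and that timestamps are in nanoseconds as elsewhere in the library):

#include "PosePredictor.h"

#include <media/Pose.h>
#include <media/Twist.h>

void predictExample() {
    android::media::PosePredictor predictor;
    predictor.setPosePredictorType(android::media::PosePredictorType::LEAST_SQUARES);

    // Feed head poses as they arrive from the sensor and request the pose expected
    // 50 ms in the future to hide processing latency (values here are placeholders).
    const int64_t nowNs = 1'000'000'000;
    const android::media::Pose3f headPose;    // identity pose for illustration
    const android::media::Twist3f headTwist;  // zero velocity for illustration
    constexpr float kPredictAheadNs = 50e6f;  // 50 ms expressed in nanoseconds

    const android::media::Pose3f predicted =
            predictor.predict(nowNs, headPose, headTwist, kPredictAheadNs);
    (void)predicted;
}
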
diff --git a/media/libheadtracking/PosePredictorVerifier.h b/media/libheadtracking/PosePredictorVerifier.h
new file mode 100644
index 0000000..6b4a357
--- /dev/null
+++ b/media/libheadtracking/PosePredictorVerifier.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+
+#include <audio_utils/Statistics.h>
+#include <media/Pose.h>
+
+namespace android::media {
+
+/**
+ * PosePredictorVerifier is used to validate predictions
+ *
+ * This class is not thread-safe
+ */
+class PosePredictorVerifier {
+public:
+    std::string toString() const {
+         return mErrorStats.toString();
+    }
+
+    static constexpr int64_t kMillisToNanos = 1000000;
+
+    void verifyActualPose(int64_t timestampNs, const Pose3f& pose) {
+        for (auto it = mPredictions.begin(); it != mPredictions.end();) {
+            if (it->first < timestampNs) {
+                it = mPredictions.erase(it);
+            } else {
+                int64_t dt = it->first - timestampNs;
+                if (std::abs(dt) < 10 * kMillisToNanos) {
+                    const float angle = pose.rotation().angularDistance(it->second.rotation());
+                    const float error = std::abs(angle); // L1 (absolute difference) here.
+                    mLastError = error;
+                    mErrorStats.add(error);
+                }
+                break;
+            }
+        }
+    }
+
+    void addPredictedPose(int64_t atNs, const Pose3f& pose) {
+        mPredictions.emplace_back(atNs, pose);
+    }
+
+    float lastError() const {
+        return mLastError;
+    }
+
+    float cumulativeAverageError() const {
+        return mErrorStats.getMean();
+    }
+
+private:
+    static constexpr double kCumulativeErrorAlpha = 0.999;
+    std::deque<std::pair<int64_t, Pose3f>> mPredictions;
+    float mLastError{};
+    android::audio_utils::Statistics<double> mErrorStats{kCumulativeErrorAlpha};
+};
+
+}  // namespace android::media
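
cumulativeAverageError() above relies on audio_utils::Statistics with alpha = 0.999, i.e. an exponentially weighted accumulator in which older errors decay. A small standalone sketch of that kind of weighted mean (an approximation for illustration, not the audio_utils implementation):

#include <cstdio>

class ExponentialMean {
public:
    explicit ExponentialMean(double alpha) : mAlpha(alpha) {}
    void add(double value) {
        // Older contributions decay by the factor alpha with every new sample.
        mWeightedSum = mAlpha * mWeightedSum + value;
        mWeight = mAlpha * mWeight + 1.;
    }
    double mean() const { return mWeight > 0. ? mWeightedSum / mWeight : 0.; }
private:
    const double mAlpha;
    double mWeightedSum = 0.;
    double mWeight = 0.;
};

int main() {
    ExponentialMean errorDegrees(0.999);
    for (int i = 0; i < 100; ++i) errorDegrees.add(0.5);  // steady 0.5 degree error
    std::printf("cumulative average error: %f\n", errorDegrees.mean());
    return 0;
}
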
diff --git a/media/libheadtracking/PoseRateLimiter-test.cpp b/media/libheadtracking/PoseRateLimiter-test.cpp
index f306183..ded874a 100644
--- a/media/libheadtracking/PoseRateLimiter-test.cpp
+++ b/media/libheadtracking/PoseRateLimiter-test.cpp
@@ -17,7 +17,8 @@
 #include <gtest/gtest.h>
 
 #include "PoseRateLimiter.h"
-#include "QuaternionUtil.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/PoseRateLimiter.cpp b/media/libheadtracking/PoseRateLimiter.cpp
index 380e22b..060bb4b 100644
--- a/media/libheadtracking/PoseRateLimiter.cpp
+++ b/media/libheadtracking/PoseRateLimiter.cpp
@@ -13,11 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <android-base/stringprintf.h>
 
 #include "PoseRateLimiter.h"
 
 namespace android {
 namespace media {
+using android::base::StringAppendF;
 
 PoseRateLimiter::PoseRateLimiter(const Options& options) : mOptions(options), mLimiting(false) {}
 
@@ -48,5 +50,15 @@
     return pose;
 }
 
+std::string PoseRateLimiter::toString(unsigned level) const {
+    std::string ss(level, ' ');
+    if (mLimiting) {
+        StringAppendF(&ss, "PoseRateLimiter: enabled with target: %s\n",
+                      mTargetPose.has_value() ? mTargetPose.value().toString().c_str() : "NULL");
+    } else {
+        StringAppendF(&ss, "PoseRateLimiter: disabled\n");
+    }
+    return ss;
+}
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/PoseRateLimiter.h b/media/libheadtracking/PoseRateLimiter.h
index aa2fe80..c673a33 100644
--- a/media/libheadtracking/PoseRateLimiter.h
+++ b/media/libheadtracking/PoseRateLimiter.h
@@ -77,6 +77,8 @@
 
     Pose3f calculatePose(int64_t timestamp);
 
+    std::string toString(unsigned level) const;
+
   private:
     struct Point {
         Pose3f pose;
diff --git a/media/libheadtracking/QuaternionUtil-test.cpp b/media/libheadtracking/QuaternionUtil-test.cpp
index e79e54a..cfeca00 100644
--- a/media/libheadtracking/QuaternionUtil-test.cpp
+++ b/media/libheadtracking/QuaternionUtil-test.cpp
@@ -16,7 +16,7 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using Eigen::Quaternionf;
@@ -51,6 +51,92 @@
     EXPECT_EQ(vec, quaternionToRotationVector(rotationVectorToQuaternion(vec)));
 }
 
+// Float precision necessitates this tolerance (1e-4f fails)
+constexpr float NEAR = 1e-3f;
+
+TEST(QuaternionUtil, quaternionToAngles_basic) {
+    float pitch, roll, yaw;
+
+    // angles as reported.
+    // choose 11 angles between -M_PI / 2 and M_PI / 2
+    for (int step = -5; step <= 5; ++step) {
+        const float angle = M_PI * step * 0.1f;
+
+        quaternionToAngles(rotationVectorToQuaternion({angle, 0.f, 0.f}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(angle, pitch, NEAR);
+        EXPECT_NEAR(0.f, roll, NEAR);
+        EXPECT_NEAR(0.f, yaw, NEAR);
+
+        quaternionToAngles(rotationVectorToQuaternion({0.f, angle, 0.f}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(0.f, pitch, NEAR);
+        EXPECT_NEAR(angle, roll, NEAR);
+        EXPECT_NEAR(0.f, yaw, NEAR);
+
+        quaternionToAngles(rotationVectorToQuaternion({0.f, 0.f, angle}), &pitch, &roll, &yaw);
+        EXPECT_NEAR(0.f, pitch, NEAR);
+        EXPECT_NEAR(0.f, roll, NEAR);
+        EXPECT_NEAR(angle, yaw, NEAR);
+    }
+
+    // Generates a debug string
+    const std::string s = quaternionToAngles<true /* DEBUG */>(
+            rotationVectorToQuaternion({M_PI, 0.f, 0.f}), &pitch, &roll, &yaw);
+    ASSERT_FALSE(s.empty());
+}
+
+TEST(QuaternionUtil, quaternionToAngles_zaxis) {
+    float pitch, roll, yaw;
+
+    for (int rot_step = -10; rot_step <= 10; ++rot_step) {
+        const float rot_angle = M_PI * rot_step * 0.1f;
+        // pitch independent of world Z rotation
+
+        // We don't test the boundaries of pitch +-M_PI/2 as roll can become
+        // degenerate and atan(0, 0) may report 0, PI, or -PI.
+        for (int step = -4; step <= 4; ++step) {
+            const float angle = M_PI * step * 0.1f;
+            auto q = rotationVectorToQuaternion({angle, 0.f, 0.f});
+            auto world_z = rotationVectorToQuaternion({0.f, 0.f, rot_angle});
+
+            // Sequential active rotations (on world frame) compose as R_2 * R_1.
+            quaternionToAngles(world_z * q, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(angle, pitch, NEAR);
+            EXPECT_NEAR(0.f, roll, NEAR);
+       }
+
+        // roll independent of world Z rotation
+        for (int step = -5; step <= 5; ++step) {
+            const float angle = M_PI * step * 0.1f;
+            auto q = rotationVectorToQuaternion({0.f, angle, 0.f});
+            auto world_z = rotationVectorToQuaternion({0.f, 0.f, rot_angle});
+
+            // Sequential active rotations (on world frame) compose as R_2 * R_1.
+            quaternionToAngles(world_z * q, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(0.f, pitch, NEAR);
+            EXPECT_NEAR(angle, roll, NEAR);
+
+            // Convert extrinsic (world-based) active rotations to a sequence of
+            // intrinsic rotations (each rotation based off of previous rotation
+            // frame).
+            //
+            // R_1 * R_intrinsic = R_extrinsic * R_1
+            //    implies
+            // R_intrinsic = (R_1)^-1 R_extrinsic R_1
+            //
+            auto world_z_intrinsic = rotationVectorToQuaternion(
+                    q.inverse() * Vector3f(0.f, 0.f, rot_angle));
+
+            // Sequential intrinsic rotations compose as R_1 * R_2.
+            quaternionToAngles(q * world_z_intrinsic, &pitch, &roll, &yaw);
+
+            EXPECT_NEAR(0.f, pitch, NEAR);
+            EXPECT_NEAR(angle, roll, NEAR);
+        }
+    }
+}
+
 }  // namespace
 }  // namespace media
 }  // namespace android
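
The extrinsic-to-intrinsic conversion used in the test above follows from R_1 * R_intrinsic = R_extrinsic * R_1, i.e. R_intrinsic = R_1^-1 * R_extrinsic * R_1. A standalone Eigen check of that identity (illustration only):

#include <Eigen/Geometry>

#include <cstdio>

int main() {
    using Eigen::AngleAxisf;
    using Eigen::Quaternionf;
    using Eigen::Vector3f;

    const Quaternionf r1(AngleAxisf(0.4f, Vector3f::UnitY()));          // initial rotation R_1
    const Quaternionf rExtrinsic(AngleAxisf(0.7f, Vector3f::UnitZ()));  // world-frame rotation

    // Conjugating by R_1 expresses the same rotation in the frame reached after R_1.
    const Quaternionf rIntrinsic = r1.inverse() * rExtrinsic * r1;

    // Both compositions describe the same final orientation.
    const float diff = (r1 * rIntrinsic).angularDistance(rExtrinsic * r1);
    std::printf("angular difference: %g radians\n", diff);
    return 0;
}
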
diff --git a/media/libheadtracking/QuaternionUtil.cpp b/media/libheadtracking/QuaternionUtil.cpp
index 5d090de..e245c80 100644
--- a/media/libheadtracking/QuaternionUtil.cpp
+++ b/media/libheadtracking/QuaternionUtil.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 #include <cassert>
 
diff --git a/media/libheadtracking/QuaternionUtil.h b/media/libheadtracking/QuaternionUtil.h
deleted file mode 100644
index f7a2ca9..0000000
--- a/media/libheadtracking/QuaternionUtil.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#pragma once
-
-#include <Eigen/Geometry>
-
-namespace android {
-namespace media {
-
-/**
- * Converts a rotation vector to an equivalent quaternion.
- * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
- * magnitude the rotation angle (in radians) around that axis.
- */
-Eigen::Quaternionf rotationVectorToQuaternion(const Eigen::Vector3f& rotationVector);
-
-/**
- * Converts a quaternion to an equivalent rotation vector.
- * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
- * magnitude the rotation angle (in radians) around that axis.
- */
-Eigen::Vector3f quaternionToRotationVector(const Eigen::Quaternionf& quaternion);
-
-/**
- * Returns a quaternion representing a rotation around the X-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateX(float angle);
-
-/**
- * Returns a quaternion representing a rotation around the Y-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateY(float angle);
-
-/**
- * Returns a quaternion representing a rotation around the Z-axis with the given amount (in
- * radians).
- */
-Eigen::Quaternionf rotateZ(float angle);
-
-}  // namespace media
-}  // namespace android
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 8ebaf6e..8a29027 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -18,26 +18,31 @@
 
 #define LOG_TAG "SensorPoseProvider"
 
-#include <inttypes.h>
-
+#include <algorithm>
 #include <future>
+#include <inttypes.h>
+#include <limits>
 #include <map>
 #include <thread>
 
+#include <android-base/stringprintf.h>
 #include <android-base/thread_annotations.h>
 #include <log/log_main.h>
 #include <sensor/SensorEventQueue.h>
 #include <sensor/SensorManager.h>
 #include <utils/Looper.h>
 
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
 namespace {
 
+using android::base::StringAppendF;
+
 // Identifier to use for our event queue on the loop.
 // The number 19 is arbitrary, only useful if using multiple objects on the same looper.
+// Note: Instead of a fixed number, the SensorEventQueue's fd could be used instead.
 constexpr int kIdent = 19;
 
 static inline Looper* ALooper_to_Looper(ALooper* alooper) {
@@ -56,7 +61,8 @@
     EventQueueGuard(const sp<SensorEventQueue>& queue, Looper* looper) : mQueue(queue) {
         mQueue->looper = Looper_to_ALooper(looper);
         mQueue->requestAdditionalInfo = false;
-        looper->addFd(mQueue->getFd(), kIdent, ALOOPER_EVENT_INPUT, nullptr, nullptr);
+        looper->addFd(mQueue->getFd(), kIdent, ALOOPER_EVENT_INPUT,
+                nullptr /* callback */, nullptr /* data */);
     }
 
     ~EventQueueGuard() {
@@ -71,7 +77,7 @@
     [[nodiscard]] SensorEventQueue* get() const { return mQueue.get(); }
 
   private:
-    sp<SensorEventQueue> mQueue;
+    const sp<SensorEventQueue> mQueue;
 };
 
 /**
@@ -91,10 +97,7 @@
         }
     }
 
-    SensorEnableGuard(const SensorEnableGuard&) = delete;
-    SensorEnableGuard& operator=(const SensorEnableGuard&) = delete;
-
-    // Enable moving.
+    // Enable move and delete default copy-ctor/copy-assignment.
     SensorEnableGuard(SensorEnableGuard&& other) : mQueue(other.mQueue), mSensor(other.mSensor) {
         other.mSensor = SensorPoseProvider::INVALID_HANDLE;
     }
@@ -118,6 +121,7 @@
     ~SensorPoseProviderImpl() override {
         // Disable all active sensors.
         mEnabledSensors.clear();
+        mQuit = true;
         mLooper->wake();
         mThread.join();
     }
@@ -126,33 +130,73 @@
         // Figure out the sensor's data format.
         DataFormat format = getSensorFormat(sensor);
         if (format == DataFormat::kUnknown) {
-            ALOGE("Unknown format for sensor %" PRId32, sensor);
+            ALOGE("%s: Unknown format for sensor %" PRId32, __func__, sensor);
             return false;
         }
 
         {
             std::lock_guard lock(mMutex);
-            mEnabledSensorsExtra.emplace(sensor, SensorExtra{ .format = format });
+            mEnabledSensorsExtra.emplace(
+                    sensor,
+                    SensorExtra{.format = format,
+                                .samplingPeriod = static_cast<int32_t>(samplingPeriod.count())});
         }
 
         // Enable the sensor.
         if (mQueue->enableSensor(sensor, samplingPeriod.count(), 0, 0)) {
-            ALOGE("Failed to enable sensor");
+            ALOGE("%s: Failed to enable sensor %" PRId32, __func__, sensor);
             std::lock_guard lock(mMutex);
             mEnabledSensorsExtra.erase(sensor);
             return false;
         }
 
-        mEnabledSensors.emplace(sensor, SensorEnableGuard(mQueue.get(), sensor));
+        mEnabledSensors.emplace(sensor, SensorEnableGuard(mQueue, sensor));
+        ALOGD("%s: Sensor %" PRId32 " started", __func__, sensor);
         return true;
     }
 
     void stopSensor(int handle) override {
+        ALOGD("%s: Sensor %" PRId32 " stopped", __func__, handle);
         mEnabledSensors.erase(handle);
         std::lock_guard lock(mMutex);
         mEnabledSensorsExtra.erase(handle);
     }
 
+    std::string toString(unsigned level) override {
+        std::string prefixSpace(level, ' ');
+        std::string ss = prefixSpace + "SensorPoseProvider:\n";
+        bool needUnlock = false;
+
+        prefixSpace += " ";
+        auto now = std::chrono::steady_clock::now();
+        if (!mMutex.try_lock_until(now + media::kSpatializerDumpSysTimeOutInSecond)) {
+            ss.append(prefixSpace).append("try_lock failed, dumpsys below maybe INACCURATE!\n");
+        } else {
+            needUnlock = true;
+        }
+
+        // Enabled sensor information
+        StringAppendF(&ss, "%sSensors total number %zu:\n", prefixSpace.c_str(),
+                      mEnabledSensorsExtra.size());
+        for (const auto& sensor : mEnabledSensorsExtra) {
+            StringAppendF(&ss,
+                          "%s[Handle: 0x%08x, Format %s Period (set %d max %0.4f min %0.4f) ms",
+                          prefixSpace.c_str(), sensor.first, toString(sensor.second.format).c_str(),
+                          sensor.second.samplingPeriod, media::nsToFloatMs(sensor.second.maxPeriod),
+                          media::nsToFloatMs(sensor.second.minPeriod));
+            if (sensor.second.discontinuityCount.has_value()) {
+                StringAppendF(&ss, ", DiscontinuityCount: %d",
+                              sensor.second.discontinuityCount.value());
+            }
+            ss += "]\n";
+        }
+
+        if (needUnlock) {
+            mMutex.unlock();
+        }
+        return ss;
+    }
+
   private:
     enum DataFormat {
         kUnknown,
@@ -167,18 +211,22 @@
     };
 
     struct SensorExtra {
-        DataFormat format;
+        DataFormat format = DataFormat::kUnknown;
+        int32_t samplingPeriod = 0;
+        int64_t latestTimestamp = 0;
+        int64_t maxPeriod = 0;
+        int64_t minPeriod = std::numeric_limits<int64_t>::max();
         std::optional<int32_t> discontinuityCount;
     };
 
+    bool mQuit = false;
     sp<Looper> mLooper;
     Listener* const mListener;
     SensorManager* const mSensorManager;
-    std::thread mThread;
-    std::mutex mMutex;
+    std::timed_mutex mMutex;
+    sp<SensorEventQueue> mQueue;
     std::map<int32_t, SensorEnableGuard> mEnabledSensors;
     std::map<int32_t, SensorExtra> mEnabledSensorsExtra GUARDED_BY(mMutex);
-    sp<SensorEventQueue> mQueue;
 
     // We must do some of the initialization operations on the worker thread, because the API relies
     // on the thread-local looper. In addition, as a matter of convenience, we store some of the
@@ -187,18 +235,25 @@
     // the worker thread and that thread would notify, via the promise below whenever initialization
     // is finished, and whether it was successful.
     std::promise<bool> mInitPromise;
+    std::thread mThread;
 
     SensorPoseProviderImpl(const char* packageName, Listener* listener)
         : mListener(listener),
-          mSensorManager(&SensorManager::getInstanceForPackage(String16(packageName))),
-          mThread([this] { threadFunc(); }) {}
-
+          mSensorManager(&SensorManager::getInstanceForPackage(String16(packageName))) {
+        mThread = std::thread([this] { threadFunc(); });
+    }
     void initFinished(bool success) { mInitPromise.set_value(success); }
 
     bool waitInitFinished() { return mInitPromise.get_future().get(); }
 
     void threadFunc() {
-        // Obtain looper.
+        // Name our std::thread to help identification.  As is, canCallJava == false.
+        androidSetThreadName("SensorPoseProvider-looper");
+
+        // Run at the highest non-realtime priority.
+        androidSetThreadPriority(gettid(), PRIORITY_URGENT_AUDIO);
+
+        // The looper is started on the created std::thread.
         mLooper = Looper::prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
 
         // Create event queue.
@@ -214,20 +269,28 @@
 
         initFinished(true);
 
-        while (true) {
-            int ret = mLooper->pollOnce(-1 /* no timeout */, nullptr, nullptr, nullptr);
+        while (!mQuit) {
+            const int ret = mLooper->pollOnce(-1 /* no timeout */, nullptr /* outFd */,
+                    nullptr /* outEvents */, nullptr /* outData */);
 
             switch (ret) {
                 case ALOOPER_POLL_WAKE:
-                    // Normal way to exit.
-                    return;
+                    // Continue to see if mQuit flag is set.
+                    // This can be spurious (due to bugreport being taken).
+                    continue;
 
                 case kIdent:
                     // Possible events on our queue.
                     break;
 
                 default:
-                    ALOGE("Unexpected status out of Looper::pollOnce: %d", ret);
+                    // Besides WAKE and kIdent, there should be no timeouts, callbacks,
+                    // ALOOPER_POLL_ERROR, or other events.
+                    // Exit now to avoid high frequency log spam on error,
+                    // e.g. if the fd becomes invalid (b/31093485).
+                    ALOGE("%s: Unexpected status out of Looper::pollOnce: %d", __func__, ret);
+                    mQuit = true;
+                    continue;
             }
 
             // Process an event.
@@ -239,7 +302,8 @@
             ssize_t size = mQueue->filterEvents(&event, actual);
 
             if (size < 0 || size > 1) {
-                ALOGE("Unexpected return value from SensorEventQueue::filterEvents: %zd", size);
+                ALOGE("%s: Unexpected return value from SensorEventQueue::filterEvents: %zd",
+                        __func__, size);
                 break;
             }
             if (size == 0) {
@@ -249,6 +313,7 @@
 
             handleEvent(event);
         }
+        ALOGD("%s: Exiting sensor event loop", __func__);
     }
 
     void handleEvent(const ASensorEvent& event) {
@@ -261,6 +326,7 @@
                 return;
             }
             value = parseEvent(event, iter->second.format, &iter->second.discontinuityCount);
+            updateEventTimestamp(event, iter->second);
         }
         mListener->onPose(event.timestamp, event.sensor, value.pose, value.twist,
                           value.isNewReference);
@@ -316,6 +382,15 @@
         return std::nullopt;
     }
 
+    void updateEventTimestamp(const ASensorEvent& event, SensorExtra& extra) {
+        if (extra.latestTimestamp != 0) {
+            int64_t gap = event.timestamp - extra.latestTimestamp;
+            extra.maxPeriod = std::max(gap, extra.maxPeriod);
+            extra.minPeriod = std::min(gap, extra.minPeriod);
+        }
+        extra.latestTimestamp = event.timestamp;
+    }
+
     static PoseEvent parseEvent(const ASensorEvent& event, DataFormat format,
                                 std::optional<int32_t>* discontinutyCount) {
         switch (format) {
@@ -345,6 +420,19 @@
                 LOG_ALWAYS_FATAL("Unexpected sensor type: %d", static_cast<int>(format));
         }
     }
+
+    static std::string toString(DataFormat format) {
+        switch (format) {
+            case DataFormat::kUnknown:
+                return "kUnknown";
+            case DataFormat::kQuaternion:
+                return "kQuaternion";
+            case DataFormat::kRotationVectorsAndDiscontinuityCount:
+                return "kRotationVectorsAndDiscontinuityCount";
+            default:
+                return "NotImplemented";
+        }
+    }
 };
 
 }  // namespace
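
The shutdown change above (set mQuit, wake the looper, and re-check the flag on every wake-up instead of treating a wake as "exit") follows a common worker-thread pattern. A plain C++ sketch of the same pattern, without the Android Looper (illustration only):

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

class Worker {
public:
    Worker() : mThread([this] { threadFunc(); }) {}
    ~Worker() {
        {
            std::lock_guard lock(mMutex);
            mQuit = true;
        }
        mCondition.notify_all();  // analogous to mLooper->wake()
        mThread.join();
    }

private:
    void threadFunc() {
        std::unique_lock lock(mMutex);
        while (!mQuit) {
            // Wake-ups may be spurious (e.g. a bugreport); loop and re-check mQuit.
            mCondition.wait_for(lock, std::chrono::milliseconds(100));
        }
    }

    std::mutex mMutex;
    std::condition_variable mCondition;
    bool mQuit = false;
    std::thread mThread;  // declared last so other members exist before the thread starts
};

int main() { Worker w; }  // construct and destroy: the thread starts and joins cleanly
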
diff --git a/media/libheadtracking/StillnessDetector-test.cpp b/media/libheadtracking/StillnessDetector-test.cpp
index b6cd479..56e7b4e 100644
--- a/media/libheadtracking/StillnessDetector-test.cpp
+++ b/media/libheadtracking/StillnessDetector-test.cpp
@@ -16,8 +16,9 @@
 
 #include <gtest/gtest.h>
 
-#include "QuaternionUtil.h"
 #include "StillnessDetector.h"
+
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 namespace android {
diff --git a/media/libheadtracking/StillnessDetector.cpp b/media/libheadtracking/StillnessDetector.cpp
index be7c893..5232084 100644
--- a/media/libheadtracking/StillnessDetector.cpp
+++ b/media/libheadtracking/StillnessDetector.cpp
@@ -26,6 +26,9 @@
     mFifo.clear();
     mWindowFull = false;
     mSuppressionDeadline.reset();
+    // A "true" state indicates stillness is detected (default = true)
+    mCurrentState = true;
+    mPreviousState = true;
 }
 
 void StillnessDetector::setInput(int64_t timestamp, const Pose3f& input) {
@@ -33,7 +36,15 @@
     discardOld(timestamp);
 }
 
+bool StillnessDetector::getPreviousState() const {
+    return mPreviousState;
+}
+
 bool StillnessDetector::calculate(int64_t timestamp) {
+    // Move the current stillness state to the previous state.
+    // This allows us to detect transitions into and out of stillness.
+    mPreviousState = mCurrentState;
+
     discardOld(timestamp);
 
     // Check whether all the poses in the queue are in the proximity of the new one. We want to do
@@ -60,15 +71,17 @@
 
     // If the window has not been full, return the default value.
     if (!mWindowFull) {
-        return mOptions.defaultValue;
+        mCurrentState = mOptions.defaultValue;
     }
-
     // Force "in motion" while the suppression deadline is active.
-    if (mSuppressionDeadline.has_value()) {
-        return false;
+    else if (mSuppressionDeadline.has_value()) {
+        mCurrentState = false;
+    }
+    else {
+        mCurrentState = !moved;
     }
 
-    return !moved;
+    return mCurrentState;
 }
 
 void StillnessDetector::discardOld(int64_t timestamp) {
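
A hypothetical caller sketch (not part of the change) showing how the new current/previous state pair lets a single calculate() call detect transitions into and out of stillness; the setup of the detector is assumed:

#include "StillnessDetector.h"

#include <cstdint>

// 'detector' is assumed to be already configured and fed via setInput().
void onTimestamp(android::media::StillnessDetector& detector, int64_t timestampNs) {
    const bool isStill = detector.calculate(timestampNs);
    const bool wasStill = detector.getPreviousState();  // state before this calculate()
    if (isStill && !wasStill) {
        // Just became still, e.g. start an auto-recenter countdown.
    } else if (!isStill && wasStill) {
        // Started moving again.
    }
}
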
diff --git a/media/libheadtracking/StillnessDetector.h b/media/libheadtracking/StillnessDetector.h
index ee4b2d8..abd7cc4 100644
--- a/media/libheadtracking/StillnessDetector.h
+++ b/media/libheadtracking/StillnessDetector.h
@@ -80,7 +80,8 @@
     void setInput(int64_t timestamp, const Pose3f& input);
     /** Calculate whether the stream is still at the given timestamp. */
     bool calculate(int64_t timestamp);
-
+    /** Return the stillness state from the previous call to calculate() */
+    bool getPreviousState() const;
   private:
     struct TimestampedPose {
         int64_t timestamp;
@@ -92,6 +93,8 @@
     const float mCosHalfRotationalThreshold;
     std::deque<TimestampedPose> mFifo;
     bool mWindowFull = false;
+    bool mCurrentState = true;
+    bool mPreviousState = true;
     // As soon as motion is detected, this will be set for the time of detection + window duration,
     // and during this time we will always consider ourselves in motion without checking. This is
     // used for hysteresis purposes, since because of the approximate method we use for determining
diff --git a/media/libheadtracking/Twist-test.cpp b/media/libheadtracking/Twist-test.cpp
index 7984e1e..9fbf81f 100644
--- a/media/libheadtracking/Twist-test.cpp
+++ b/media/libheadtracking/Twist-test.cpp
@@ -16,9 +16,7 @@
 
 #include "media/Twist.h"
 
-#include <gtest/gtest.h>
-
-#include "QuaternionUtil.h"
+#include "media/QuaternionUtil.h"
 #include "TestUtil.h"
 
 using Eigen::Quaternionf;
diff --git a/media/libheadtracking/Twist.cpp b/media/libheadtracking/Twist.cpp
index 664c4d5..fdec694 100644
--- a/media/libheadtracking/Twist.cpp
+++ b/media/libheadtracking/Twist.cpp
@@ -15,8 +15,8 @@
  */
 
 #include "media/Twist.h"
-
-#include "QuaternionUtil.h"
+#include <android-base/stringprintf.h>
+#include "media/QuaternionUtil.h"
 
 namespace android {
 namespace media {
@@ -39,5 +39,11 @@
     return os;
 }
 
+std::string Twist3f::toString() const {
+    return base::StringPrintf("[%0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f]",
+        mTranslationalVelocity[0], mTranslationalVelocity[1], mTranslationalVelocity[2],
+        mRotationalVelocity[0], mRotationalVelocity[1], mRotationalVelocity[2]);
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/VectorRecorder.cpp b/media/libheadtracking/VectorRecorder.cpp
new file mode 100644
index 0000000..5c87d05
--- /dev/null
+++ b/media/libheadtracking/VectorRecorder.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "media/VectorRecorder.h"
+
+namespace android::media {
+
+// Convert data to string with level indentation.
+// No need for a lock as the SimpleLog is thread-safe.
+std::string VectorRecorder::toString(size_t indent) const {
+    return mRecordLog.dumpToString(std::string(indent, ' ').c_str(), mMaxLocalLogLine);
+}
+
+// Record into local log when it is time.
+void VectorRecorder::record(const std::vector<float>& record) {
+    if (record.size() != mVectorSize) return;
+
+    // Protect against concurrent calls to record().
+    std::lock_guard lg(mLock);
+
+    // if it is time, record average data and reset.
+    if (shouldRecordLog_l()) {
+        sumToAverage_l();
+        mRecordLog.log(
+                "mean: %s, min: %s, max %s, calculated %zu samples in %0.4f second(s)",
+                toString(mSum, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                toString(mMin, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                toString(mMax, mDelimiterIdx, mFormatString.c_str()).c_str(),
+                mNumberOfSamples,
+                mNumberOfSecondsSinceFirstSample.count());
+        resetRecord_l();
+    }
+
+    // update stream average.
+    if (mNumberOfSamples++ == 0) {
+        mFirstSampleTimestamp = std::chrono::steady_clock::now();
+        for (size_t i = 0; i < mVectorSize; ++i) {
+            const float value = record[i];
+            mSum[i] += value;
+            mMax[i] = value;
+            mMin[i] = value;
+        }
+    } else {
+        for (size_t i = 0; i < mVectorSize; ++i) {
+            const float value = record[i];
+            mSum[i] += value;
+            mMax[i] = std::max(mMax[i], value);
+            mMin[i] = std::min(mMin[i], value);
+        }
+    }
+}
+
+bool VectorRecorder::shouldRecordLog_l() {
+    mNumberOfSecondsSinceFirstSample = std::chrono::duration_cast<std::chrono::seconds>(
+            std::chrono::steady_clock::now() - mFirstSampleTimestamp);
+    return mNumberOfSecondsSinceFirstSample >= mRecordThreshold;
+}
+
+void VectorRecorder::resetRecord_l() {
+    mSum.assign(mVectorSize, 0);
+    mMax.assign(mVectorSize, 0);
+    mMin.assign(mVectorSize, 0);
+    mNumberOfSamples = 0;
+    mNumberOfSecondsSinceFirstSample = std::chrono::seconds(0);
+}
+
+void VectorRecorder::sumToAverage_l() {
+    if (mNumberOfSamples == 0) return;
+    const float reciprocal = 1.f / mNumberOfSamples;
+    for (auto& p : mSum) {
+        p *= reciprocal;
+    }
+}
+
+}  // namespace android::media
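
A minimal usage sketch for the recorder above (illustration only; the constructor arguments mirror the VectorRecorder members declared in PosePredictor.h earlier in this change):

#include "media/VectorRecorder.h"

#include <chrono>
#include <cstddef>
#include <string>
#include <vector>

void recordExample() {
    // Record 3-element vectors, summarizing mean/min/max once per second into a local
    // log that keeps up to 10 lines, with no column delimiters.
    android::media::VectorRecorder recorder(
            3 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
            std::vector<std::size_t>{} /* delimiterIdx */);

    recorder.record({0.1f, 0.2f, 0.3f});  // call once per incoming sample

    const std::string dump = recorder.toString(2 /* indent */);
    (void)dump;
}
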
diff --git a/media/libheadtracking/include/media/HeadTrackingMode.h b/media/libheadtracking/include/media/HeadTrackingMode.h
index 38496e8..92d1165 100644
--- a/media/libheadtracking/include/media/HeadTrackingMode.h
+++ b/media/libheadtracking/include/media/HeadTrackingMode.h
@@ -15,6 +15,8 @@
  */
 #pragma once
 
+#include <string>
+
 namespace android {
 namespace media {
 
@@ -30,5 +32,7 @@
     SCREEN_RELATIVE,
 };
 
+std::string toString(HeadTrackingMode mode);
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/include/media/HeadTrackingProcessor.h b/media/libheadtracking/include/media/HeadTrackingProcessor.h
index 1744be3..d2b78f2 100644
--- a/media/libheadtracking/include/media/HeadTrackingProcessor.h
+++ b/media/libheadtracking/include/media/HeadTrackingProcessor.h
@@ -19,6 +19,7 @@
 
 #include "HeadTrackingMode.h"
 #include "Pose.h"
+#include "PosePredictorType.h"
 #include "Twist.h"
 
 namespace android {
@@ -95,9 +96,19 @@
     /**
      * This causes the current poses for both the head and/or screen to be considered "center".
      */
-    virtual void recenter(bool recenterHead = true, bool recenterScreen = true) = 0;
-};
+    virtual void recenter(
+            bool recenterHead = true, bool recenterScreen = true, std::string source = "") = 0;
 
+    /**
+     * Set the predictor type.
+     */
+    virtual void setPosePredictorType(PosePredictorType type) = 0;
+
+    /**
+     * Dump HeadTrackingProcessor parameters under caller lock.
+     */
+    virtual std::string toString_l(unsigned level) const = 0;
+};
 /**
  * Creates an instance featuring a default implementation of the HeadTrackingProcessor interface.
  */
diff --git a/media/libheadtracking/include/media/Pose.h b/media/libheadtracking/include/media/Pose.h
index e660bb9..50294ed 100644
--- a/media/libheadtracking/include/media/Pose.h
+++ b/media/libheadtracking/include/media/Pose.h
@@ -16,6 +16,7 @@
 #pragma once
 
 #include <optional>
+#include <string>
 #include <vector>
 #include <Eigen/Geometry>
 
@@ -63,6 +64,9 @@
      */
     std::vector<float> toVector() const;
 
+    // Convert instance to a string representation.
+    std::string toString() const;
+
     Pose3f& operator=(const Pose3f& other) {
         mTranslation = other.mTranslation;
         mRotation = other.mRotation;
@@ -128,5 +132,10 @@
                                            float maxTranslationalVelocity,
                                            float maxRotationalVelocity);
 
+template <typename T>
+static float nsToFloatMs(T ns) {
+    return ns * 1e-6f;
+}
+
 }  // namespace media
 }  // namespace android
diff --git a/media/libheadtracking/include/media/PosePredictorType.h b/media/libheadtracking/include/media/PosePredictorType.h
new file mode 100644
index 0000000..aa76d5d
--- /dev/null
+++ b/media/libheadtracking/include/media/PosePredictorType.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+namespace android::media {
+
+enum class PosePredictorType {
+    /** Use best predictor determined from sensor input */
+    AUTO,
+
+    /** Use last pose for future prediction */
+    LAST,
+
+    /** Use twist angular velocity for future prediction */
+    TWIST,
+
+    /** Use weighted least squares history of prior poses (ignoring twist) */
+    LEAST_SQUARES,
+};
+
+std::string toString(PosePredictorType posePredictorType);
+bool isValidPosePredictorType(PosePredictorType posePredictorType);
+
+}  // namespace android::media
diff --git a/media/libheadtracking/include/media/QuaternionUtil.h b/media/libheadtracking/include/media/QuaternionUtil.h
new file mode 100644
index 0000000..a711d17
--- /dev/null
+++ b/media/libheadtracking/include/media/QuaternionUtil.h
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <android-base/stringprintf.h>
+#include <Eigen/Geometry>
+#include <media/Pose.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Converts a rotation vector to an equivalent quaternion.
+ * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
+ * magnitude the rotation angle (in radians) around that axis.
+ */
+Eigen::Quaternionf rotationVectorToQuaternion(const Eigen::Vector3f& rotationVector);
+
+/**
+ * Converts a quaternion to an equivalent rotation vector.
+ * The rotation vector is given as a 3-vector whose direction represents the rotation axis and its
+ * magnitude the rotation angle (in radians) around that axis.
+ */
+Eigen::Vector3f quaternionToRotationVector(const Eigen::Quaternionf& quaternion);
+
+/**
+ * Returns a quaternion representing a rotation around the X-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateX(float angle);
+
+/**
+ * Returns a quaternion representing a rotation around the Y-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateY(float angle);
+
+/**
+ * Returns a quaternion representing a rotation around the Z-axis with the given amount (in
+ * radians).
+ */
+Eigen::Quaternionf rotateZ(float angle);
+
+/**
+ * Compute separate roll, pitch, and yaw angles from a quaternion
+ *
+ * The roll, pitch, and yaw follow standard 3DOF virtual reality definitions
+ * with angles increasing counter-clockwise by the right hand rule.
+ *
+ * https://en.wikipedia.org/wiki/Six_degrees_of_freedom
+ *
+ * The roll, pitch, and yaw angles are calculated separately from the device frame
+ * rotation from the world frame.  This is not to be confused with the
+ * intrinsic Euler xyz roll, pitch, yaw 'nautical' angles.
+ *
+ * The input quaternion is the active rotation that transforms the
+ * World/Stage frame to the Head/Screen frame.
+ *
+ * The input quaternion may come from two principal sensors: DEVICE and HEADSET
+ * and are interpreted as below.
+ *
+ * DEVICE SENSOR
+ *
+ * Android sensor stack assumes device coordinates along the x/y axis.
+ *
+ * https://developer.android.com/reference/android/hardware/SensorEvent#sensor.type_rotation_vector:
+ *
+ * Looking down from the clouds. Android Device coordinate system (not used)
+ *        DEVICE --> X (Y goes through top speaker towards the observer)
+ *           | Z
+ *           V
+ *         USER
+ *
+ * Internally within this library, we transform the device sensor coordinate
+ * system by rotating the coordinate system around the X axis by -M_PI/2.
+ * This aligns the device coordinate system to match that of the
+ * Head Tracking sensor (see below), should the user be facing the device in
+ * natural (phone == portrait, tablet == ?) orientation.
+ *
+ * Looking down from the clouds. Spatializer device frame.
+ *           Y
+ *           ^
+ *           |
+ *        DEVICE --> X (Z goes through top of the DEVICE towards the observer)
+ *
+ *         USER
+ *
+ * The reference world frame is the device in vertical
+ * natural (phone == portrait) orientation with the top pointing straight
+ * up from the ground and the front-to-back direction facing north.
+ * The world frame is presumed locally fixed by magnetic and gravitational reference.
+ *
+ * HEADSET SENSOR
+ * https://developer.android.com/reference/android/hardware/SensorEvent#sensor.type_head_tracker:
+ *
+ * Looking down from the clouds. Headset frame.
+ *           Y
+ *           ^
+ *           |
+ *         USER ---> X
+ *         (Z goes through the top of the USER head towards the observer)
+ *
+ * The Z axis goes from the neck to the top of the head, the X axis goes
+ * from the left ear to the right ear, the Y axis goes from the back of the
+ * head through the nose.
+ *
+ * Typically for a headset sensor, the X and Y axes have some arbitrary fixed
+ * reference.
+ *
+ * ROLL
+ * Roll is the counter-clockwise L/R motion around the Y axis (hence ZX plane).
+ * The right hand convention means the plane is ZX not XZ.
+ * This can be considered the azimuth angle in spherical coordinates
+ * with Pitch being the elevation angle.
+ *
+ * Roll has a range of -M_PI to M_PI radians.
+ *
+ * Rolling a device changes between portrait and landscape
+ * modes, and for L/R speakers will limit the amount of crosstalk cancellation.
+ * Roll increases as the device (if vertical like a coin) rolls from left to right.
+ *
+ * By this definition, Roll is less accurate when the device is flat
+ * on a table rather than standing on edge.
+ * When perfectly flat on the table, roll may report as 0, M_PI, or -M_PI
+ * due to the ambiguity / degeneracy of atan(0, 0) in this case (the device Y axis aligns with
+ * the world Z axis), but exactly flat rarely occurs.
+ *
+ * Roll for a headset is the angle the head is inclined to the right side
+ * (like sleeping).
+ *
+ * PITCH
+ * Pitch is the Surface normal Y deviation (along the Z axis away from the earth).
+ * This can be considered the elevation angle in spherical coordinates using
+ * Roll as the azimuth angle.
+ *
+ * Pitch for a device determines whether the device is "upright" or lying
+ * flat on the table (i.e. surface normal).  Pitch is 0 when upright, decreases
+ * as the device top moves away from the user to -M_PI/2 when lying down face up.
+ * Pitch increases from 0 to M_PI/2 when the device tilts towards the user, and is
+ * M_PI/2 radians when face down.
+ *
+ * Pitch for a headset is the user tilting the head/chin up or down,
+ * like nodding.
+ *
+ * Pitch has a range of -M_PI/2 to M_PI/2 radians.
+ *
+ * YAW
+ * Yaw is the rotational component along the earth's XY tangential plane,
+ * where the Z axis points radially away from the earth.
+ *
+ * Yaw has a range of -M_PI to M_PI radians.  If used for azimuth angle in
+ * spherical coordinates, the elevation angle may be derived from the Z axis.
+ *
+ * A positive increase means the phone is rotating from right to left
+ * when considered flat on the table.
+ * (headset: the user is rotating their head to look left).
+ * If left speaker or right earbud is pointing straight up or down,
+ * this value is imprecise and Pitch or Roll is a more useful measure.
+ *
+ * Yaw for a device is like spinning a vertical device along the axis of
+ * gravity, like spinning a coin.  Yaw increases as the coin / device
+ * spins from right to left, rotating around the Z axis.
+ *
+ * Yaw for a headset is the user turning the head to look left or right
+ * like shaking the head for no. Yaw is the primary angle for a binaural
+ * head tracking device.
+ *
+ * @param q input active rotation Eigen quaternion.
+ * @param pitch output set to pitch if not nullptr
+ * @param roll output set to roll if not nullptr
+ * @param yaw output set to yaw if not nullptr
+ * @return (DEBUG==true) a debug string with intermediate transformation matrix
+ *                       interpreted as the unit basis vectors.
+ */
+ */
+
+// DEBUG returns a debug string for analysis.
+// We save unneeded rotation matrix computation by keeping the DEBUG option constexpr.
+template <bool DEBUG = false>
+auto quaternionToAngles(const Eigen::Quaternionf& q, float *pitch, float *roll, float *yaw) {
+    /*
+     * The quaternion here is the active rotation that transforms from the world frame
+     * to the device frame: the observer remains in the world frame,
+     * and the device (frame) moves.
+     *
+     * We use this to map device coordinates to world coordinates.
+     *
+     * Device:  We transform the device right speaker (X == 1), top speaker (Z == 1),
+     * and surface inwards normal (Y == 1) positions to the world frame.
+     *
+     * Headset: We transform the headset right bud (X == 1), top (Z == 1) and
+     * nose normal (Y == 1) positions to the world frame.
+     *
+     * This is the same as the world frame coordinates of the
+     *  unit device vector in the X dimension (ux),
+     *  unit device vector in the Y dimension (uy),
+     *  unit device vector in the Z dimension (uz).
+     *
+     * Rather than doing the rotation on unit vectors individually,
+     * one can simply use the columns of the rotation matrix of
+     * the world-to-body quaternion, so the computation is exceptionally fast.
+     *
+     * Furthermore, Eigen inlines the "toRotationMatrix" method
+     * and we rely on unused expression removal for efficiency
+     * and any elements not used should not be computed.
+     *
+     * Side note: For applying a rotation to several points,
+     * it is more computationally efficient to extract and
+     * use the rotation matrix form than the quaternion.
+     * So use of the rotation matrix is good for many reasons.
+     */
+    const auto rotation = q.toRotationMatrix();
+
+    /*
+     * World location of unit vector right speaker assuming the phone is situated
+     * natural (phone == portrait) mode.
+     * (headset: right bud).
+     *
+     * auto ux = q.rotation() * Eigen::Vector3f{1.f, 0.f, 0.f};
+     *         = rotation.col(0);
+     */
+    [[maybe_unused]] const auto ux_0 = rotation.coeff(0, 0);
+    [[maybe_unused]] const auto ux_1 = rotation.coeff(1, 0);
+    [[maybe_unused]] const auto ux_2 = rotation.coeff(2, 0);
+
+    [[maybe_unused]] std::string coordinates;
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, "ux: %f %f %f", ux_0, ux_1, ux_2);
+    }
+
+    /*
+     * World location of screen-inwards normal assuming the phone is situated
+     * in natural (phone == portrait) mode.
+     * (headset: user nose).
+     *
+     * auto uy = q.rotation() * Eigen::Vector3f{0.f, 1.f, 0.f};
+     *         = rotation.col(1);
+     */
+    [[maybe_unused]] const auto uy_0 = rotation.coeff(0, 1);
+    [[maybe_unused]] const auto uy_1 = rotation.coeff(1, 1);
+    [[maybe_unused]] const auto uy_2 = rotation.coeff(2, 1);
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, "uy: %f %f %f", uy_0, uy_1, uy_2);
+    }
+
+    /*
+     * World location of unit vector top speaker.
+     * (headset: top of head).
+     * auto uz = q.rotation() * Eigen::Vector3f{0.f, 0.f, 1.f};
+     *         = rotation.col(2);
+     */
+    [[maybe_unused]] const auto uz_0 = rotation.coeff(0, 2);
+    [[maybe_unused]] const auto uz_1 = rotation.coeff(1, 2);
+    [[maybe_unused]] const auto uz_2 = rotation.coeff(2, 2);
+    if constexpr (DEBUG) {
+        base::StringAppendF(&coordinates, "uz: %f %f %f", uz_0, uz_1, uz_2);
+    }
+
+    // pitch computed from nose world Z coordinate;
+    // hence independent of rotation around world Z.
+    if (pitch != nullptr) {
+        *pitch = asin(std::clamp(uy_2, -1.f, 1.f));
+    }
+
+    // roll computed from head/right world Z coordinate;
+    // hence independent of rotation around world Z.
+    if (roll != nullptr) {
+        // atan2 takes care of implicit scale normalization of Z, X.
+        *roll = -atan2(ux_2, uz_2);
+    }
+
+    // yaw computed from right ear angle projected onto world XY plane
+    // where world Z == 0.  This is the rotation around world Z.
+    if (yaw != nullptr) {
+        // atan2 takes care of implicit scale normalization of X, Y.
+        *yaw =  atan2(ux_1, ux_0);
+    }
+
+    if constexpr (DEBUG) {
+        return coordinates;
+    }
+}
+
+}  // namespace media
+}  // namespace android
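
As a usage sketch for the quaternionToAngles() helper documented above: an active rotation of +pi/2 around the world Z axis should read back as yaw of about +1.5708 with near-zero pitch and roll, and the DEBUG instantiation returns the basis-vector string instead of void. The example is illustrative, assuming the libheadtracking include path:

    #include <cmath>
    #include <cstdio>
    #include <string>
    #include <media/QuaternionUtil.h>

    int main() {
        using android::media::quaternionToAngles;
        using android::media::rotateZ;

        // Active rotation of +90 degrees around the world Z axis.
        const Eigen::Quaternionf q = rotateZ(M_PI / 2);

        float pitch, roll, yaw;
        quaternionToAngles(q, &pitch, &roll, &yaw);
        printf("pitch=%f roll=%f yaw=%f\n", pitch, roll, yaw);  // ~0, ~0, ~1.5708

        // The DEBUG=true instantiation additionally returns the unit basis vectors as text.
        const std::string basis = quaternionToAngles<true /* DEBUG */>(q, nullptr, nullptr, nullptr);
        printf("%s\n", basis.c_str());
        return 0;
    }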
diff --git a/media/libheadtracking/include/media/SensorPoseProvider.h b/media/libheadtracking/include/media/SensorPoseProvider.h
index 0f42074..4609e0c 100644
--- a/media/libheadtracking/include/media/SensorPoseProvider.h
+++ b/media/libheadtracking/include/media/SensorPoseProvider.h
@@ -28,6 +28,9 @@
 namespace android {
 namespace media {
 
+// Timeout for the Spatializer dumpsys trylock; don't block for more than 3 seconds.
+constexpr auto kSpatializerDumpSysTimeOutInSecond = std::chrono::seconds(3);
+
 /**
  * A utility providing streaming of pose data from motion sensors provided by the Sensor Framework.
  *
@@ -100,6 +103,11 @@
      * discover properties of the sensor.
      */
     virtual std::optional<const Sensor> getSensorByHandle(int32_t handle) = 0;
+
+    /**
+     * Dump SensorPoseProvider parameters and history data.
+     */
+    virtual std::string toString(unsigned level) = 0;
 };
 
 }  // namespace media
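
kSpatializerDumpSysTimeOutInSecond above is meant to bound how long a dump path may wait for a lock. A minimal sketch of that pattern, assuming a std::timed_mutex stand-in for the service lock; the Spatializer's actual lock type and dump text are not shown in this patch:

    #include <chrono>
    #include <mutex>
    #include <string>
    #include <media/SensorPoseProvider.h>

    std::timed_mutex gStateMutex;  // stands in for the service's main state lock

    // Returns dump text, or a note if the lock could not be taken within the timeout.
    std::string dumpWithTimeout() {
        std::unique_lock<std::timed_mutex> lock(gStateMutex, std::defer_lock);
        if (!lock.try_lock_for(android::media::kSpatializerDumpSysTimeOutInSecond)) {
            return "<state lock timed out after 3 seconds, partial dump only>\n";
        }
        return "<full state dumped here under lock>\n";
    }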
diff --git a/media/libheadtracking/include/media/Twist.h b/media/libheadtracking/include/media/Twist.h
index 291cea3..51b83d8 100644
--- a/media/libheadtracking/include/media/Twist.h
+++ b/media/libheadtracking/include/media/Twist.h
@@ -66,6 +66,9 @@
         return Twist3f(mTranslationalVelocity / s, mRotationalVelocity / s);
     }
 
+    // Convert instance to a string representation.
+    std::string toString() const;
+
   private:
     Eigen::Vector3f mTranslationalVelocity;
     Eigen::Vector3f mRotationalVelocity;
diff --git a/media/libheadtracking/include/media/VectorRecorder.h b/media/libheadtracking/include/media/VectorRecorder.h
new file mode 100644
index 0000000..4103a7d
--- /dev/null
+++ b/media/libheadtracking/include/media/VectorRecorder.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/stringprintf.h>
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+#include <chrono>
+#include <math.h>
+#include <mutex>
+#include <vector>
+
+namespace android::media {
+
+/**
+ * VectorRecorder records a vector of floats computing the average, max, and min
+ * over given time periods.
+ *
+ * The class is thread-safe.
+ */
+class VectorRecorder {
+  public:
+    /**
+     * @param vectorSize is the size of the vector input.
+     *        If the input does not match this size, it is ignored.
+     * @param threshold is the time interval we bucket for averaging.
+     * @param maxLogLine is the number of lines we log.  At this
+     *        threshold, the oldest line will expire when the new line comes in.
+     * @param delimiterIdx is an optional array of delimiter indices that
+     *        replace the ',' with a ':'.  For example, if delimiterIdx = { 3 } then
+     *        a 6-element vector would format as [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+     * @param formatString is the sprintf format string for the double converted data
+     *        to use.
+     */
+    VectorRecorder(
+        size_t vectorSize, std::chrono::duration<double> threshold, int maxLogLine,
+            std::vector<size_t> delimiterIdx = {},
+            const std::string_view formatString = {})
+        : mVectorSize(vectorSize)
+        , mDelimiterIdx(std::move(delimiterIdx))
+        , mFormatString(formatString)
+        , mRecordLog(maxLogLine)
+        , mRecordThreshold(threshold)
+    {
+        resetRecord_l();  // OK to call - we're in the constructor.
+    }
+
+    /** Convert recorded vector data to string with level indentation */
+    std::string toString(size_t indent) const;
+
+    /**
+     * @brief Record a vector of floats.
+     *
+     * @param record a vector of floats.
+     */
+    void record(const std::vector<float>& record);
+
+    /**
+     * Format vector to a string, [0.00, 0.00, 0.00, -1.29, -0.50, 15.27].
+     *
+     * @param delimiterIdx is an optional array of delimiter indices that
+     *        replace the ',' with a ':'.  For example if delimiterIdx = { 3 } then
+     *        the above example would format as [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+     * @param formatString is the sprintf format string for the double converted data
+     *        to use.
+     */
+    template <typename T>
+    static std::string toString(const std::vector<T>& record,
+            const std::vector<size_t>& delimiterIdx = {},
+            const char * const formatString = nullptr) {
+        if (record.size() == 0) {
+            return "[]";
+        }
+
+        std::string ss = "[";
+        auto nextDelimiter = delimiterIdx.begin();
+        for (size_t i = 0; i < record.size(); ++i) {
+            if (i > 0) {
+                if (nextDelimiter != delimiterIdx.end()
+                        && *nextDelimiter <= i) {
+                     ss.append(" : ");
+                     ++nextDelimiter;
+                } else {
+                    ss.append(", ");
+                }
+            }
+            if (formatString != nullptr && *formatString) {
+                base::StringAppendF(&ss, formatString, static_cast<double>(record[i]));
+            } else {
+                base::StringAppendF(&ss, "%5.2lf", static_cast<double>(record[i]));
+            }
+        }
+        ss.append("]");
+        return ss;
+    }
+
+  private:
+    static constexpr int mMaxLocalLogLine = 10;
+
+    const size_t mVectorSize;
+    const std::vector<size_t> mDelimiterIdx;
+    const std::string mFormatString;
+
+    // Local log for historical vector data.
+    // Locked internally, so does not need mutex below.
+    SimpleLog mRecordLog{mMaxLocalLogLine};
+
+    std::mutex mLock;
+
+    // Time threshold to record vectors in the local log.
+    // Vector data will be recorded into log at least every mRecordThreshold.
+    std::chrono::duration<double> mRecordThreshold GUARDED_BY(mLock);
+
+    // Number of seconds since first sample in mSum.
+    std::chrono::duration<double> mNumberOfSecondsSinceFirstSample GUARDED_BY(mLock);
+
+    // Timestamp of first sample recorded in mSum.
+    std::chrono::time_point<std::chrono::steady_clock> mFirstSampleTimestamp GUARDED_BY(mLock);
+
+    // Number of samples in mSum.
+    size_t mNumberOfSamples GUARDED_BY(mLock) = 0;
+
+    std::vector<double> mSum GUARDED_BY(mLock);
+    std::vector<float> mMax GUARDED_BY(mLock);
+    std::vector<float> mMin GUARDED_BY(mLock);
+
+    // Computes mNumberOfSecondsSinceFirstSample, returns true if time to record.
+    bool shouldRecordLog_l() REQUIRES(mLock);
+
+    // Resets the running mNumberOfSamples, mSum, mMax, mMin.
+    void resetRecord_l() REQUIRES(mLock);
+
+    // Convert mSum to an average.
+    void sumToAverage_l() REQUIRES(mLock);
+};  // VectorRecorder
+
+}  // namespace android::media
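
A brief usage sketch for VectorRecorder as declared above: record a 6-element pose vector (three translation and three rotation components) and render it with a ':' delimiter between the two halves. The 0.5 s threshold and 6-line history are illustrative values, not defaults from the patch:

    #include <chrono>
    #include <cstdio>
    #include <vector>
    #include <media/VectorRecorder.h>

    int main() {
        using android::media::VectorRecorder;

        // 6-wide vectors, bucket every 0.5 s, keep 6 log lines, ':' after index 3.
        VectorRecorder recorder(
                6 /* vectorSize */, std::chrono::milliseconds(500), 6 /* maxLogLine */,
                {3} /* delimiterIdx */);

        recorder.record({0.f, 0.f, 0.f, -1.29f, -0.50f, 15.27f});
        recorder.record({0.f, 0.1f, 0.f, -1.30f, -0.48f, 15.20f});
        printf("%s", recorder.toString(2 /* indent */).c_str());

        // The static formatter can also be used directly on any float vector.
        printf("%s\n", VectorRecorder::toString<float>(
                {0.f, 0.f, 0.f, -1.29f, -0.50f, 15.27f}, {3}).c_str());
        return 0;
    }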
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 1b8656d..2ba1fc3 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,7 +26,6 @@
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <drm/drm_framework_common.h>
-#include <log/log.h>
 #include <media/mediametadataretriever.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -344,19 +343,22 @@
     mFrameDecoded = false;
     mFrameMemory.clear();
 
-    mRetriever = new MediaMetadataRetriever();
-    status_t err = mRetriever->setDataSource(mDataSource, "image/heif");
+    sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever();
+    status_t err = retriever->setDataSource(mDataSource, "image/heif");
     if (err != OK) {
         ALOGE("failed to set data source!");
-
         mRetriever.clear();
         mDataSource.clear();
         return false;
     }
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever = retriever;
+    }
     ALOGV("successfully set data source.");
 
-    const char* hasImage = mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
-    const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
+    const char* hasImage = retriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
+    const char* hasVideo = retriever->extractMetadata(METADATA_KEY_HAS_VIDEO);
 
     mHasImage = hasImage && !strcasecmp(hasImage, "yes");
     mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
@@ -364,7 +366,7 @@
     HeifFrameInfo* defaultInfo = nullptr;
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        sp<IMemory> sharedMem = mRetriever->getImageAtIndex(
+        sp<IMemory> sharedMem = retriever->getImageAtIndex(
                 -1, mOutputColor, true /*metaOnly*/);
 
         if (sharedMem == nullptr || sharedMem->unsecurePointer() == nullptr) {
@@ -399,7 +401,7 @@
     }
 
     if (mHasVideo) {
-        sp<IMemory> sharedMem = mRetriever->getFrameAtTime(0,
+        sp<IMemory> sharedMem = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                 mOutputColor, true /*metaOnly*/);
 
@@ -425,7 +427,7 @@
 
         initFrameInfo(&mSequenceInfo, videoFrame);
 
-        const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+        const char* frameCount = retriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
         if (frameCount == nullptr) {
             android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
             ALOGD("No valid sequence information in metadata");
@@ -511,14 +513,26 @@
 }
 
 bool HeifDecoderImpl::decodeAsync() {
+    wp<MediaMetadataRetriever> weakRetriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        weakRetriever = mRetriever;
+    }
+
     for (size_t i = 1; i < mNumSlices; i++) {
+        sp<MediaMetadataRetriever> retriever = weakRetriever.promote();
+        if (retriever == nullptr) {
+            return false;
+        }
+
         ALOGV("decodeAsync(): decoding slice %zu", i);
         size_t top = i * mSliceHeight;
         size_t bottom = (i + 1) * mSliceHeight;
         if (bottom > mImageInfo.mHeight) {
             bottom = mImageInfo.mHeight;
         }
-        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+
+        sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                 -1, mOutputColor, 0, top, mImageInfo.mWidth, bottom);
         {
             Mutex::Autolock autolock(mLock);
@@ -534,10 +548,13 @@
             mScanlineReady.signal();
         }
     }
-    // Aggressive clear to avoid holding on to resources
-    mRetriever.clear();
-
     // Hold on to mDataSource in case the client wants to redecode.
+
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever.clear();
+    }
+
     return false;
 }
 
@@ -549,6 +566,17 @@
         return true;
     }
 
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        if (mRetriever == nullptr) {
+            ALOGE("Failed to get MediaMetadataRetriever!");
+            return false;
+        }
+
+        retriever = mRetriever;
+    }
+
     // See if we want to decode in slices to allow client to start
     // scanline processing in parallel with decode. If this fails
     // we fallback to decoding the full frame.
@@ -563,7 +591,7 @@
 
         if (mNumSlices > 1) {
             // get first slice and metadata
-            sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+            sp<IMemory> frameMemory = retriever->getImageRectAtIndex(
                     -1, mOutputColor, 0, 0, mImageInfo.mWidth, mSliceHeight);
 
             if (frameMemory == nullptr || frameMemory->unsecurePointer() == nullptr) {
@@ -598,9 +626,9 @@
 
     if (mHasImage) {
         // image index < 0 to retrieve primary image
-        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
+        mFrameMemory = retriever->getImageAtIndex(-1, mOutputColor);
     } else if (mHasVideo) {
-        mFrameMemory = mRetriever->getFrameAtTime(0,
+        mFrameMemory = retriever->getFrameAtTime(0,
                 MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
     }
 
@@ -636,7 +664,10 @@
     mFrameDecoded = true;
 
     // Aggressively clear to avoid holding on to resources
-    mRetriever.clear();
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        mRetriever.clear();
+    }
 
     // Hold on to mDataSource in case the client wants to redecode.
     return true;
@@ -658,7 +689,17 @@
     // set total scanline to sequence height now
     mTotalScanline = mSequenceInfo.mHeight;
 
-    mFrameMemory = mRetriever->getFrameAtIndex(frameIndex, mOutputColor);
+    sp<MediaMetadataRetriever> retriever;
+    {
+        Mutex::Autolock _l(mRetrieverLock);
+        retriever = mRetriever;
+        if (retriever == nullptr) {
+            ALOGE("failed to get MediaMetadataRetriever!");
+            return false;
+        }
+    }
+
+    mFrameMemory = retriever->getFrameAtIndex(frameIndex, mOutputColor);
     if (mFrameMemory == nullptr || mFrameMemory->unsecurePointer() == nullptr) {
         ALOGE("decode: videoFrame is a nullptr");
         return false;
@@ -735,9 +776,9 @@
     HeifFrameInfo* info = &mImageInfo;
     if (info != nullptr) {
         return mImageInfo.mBitDepth;
+    } else {
+        return 0;
     }
-
-    return 0;
 }
 
 } // namespace android
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 86a8628..c1504cd 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -72,6 +72,8 @@
     bool mHasVideo;
     size_t mSequenceLength;
 
+    Mutex mRetrieverLock;
+
     // Slice decoding only
     Mutex mLock;
     Condition mScanlineReady;
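
The new mRetrieverLock above guards mRetriever in HeifDecoderImpl.cpp: the pattern in the .cpp changes is to copy the sp<> out under the lock, use the local strong reference without holding the lock, and clear it again under the lock. A generic sketch of that idiom follows; the class and member names are illustrative, not the decoder's exact code:

    #include <utils/Mutex.h>
    #include <utils/StrongPointer.h>

    namespace android {

    template <typename T>
    class GuardedRef {
      public:
        // Take a strong reference under the lock; callers then use it lock-free.
        sp<T> get() const {
            Mutex::Autolock _l(mLock);
            return mRef;
        }
        void set(const sp<T>& ref) {
            Mutex::Autolock _l(mLock);
            mRef = ref;
        }
        // Drop the reference eagerly (mirrors mRetriever.clear() under mRetrieverLock).
        void clear() {
            Mutex::Autolock _l(mLock);
            mRef.clear();
        }

      private:
        mutable Mutex mLock;
        sp<T> mRef;
    };

    }  // namespace android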
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
index dc12486..56f4765 100644
--- a/media/libheif/include/HeifDecoderAPI.h
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -47,7 +47,7 @@
     int32_t  mRotationAngle;           // Rotation angle, clockwise, should be multiple of 90
     uint32_t mBytesPerPixel;           // Number of bytes for one pixel
     int64_t  mDurationUs;              // Duration of the frame in us
-    uint32_t mBitDepth;                // Number of bits for each of the R/G/B channels
+    uint32_t mBitDepth;                // Number of bits per R/G/B channel
     std::vector<uint8_t> mIccData;     // ICC data array
 };
 
@@ -164,7 +164,7 @@
     virtual size_t skipScanlines(size_t count) = 0;
 
     /*
-     * Returns color depth in bits for each of the R/G/B channels.
+     * Returns the color depth per R/G/B channel.
      */
     virtual uint32_t getColorDepth() = 0;
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 2dd5784..590a7b7 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -304,6 +304,10 @@
 cc_library {
     name: "libmedia",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     srcs: [
         ":mediaextractorservice_aidl",
         "IDataSource.cpp",
@@ -356,7 +360,6 @@
 
     shared_libs: [
         "android.hidl.token@1.0-utils",
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
         "liblog",
@@ -381,7 +384,6 @@
     export_shared_lib_headers: [
         "libaudioclient",
         "libbinder",
-        "libandroidicu",
         //"libsonivox",
         "libmedia_omx",
         "framework-permission-aidl-cpp",
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 8a3b84e..86427ed 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -27,40 +27,6 @@
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
 
-// The binder is supposed to propagate the scheduler group across
-// the binder interface so that remote calls are executed with
-// the same priority as local calls. This is currently not working
-// so this change puts in a temporary hack to fix the issue with
-// metadata retrieval which can be a huge CPU hit if done on a
-// foreground thread.
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-
-#undef LOG_TAG
-#define LOG_TAG "IMediaMetadataRetriever"
-#include <utils/Log.h>
-#include <cutils/sched_policy.h>
-
-namespace android {
-
-static void sendSchedPolicy(Parcel& data)
-{
-    SchedPolicy policy;
-    get_sched_policy(gettid(), &policy);
-    data.writeInt32(policy);
-}
-
-static void setSchedPolicy(const Parcel& data)
-{
-    SchedPolicy policy = (SchedPolicy) data.readInt32();
-    set_sched_policy(gettid(), policy);
-}
-static void restoreSchedPolicy()
-{
-    set_sched_policy(gettid(), SP_FOREGROUND);
-}
-}; // end namespace android
-#endif
-
 namespace android {
 
 enum {
@@ -157,9 +123,6 @@
         data.writeInt32(option);
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_FRAME_AT_TIME, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -178,9 +141,6 @@
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
         data.writeInt32(thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_IMAGE_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -202,9 +162,6 @@
         data.writeInt32(top);
         data.writeInt32(right);
         data.writeInt32(bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_IMAGE_RECT_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -223,9 +180,6 @@
         data.writeInt32(index);
         data.writeInt32(colorFormat);
         data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(GET_FRAME_AT_INDEX, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -238,9 +192,6 @@
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         remote()->transact(EXTRACT_ALBUM_ART, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
@@ -253,9 +204,6 @@
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-        sendSchedPolicy(data);
-#endif
         data.writeInt32(keyCode);
         remote()->transact(EXTRACT_METADATA, data, &reply);
         status_t ret = reply.readInt32();
@@ -366,9 +314,6 @@
             bool metaOnly = (data.readInt32() != 0);
             ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
                     timeUs, option, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -376,9 +321,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case GET_IMAGE_AT_INDEX: {
@@ -389,9 +331,6 @@
             bool thumbnail = (data.readInt32() != 0);
             ALOGV("getImageAtIndex: index(%d), colorFormat(%d), metaOnly(%d), thumbnail(%d)",
                     index, colorFormat, metaOnly, thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -399,9 +338,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
 
@@ -415,9 +351,6 @@
             int bottom = data.readInt32();
             ALOGV("getImageRectAtIndex: index(%d), colorFormat(%d), rect {%d, %d, %d, %d}",
                     index, colorFormat, left, top, right, bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> bitmap = getImageRectAtIndex(
                     index, colorFormat, left, top, right, bottom);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
@@ -426,9 +359,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
 
@@ -439,9 +369,6 @@
             bool metaOnly = (data.readInt32() != 0);
             ALOGV("getFrameAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
                     index, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> frame = getFrameAtIndex(index, colorFormat, metaOnly);
             if (frame != nullptr) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -449,16 +376,10 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case EXTRACT_ALBUM_ART: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             sp<IMemory> albumArt = extractAlbumArt();
             if (albumArt != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
@@ -466,16 +387,10 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         case EXTRACT_METADATA: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            setSchedPolicy(data);
-#endif
             int keyCode = data.readInt32();
             const char* value = extractMetadata(keyCode);
             if (value != NULL) {  // Don't send NULL across the binder interface
@@ -484,9 +399,6 @@
             } else {
                 reply->writeInt32(UNKNOWN_ERROR);
             }
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-            restoreSchedPolicy();
-#endif
             return NO_ERROR;
         } break;
         default:
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 154988d..e191999 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -428,7 +428,7 @@
         return reply.readInt32();
     }
 
-    status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones)
+    status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones)
     {
         ALOGV("getActiveMicrophones");
         Parcel data, reply;
@@ -756,7 +756,7 @@
         case GET_ACTIVE_MICROPHONES: {
             ALOGV("GET_ACTIVE_MICROPHONES");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
-            std::vector<media::MicrophoneInfo> activeMicrophones;
+            std::vector<media::MicrophoneInfoFw> activeMicrophones;
             status_t status = getActiveMicrophones(&activeMicrophones);
             reply->writeInt32(status);
             if (status != NO_ERROR) {
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 85768bd..5aa9adc 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -898,10 +898,9 @@
         }
     }
 
-    for (size_t cameraId = 0; cameraId < mCameraIds.size(); ++cameraId) {
+    for (size_t refIndex = 0; refIndex < mCameraIds.size(); ++refIndex) {
+        const int cameraId = mCameraIds[refIndex];
         for (size_t j = 0; j < kNumRequiredProfiles; ++j) {
-            int refIndex = getRequiredProfileRefIndex(cameraId);
-            CHECK(refIndex != -1);
             RequiredProfileRefInfo *info =
                     &mRequiredProfileRefs[refIndex].mRefs[j];
 
@@ -931,14 +930,14 @@
 
                 int index = getCamcorderProfileIndex(cameraId, profile->mQuality);
                 if (index != -1) {
-                    ALOGV("Profile quality %d for camera %zu already exists",
+                    ALOGV("Profile quality %d for camera %d already exists",
                         profile->mQuality, cameraId);
                     CHECK(index == refIndex);
                     continue;
                 }
 
                 // Insert the new profile
-                ALOGV("Add a profile: quality %d=>%d for camera %zu",
+                ALOGV("Add a profile: quality %d=>%d for camera %d",
                         mCamcorderProfiles[info->mRefProfileIndex]->mQuality,
                         profile->mQuality, cameraId);
 
diff --git a/media/libmedia/include/media/CharacterEncodingDetector.h b/media/libmedia/include/media/CharacterEncodingDetector.h
index 62564b1..2acc868 100644
--- a/media/libmedia/include/media/CharacterEncodingDetector.h
+++ b/media/libmedia/include/media/CharacterEncodingDetector.h
@@ -21,9 +21,12 @@
 
 #include "StringArray.h"
 
-#include "unicode/ucnv.h"
-#include "unicode/ucsdet.h"
-#include "unicode/ustring.h"
+/** Declare opaque structures from ICU4C. */
+struct UConverter;
+typedef struct UConverter UConverter;
+
+struct UCharsetMatch;
+typedef struct UCharsetMatch UCharsetMatch;
 
 namespace android {
 
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 6e69782..05da5c2 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -18,8 +18,8 @@
 #ifndef ANDROID_IMEDIARECORDER_H
 #define ANDROID_IMEDIARECORDER_H
 
+#include <android/media/MicrophoneInfoFw.h>
 #include <binder/IInterface.h>
-#include <media/MicrophoneInfo.h>
 #include <system/audio.h>
 #include <vector>
 
@@ -74,7 +74,7 @@
     virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
     virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
     virtual status_t getActiveMicrophones(
-                        std::vector<media::MicrophoneInfo>* activeMicrophones) = 0;
+                        std::vector<media::MicrophoneInfoFw>* activeMicrophones) = 0;
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) = 0;
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 2b7818d..82ec9c5 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -19,7 +19,6 @@
 #define MEDIA_RECORDER_BASE_H_
 
 #include <media/AudioSystem.h>
-#include <media/MicrophoneInfo.h>
 #include <media/mediarecorder.h>
 #include <android/content/AttributionSourceState.h>
 
@@ -74,7 +73,7 @@
     virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) = 0;
     virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
     virtual status_t getActiveMicrophones(
-                        std::vector<media::MicrophoneInfo>* activeMicrophones) = 0;
+                        std::vector<media::MicrophoneInfoFw>* activeMicrophones) = 0;
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
     virtual status_t getPortId(audio_port_handle_t *portId) const = 0;
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index de4c7db..2f9b85e 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -195,7 +195,8 @@
     INVOKE_ID_SELECT_TRACK = 4,
     INVOKE_ID_UNSELECT_TRACK = 5,
     INVOKE_ID_SET_VIDEO_SCALING_MODE = 6,
-    INVOKE_ID_GET_SELECTED_TRACK = 7
+    INVOKE_ID_GET_SELECTED_TRACK = 7,
+    INVOKE_ID_SET_PLAYER_IID = 8,
 };
 
 // ----------------------------------------------------------------------------
@@ -213,7 +214,8 @@
 {
 public:
     explicit MediaPlayer(const android::content::AttributionSourceState& mAttributionSource =
-        android::content::AttributionSourceState());
+        android::content::AttributionSourceState(),
+        audio_session_t sessionId = AUDIO_SESSION_ALLOCATE);
     ~MediaPlayer();
             void            died();
             void            disconnect();
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index dd18144..602f72e 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -24,7 +24,7 @@
 #include <utils/Errors.h>
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaDeathNotifier.h>
-#include <media/MicrophoneInfo.h>
+#include <android/media/MicrophoneInfoFw.h>
 #include <android/content/AttributionSourceState.h>
 
 namespace android {
@@ -268,7 +268,7 @@
     status_t    setInputDevice(audio_port_handle_t deviceId);
     status_t    getRoutedDeviceId(audio_port_handle_t *deviceId);
     status_t    enableAudioDeviceCallback(bool enabled);
-    status_t    getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+    status_t    getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
     status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     status_t    setPreferredMicrophoneFieldDimension(float zoom);
 
diff --git a/media/libmedia/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
index 811936b..37cb059 100644
--- a/media/libmedia/include/media/omx/1.0/Conversion.h
+++ b/media/libmedia/include/media/omx/1.0/Conversion.h
@@ -606,8 +606,16 @@
     t->attr.height = l.getHeight();
     t->attr.stride = l.getStride();
     t->attr.format = static_cast<PixelFormat>(l.getPixelFormat());
-    t->attr.layerCount = l.getLayerCount();
-    t->attr.usage = l.getUsage();
+    // HACK
+    // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+    // anwBuffer.usage      4 bytes : GraphicBuffer::usage      8 bytes
+    // We would like to retain the high part of usage in the high part of layerCount
+    uint64_t usage = l.getUsage();
+    uint32_t usageHigh = (usage >> 32);
+    uint32_t usageLow = (0xFFFFFFFF & usage);
+    uint32_t layerLow = l.getLayerCount();
+    t->attr.layerCount = ((uint64_t(usageHigh) << 32) | layerLow);
+    t->attr.usage = usageLow;
     t->attr.id = l.getId();
     t->attr.generationNumber = l.getGenerationNumber();
     t->nativeHandle = hidl_handle(l.handle);
@@ -637,30 +645,37 @@
         }
     }
 
-    size_t const numInts = 12 + (handle ? handle->numInts : 0);
+    size_t const numInts = 13 + (handle ? handle->numInts : 0);
     int32_t* ints = new int32_t[numInts];
 
     size_t numFds = static_cast<size_t>(handle ? handle->numFds : 0);
     int* fds = new int[numFds];
 
-    ints[0] = 'GBFR';
+    ints[0] = 'GB01';
     ints[1] = static_cast<int32_t>(t.attr.width);
     ints[2] = static_cast<int32_t>(t.attr.height);
     ints[3] = static_cast<int32_t>(t.attr.stride);
     ints[4] = static_cast<int32_t>(t.attr.format);
-    ints[5] = static_cast<int32_t>(t.attr.layerCount);
+    // HACK
+    // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+    // anwBuffer.usage      4 bytes : GraphicBuffer::usage      8 bytes
+    // We would like to retain the high part of usage in the high part of layerCount
+    uint32_t layer = (0xFFFFFFFF & t.attr.layerCount);
+    uint32_t usageHigh = (t.attr.layerCount >> 32);
+    ints[5] = layer;
     ints[6] = static_cast<int32_t>(t.attr.usage);
     ints[7] = static_cast<int32_t>(t.attr.id >> 32);
     ints[8] = static_cast<int32_t>(t.attr.id & 0xFFFFFFFF);
     ints[9] = static_cast<int32_t>(t.attr.generationNumber);
     ints[10] = 0;
     ints[11] = 0;
+    ints[12] = usageHigh;
     if (handle) {
         ints[10] = static_cast<int32_t>(handle->numFds);
         ints[11] = static_cast<int32_t>(handle->numInts);
         int* intsStart = handle->data + handle->numFds;
         std::copy(handle->data, intsStart, fds);
-        std::copy(intsStart, intsStart + handle->numInts, &ints[12]);
+        std::copy(intsStart, intsStart + handle->numInts, &ints[13]);
     }
 
     void const* constBuffer = static_cast<void const*>(ints);
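
To make the layerCount/usage HACK above concrete: the 64-bit AnwBuffer layerCount field carries the 32-bit GraphicBuffer layerCount in its low half and the high 32 bits of the 64-bit usage in its high half, while the 32-bit AnwBuffer usage field carries the low 32 bits of usage. A standalone round-trip sketch of just the bit packing (no HIDL types involved):

    #include <cassert>
    #include <cstdint>

    // Pack as in the conversion above: the high 32 bits of usage ride in layerCount.
    static void pack(uint64_t usage, uint32_t layerCount,
                     uint64_t* anwLayerCount, uint32_t* anwUsage) {
        const uint32_t usageHigh = static_cast<uint32_t>(usage >> 32);
        *anwLayerCount = (static_cast<uint64_t>(usageHigh) << 32) | layerCount;
        *anwUsage = static_cast<uint32_t>(usage & 0xFFFFFFFF);
    }

    // Unpack on the receiving side to recover the original 64-bit usage.
    static void unpack(uint64_t anwLayerCount, uint32_t anwUsage,
                       uint64_t* usage, uint32_t* layerCount) {
        const uint64_t usageHigh = anwLayerCount >> 32;
        *usage = (usageHigh << 32) | anwUsage;
        *layerCount = static_cast<uint32_t>(anwLayerCount & 0xFFFFFFFF);
    }

    int main() {
        uint64_t anwLayerCount; uint32_t anwUsage;
        pack(/*usage=*/0x0000000100002933ULL, /*layerCount=*/1, &anwLayerCount, &anwUsage);

        uint64_t usage; uint32_t layerCount;
        unpack(anwLayerCount, anwUsage, &usage, &layerCount);
        assert(usage == 0x0000000100002933ULL && layerCount == 1);
        return 0;
    }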
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 5215c1b..b5c75b3 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -42,8 +42,8 @@
 using media::VolumeShaper;
 using content::AttributionSourceState;
 
-MediaPlayer::MediaPlayer(const AttributionSourceState& attributionSource)
-        : mAttributionSource(attributionSource)
+MediaPlayer::MediaPlayer(const AttributionSourceState& attributionSource,
+    const audio_session_t sessionId) : mAttributionSource(attributionSource)
 {
     ALOGV("constructor");
     mListener = NULL;
@@ -61,7 +61,12 @@
     mLeftVolume = mRightVolume = 1.0;
     mVideoWidth = mVideoHeight = 0;
     mLockThreadId = 0;
-    mAudioSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    if (sessionId == AUDIO_SESSION_ALLOCATE) {
+        mAudioSessionId = static_cast<audio_session_t>(
+            AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION));
+    } else {
+        mAudioSessionId = sessionId;
+    }
     AudioSystem::acquireAudioSessionId(mAudioSessionId, (pid_t)-1, (uid_t)-1); // always in client.
     mSendLevel = 0;
     mRetransmitEndpointValid = false;
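
With the constructor change above, a caller can hand MediaPlayer a pre-allocated audio session id instead of having one allocated internally. A minimal sketch, with attribution-source setup elided; the helper name is illustrative:

    #include <media/mediaplayer.h>
    #include <android/content/AttributionSourceState.h>

    using android::MediaPlayer;
    using android::sp;

    // AUDIO_SESSION_ALLOCATE preserves the old behavior of allocating a new
    // unique session id inside the constructor; any other session id is used as-is.
    sp<MediaPlayer> makePlayerWithSession(audio_session_t sessionId) {
        android::content::AttributionSourceState attributionSource;  // caller identity elided
        sp<MediaPlayer> player = new MediaPlayer(attributionSource, sessionId);
        return player;
    }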
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index cf12c36..bd06fb6 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -884,7 +884,8 @@
     return mMediaRecorder->enableAudioDeviceCallback(enabled);
 }
 
-status_t MediaRecorder::getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones)
+status_t MediaRecorder::getActiveMicrophones(
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones)
 {
     ALOGV("getActiveMicrophones");
 
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
index 2ed3126..6f3010c 100644
--- a/media/libmedia/tests/codeclist/Android.bp
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -25,7 +25,7 @@
 
 cc_test {
     name: "CodecListTest",
-    test_suites: ["device-tests", "mts"],
+    test_suites: ["device-tests"],
     gtest: true,
 
     // Support multilib variants (using different suffix per sub-architecture), which is needed on
diff --git a/media/libmedia/tests/codeclist/CodecListTest.cpp b/media/libmedia/tests/codeclist/CodecListTest.cpp
index bd2adf7..75adcca 100644
--- a/media/libmedia/tests/codeclist/CodecListTest.cpp
+++ b/media/libmedia/tests/codeclist/CodecListTest.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "CodecListTest"
 #include <utils/Log.h>
 
+#include <memory>
+
 #include <gtest/gtest.h>
 
 #include <binder/Parcel.h>
@@ -194,16 +196,15 @@
             }
         }
 
-        Parcel *codecInfoParcel = new Parcel();
+        std::unique_ptr<Parcel> codecInfoParcel(new Parcel());
         ASSERT_NE(codecInfoParcel, nullptr) << "Unable to create parcel";
 
-        status_t status = info->writeToParcel(codecInfoParcel);
+        status_t status = info->writeToParcel(codecInfoParcel.get());
         ASSERT_EQ(status, OK) << "Writing to parcel failed";
 
         codecInfoParcel->setDataPosition(0);
         sp<MediaCodecInfo> parcelCodecInfo = info->FromParcel(*codecInfoParcel);
         ASSERT_NE(parcelCodecInfo, nullptr) << "CodecInfo from parcel is null";
-        delete codecInfoParcel;
 
         EXPECT_STREQ(info->getCodecName(), parcelCodecInfo->getCodecName())
                 << "Returned codec name in info doesn't match";
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 165a8ad..649f813 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -20,7 +20,7 @@
     },
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth",
+        "com.android.btservices",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -49,8 +49,9 @@
         "liblog",
     ],
     header_libs: [
-        "libmedia_helper_headers",
         "libaudio_system_headers",
+        "libhardware_headers",
+        "libmedia_helper_headers",
     ],
     export_header_lib_headers: [
         "libmedia_helper_headers",
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 382a920..a61a1bc 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <media/AudioParameter.h>
+#include <hardware/audio.h>
 #include <system/audio.h>
 
 namespace android {
@@ -32,7 +33,15 @@
 const char * const AudioParameter::keyFrameCount = AUDIO_PARAMETER_STREAM_FRAME_COUNT;
 const char * const AudioParameter::keyInputSource = AUDIO_PARAMETER_STREAM_INPUT_SOURCE;
 const char * const AudioParameter::keyScreenState = AUDIO_PARAMETER_KEY_SCREEN_STATE;
+const char * const AudioParameter::keyClosing = AUDIO_PARAMETER_KEY_CLOSING;
+const char * const AudioParameter::keyExiting = AUDIO_PARAMETER_KEY_EXITING;
+const char * const AudioParameter::keyBtSco = AUDIO_PARAMETER_KEY_BT_SCO;
+const char * const AudioParameter::keyBtScoHeadsetName = AUDIO_PARAMETER_KEY_BT_SCO_HEADSET_NAME;
 const char * const AudioParameter::keyBtNrec = AUDIO_PARAMETER_KEY_BT_NREC;
+const char * const AudioParameter::keyBtScoWb = AUDIO_PARAMETER_KEY_BT_SCO_WB;
+const char * const AudioParameter::keyBtHfpEnable = AUDIO_PARAMETER_KEY_HFP_ENABLE;
+const char * const AudioParameter::keyBtHfpSamplingRate = AUDIO_PARAMETER_KEY_HFP_SET_SAMPLING_RATE;
+const char * const AudioParameter::keyBtHfpVolume = AUDIO_PARAMETER_KEY_HFP_VOLUME;
 const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
 const char * const AudioParameter::keyPresentationId = AUDIO_PARAMETER_STREAM_PRESENTATION_ID;
 const char * const AudioParameter::keyProgramId = AUDIO_PARAMETER_STREAM_PROGRAM_ID;
@@ -50,9 +59,13 @@
         AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES;
 const char * const AudioParameter::valueOn = AUDIO_PARAMETER_VALUE_ON;
 const char * const AudioParameter::valueOff = AUDIO_PARAMETER_VALUE_OFF;
+const char * const AudioParameter::valueTrue = AUDIO_PARAMETER_VALUE_TRUE;
+const char * const AudioParameter::valueFalse = AUDIO_PARAMETER_VALUE_FALSE;
 const char * const AudioParameter::valueListSeparator = AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+const char * const AudioParameter::keyBtA2dpSuspended = AUDIO_PARAMETER_KEY_BT_A2DP_SUSPENDED;
 const char * const AudioParameter::keyReconfigA2dp = AUDIO_PARAMETER_RECONFIG_A2DP;
 const char * const AudioParameter::keyReconfigA2dpSupported = AUDIO_PARAMETER_A2DP_RECONFIG_SUPPORTED;
+const char * const AudioParameter::keyBtLeSuspended = AUDIO_PARAMETER_KEY_BT_LE_SUSPENDED;
 // const char * const AudioParameter::keyDeviceSupportedEncapsulationModes =
 //        AUDIO_PARAMETER_DEVICE_SUP_ENCAPSULATION_MODES;
 // const char * const AudioParameter::keyDeviceSupportedEncapsulationMetadataTypes =
@@ -61,6 +74,12 @@
         AUDIO_PARAMETER_DEVICE_ADDITIONAL_OUTPUT_DELAY;
 const char * const AudioParameter::keyMaxAdditionalOutputDeviceDelay =
         AUDIO_PARAMETER_DEVICE_MAX_ADDITIONAL_OUTPUT_DELAY;
+const char * const AudioParameter::keyOffloadCodecAverageBitRate = AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE;
+const char * const AudioParameter::keyOffloadCodecSampleRate = AUDIO_OFFLOAD_CODEC_SAMPLE_RATE;
+const char * const AudioParameter::keyOffloadCodecChannels = AUDIO_OFFLOAD_CODEC_NUM_CHANNEL;
+const char * const AudioParameter::keyOffloadCodecDelaySamples = AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES;
+const char * const AudioParameter::keyOffloadCodecPaddingSamples =
+        AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
@@ -226,4 +245,9 @@
     }
 }
 
+bool AudioParameter::containsKey(const String8& key) const
+{
+    return mParameters.indexOfKey(key) >= 0;
+}
+
 } // namespace android
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 9a6ca8a..70f8af3 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -49,11 +49,28 @@
     static const char * const keyInputSource;
     static const char * const keyScreenState;
 
+    // TODO(b/73175392) consider improvement to AIDL StreamOut interface.
+    // keyClosing: "true" when AudioOutputDescriptor is closing.  Used by A2DP HAL.
+    // keyExiting: "1" on AudioFlinger Thread preExit.  Used by remote_submix and A2DP HAL.
+    static const char * const keyClosing;
+    static const char * const keyExiting;
+
+    //  keyBtSco: Whether BT SCO is 'on' or 'off'
+    //  keyBtScoHeadsetName: BT SCO headset name (for debugging)
     //  keyBtNrec: BT SCO Noise Reduction + Echo Cancellation parameters
+    //  keyBtScoWb: BT SCO NR wideband mode
+    //  keyHfp...: Parameters of the Hands-Free Profile
+    static const char * const keyBtSco;
+    static const char * const keyBtScoHeadsetName;
+    static const char * const keyBtNrec;
+    static const char * const keyBtScoWb;
+    static const char * const keyBtHfpEnable;
+    static const char * const keyBtHfpSamplingRate;
+    static const char * const keyBtHfpVolume;
+
     //  keyHwAvSync: get HW synchronization source identifier from a device
     //  keyMonoOutput: Enable mono audio playback
     //  keyStreamHwAvSync: set HW synchronization source identifier on a stream
-    static const char * const keyBtNrec;
     static const char * const keyHwAvSync;
     static const char * const keyMonoOutput;
     static const char * const keyStreamHwAvSync;
@@ -84,13 +101,19 @@
 
     static const char * const valueOn;
     static const char * const valueOff;
+    static const char * const valueTrue;
+    static const char * const valueFalse;
 
     static const char * const valueListSeparator;
 
+    // keyBtA2dpSuspended: 'true' or 'false'
     // keyReconfigA2dp: Ask HwModule to reconfigure A2DP offloaded codec
     // keyReconfigA2dpSupported: Query if HwModule supports A2DP offload codec config
+    // keyBtLeSuspended: 'true' or 'false'
+    static const char * const keyBtA2dpSuspended;
     static const char * const keyReconfigA2dp;
     static const char * const keyReconfigA2dpSupported;
+    static const char * const keyBtLeSuspended;
 
     // For querying device supported encapsulation capabilities. All returned values are integer,
     // which are bit fields composed from using encapsulation capability values as position bits.
@@ -107,6 +130,12 @@
     static const char * const keyAdditionalOutputDeviceDelay;
     static const char * const keyMaxAdditionalOutputDeviceDelay;
 
+    static const char * const keyOffloadCodecAverageBitRate;
+    static const char * const keyOffloadCodecSampleRate;
+    static const char * const keyOffloadCodecChannels;
+    static const char * const keyOffloadCodecDelaySamples;
+    static const char * const keyOffloadCodecPaddingSamples;
+
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
 
@@ -117,6 +146,12 @@
 
     status_t remove(const String8& key);
 
+    status_t get(const String8& key, int& value) const {
+        return getInt(key, value);
+    }
+    status_t get(const String8& key, float& value) const {
+        return getFloat(key, value);
+    }
     status_t get(const String8& key, String8& value) const;
     status_t getInt(const String8& key, int& value) const;
     status_t getFloat(const String8& key, float& value) const;
@@ -125,6 +160,7 @@
 
     size_t size() const { return mParameters.size(); }
 
+    bool containsKey(const String8& key) const;
 private:
     String8 mKeyValuePairs;
     KeyedVector <String8, String8> mParameters;
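
A minimal usage sketch of the typed get() overloads and containsKey() introduced above, assuming a caller that receives a key/value string from setParameters(); the key/type pairings shown are illustrative only and not part of this change.

    #include <media/AudioParameter.h>
    #include <utils/String8.h>

    using android::AudioParameter;
    using android::String8;

    // Parse some of the Bluetooth-related keys declared above from a key/value string.
    static void parseBtKeyValuePairs(const String8& kvPairs) {
        AudioParameter param(kvPairs);

        if (param.containsKey(String8(AudioParameter::keyBtSco))) {
            String8 scoState;                 // expected to be valueOn or valueOff
            (void)param.get(String8(AudioParameter::keyBtSco), scoState);
        }

        int hfpSampleRate;                    // resolved through the new get(..., int&) overload
        if (param.get(String8(AudioParameter::keyBtHfpSamplingRate), hfpSampleRate) == android::OK) {
            // reconfigure the HFP stream for hfpSampleRate Hz
        }

        float hfpVolume;                      // float chosen only to show the get(..., float&) overload
        if (param.get(String8(AudioParameter::keyBtHfpVolume), hfpVolume) == android::OK) {
            // apply hfpVolume
        }
    }
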
diff --git a/media/libmediametrics/MediaMetrics.cpp b/media/libmediametrics/MediaMetrics.cpp
index a3c2f1a..2240223 100644
--- a/media/libmediametrics/MediaMetrics.cpp
+++ b/media/libmediametrics/MediaMetrics.cpp
@@ -86,6 +86,11 @@
     if (item != NULL) item->setRate(attr, count, duration);
 }
 
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+                                 const std::string &string) {
+    mediametrics_setCString(handle, attr, string.c_str());
+}
+
 void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
                                  const char *value) {
     Item *item = (Item *) handle;
@@ -152,6 +157,14 @@
     return item->getRate(attr, count, duration, rate);
 }
 
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr,
+                                 std::string *string) {
+    Item *item = (Item *) handle;
+    if (item == NULL) return false;
+
+    return item->getString(attr, string);
+}
+
 // NB: caller owns the string that comes back, is responsible for freeing it
 bool mediametrics_getCString(mediametrics_handle_t handle, attr_t attr,
                                  char **value) {
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 1c30510..f80a467 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -57,6 +57,10 @@
 // The Audio Spatializer key appends the spatializerId (currently 0)
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER AMEDIAMETRICS_KEY_PREFIX_AUDIO "spatializer."
 
+// The Audio Spatializer device key appends the device type.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER_DEVICE \
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "device."
+
 // The AudioStream key appends the "streamId" to the prefix.
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM  AMEDIAMETRICS_KEY_PREFIX_AUDIO "stream."
 
@@ -68,6 +72,7 @@
 
 // Keys are strings used for MediaMetrics Item Keys
 #define AMEDIAMETRICS_KEY_AUDIO_FLINGER       AMEDIAMETRICS_KEY_PREFIX_AUDIO "flinger"
+#define AMEDIAMETRICS_KEY_AUDIO_MIDI          AMEDIAMETRICS_KEY_PREFIX_AUDIO "midi"
 #define AMEDIAMETRICS_KEY_AUDIO_POLICY        AMEDIAMETRICS_KEY_PREFIX_AUDIO "policy"
 
 // Error keys
@@ -83,6 +88,7 @@
 #define AMEDIAMETRICS_PROP_PREFIX_EFFECTIVE "effective."
 #define AMEDIAMETRICS_PROP_PREFIX_HAL       "hal."
 #define AMEDIAMETRICS_PROP_PREFIX_HAPTIC    "haptic."
+#define AMEDIAMETRICS_PROP_PREFIX_LAST      "last."
 #define AMEDIAMETRICS_PROP_PREFIX_SERVER    "server."
 
 // Properties within mediametrics are string constants denoted by
@@ -107,6 +113,7 @@
 // of suppressed in the Time Machine.
 #define AMEDIAMETRICS_PROP_SUFFIX_CHAR_DUPLICATES_ALLOWED '#'
 
+#define AMEDIAMETRICS_PROP_ADDRESS        "address"        // string, for example MAC address
 #define AMEDIAMETRICS_PROP_ALLOWUID       "_allowUid"      // int32_t, allow client uid to post
 #define AMEDIAMETRICS_PROP_AUDIOMODE      "audioMode"      // string (audio.flinger)
 #define AMEDIAMETRICS_PROP_AUXEFFECTID    "auxEffectId"    // int32 (AudioTrack)
@@ -115,10 +122,17 @@
 #define AMEDIAMETRICS_PROP_BURSTFRAMES    "burstFrames"    // int32
 #define AMEDIAMETRICS_PROP_CALLERNAME     "callerName"     // string, eg. "aaudio"
 #define AMEDIAMETRICS_PROP_CHANNELCOUNT   "channelCount"   // int32
+#define AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE "channelCountHardware" // int32
 #define AMEDIAMETRICS_PROP_CHANNELMASK    "channelMask"    // int32
+#define AMEDIAMETRICS_PROP_CHANNELMASKS   "channelMasks"   // string with channelMask values
+                                                           // separated by |.
+#define AMEDIAMETRICS_PROP_CLOSEDCOUNT   "closedCount"    // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_CONTENTTYPE    "contentType"    // string attributes (AudioTrack)
 #define AMEDIAMETRICS_PROP_CUMULATIVETIMENS "cumulativeTimeNs" // int64_t playback/record time
                                                            // since start
+#define AMEDIAMETRICS_PROP_DEVICEDISCONNECTED "deviceDisconnected" // string true/false (MIDI)
+#define AMEDIAMETRICS_PROP_DEVICEID       "deviceId"       // int32 device id (MIDI)
+
 // DEVICE values are averaged since starting on device
 #define AMEDIAMETRICS_PROP_DEVICELATENCYMS "deviceLatencyMs" // double - avg latency time
 #define AMEDIAMETRICS_PROP_DEVICESTARTUPMS "deviceStartupMs" // double - avg startup time
@@ -132,7 +146,9 @@
 
 #define AMEDIAMETRICS_PROP_DIRECTION      "direction"      // string AAudio input or output
 #define AMEDIAMETRICS_PROP_DURATIONNS     "durationNs"     // int64 duration time span
+#define AMEDIAMETRICS_PROP_ENABLED        "enabled"        // string true/false.
 #define AMEDIAMETRICS_PROP_ENCODING       "encoding"       // string value of format
+#define AMEDIAMETRICS_PROP_ENCODINGHARDWARE "encodingHardware" // string value of hardware format
 
 #define AMEDIAMETRICS_PROP_EVENT          "event#"         // string value (often func name)
 #define AMEDIAMETRICS_PROP_EXECUTIONTIMENS "executionTimeNs"  // time to execute the event
@@ -141,10 +157,15 @@
 #define AMEDIAMETRICS_PROP_FLAGS          "flags"
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
+#define AMEDIAMETRICS_PROP_HARDWARETYPE   "hardwareType"   // int32 (MIDI)
+#define AMEDIAMETRICS_PROP_HASHEADTRACKER  "hasHeadTracker" // string true/false
+#define AMEDIAMETRICS_PROP_HEADTRACKERENABLED "headTrackerEnabled" // string true/false
 #define AMEDIAMETRICS_PROP_HEADTRACKINGMODES "headTrackingModes" // string |, like modes.
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
+#define AMEDIAMETRICS_PROP_INPUTPORTCOUNT  "inputPortCount" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
+#define AMEDIAMETRICS_PROP_ISSHARED      "isShared"       // string true/false (MIDI)
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
 #define AMEDIAMETRICS_PROP_LEVELS         "levels"          // string | with levels
 #define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
@@ -154,13 +175,16 @@
 #define AMEDIAMETRICS_PROP_MODES          "modes"          // string | with modes
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
+#define AMEDIAMETRICS_PROP_OPENEDCOUNT   "openedCount"    // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
+#define AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT "outputPortCount" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_PERFORMANCEMODE "performanceMode"    // string value, "none", "lowLatency"
 #define AMEDIAMETRICS_PROP_PLAYBACK_PITCH "playback.pitch" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
 #define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
+#define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDMICDIRECTION "selectedMicDirection" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDMICFIELDDIRECTION "selectedMicFieldDimension" // double
@@ -183,6 +207,13 @@
                                                            // Treated as "debug" information.
 
 #define AMEDIAMETRICS_PROP_STREAMTYPE     "streamType"     // string (AudioTrack)
+#define AMEDIAMETRICS_PROP_SUPPORTSMIDIUMP "supportsMidiUmp" // string true/false (MIDI).
+                                                             // Universal MIDI Packets is a new
+                                                             // format to transport packets.
+                                                             // Raw byte streams are used if this
+                                                             // is false.
+#define AMEDIAMETRICS_PROP_TOTALINPUTBYTES "totalInputBytes" // int32 (MIDI)
+#define AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES "totalOutputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
 #define AMEDIAMETRICS_PROP_THROTTLEMS     "throttleMs"     // double
 #define AMEDIAMETRICS_PROP_TRACKID        "trackId"        // int32 port id of track/record
@@ -191,6 +222,7 @@
 #define AMEDIAMETRICS_PROP_UNDERRUN       "underrun"       // int32
 #define AMEDIAMETRICS_PROP_UNDERRUNFRAMES "underrunFrames" // int64_t from Thread
 #define AMEDIAMETRICS_PROP_USAGE          "usage"          // string attributes (ATrack)
+#define AMEDIAMETRICS_PROP_USINGALSA     "usingAlsa"      // string true/false (MIDI)
 #define AMEDIAMETRICS_PROP_VOICEVOLUME    "voiceVolume"    // double (audio.flinger)
 #define AMEDIAMETRICS_PROP_VOLUME_LEFT    "volume.left"    // double (AudioTrack)
 #define AMEDIAMETRICS_PROP_VOLUME_RIGHT   "volume.right"   // double (AudioTrack)
@@ -215,6 +247,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE     "create"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATEAUDIOPATCH "createAudioPatch"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR       "ctor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED "deviceClosed" // MIDI
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DISCONNECT "disconnect"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR       "dtor"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM "endAAudioStream" // AAudioStream
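
As a rough illustration of how the new "audio.midi" key and MIDI properties compose into an item (not part of this change; the attribute values below are made up):

    #include <media/MediaMetrics.h>
    #include <media/MediaMetricsConstants.h>

    // Post an illustrative MIDI item using the constants added above.
    static void reportMidiDeviceClosed() {
        mediametrics_handle_t item = mediametrics_create(AMEDIAMETRICS_KEY_AUDIO_MIDI);
        mediametrics_setCString(item, AMEDIAMETRICS_PROP_EVENT,
                                AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED);
        mediametrics_setInt32(item, AMEDIAMETRICS_PROP_DEVICEID, 1);         // made-up id
        mediametrics_setInt32(item, AMEDIAMETRICS_PROP_INPUTPORTCOUNT, 1);   // made-up counts
        mediametrics_setInt32(item, AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, 1);
        mediametrics_setCString(item, AMEDIAMETRICS_PROP_USINGALSA, "true");
        mediametrics_selfRecord(item);
        mediametrics_delete(item);
    }
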
diff --git a/media/libmediametrics/include/media/MediaMetrics.h b/media/libmediametrics/include/media/MediaMetrics.h
index 76abe86..58612a3 100644
--- a/media/libmediametrics/include/media/MediaMetrics.h
+++ b/media/libmediametrics/include/media/MediaMetrics.h
@@ -50,7 +50,7 @@
 void mediametrics_setRate(mediametrics_handle_t handle, attr_t attr,
                           int64_t count, int64_t duration);
 void mediametrics_setCString(mediametrics_handle_t handle, attr_t attr,
-                            const char * value);
+                             const char * value);
 
 // fused get/add/set; if attr wasn't there, it's a simple set.
 // these do not provide atomicity or mutual exclusion, only simpler code sequences.
@@ -95,4 +95,11 @@
 
 __END_DECLS
 
+#ifdef __cplusplus
+#include <string>
+void mediametrics_setString(mediametrics_handle_t handle, attr_t attr,
+                            const std::string &value);
+bool mediametrics_getString(mediametrics_handle_t handle, attr_t attr, std::string *value);
+#endif // __cplusplus
+
 #endif
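
A short sketch of the C++-only std::string helpers declared above; the key and attribute names are placeholders, not values used by the framework.

    #include <string>
    #include <media/MediaMetrics.h>

    static void roundTripStringAttribute(const std::string& codecName) {
        mediametrics_handle_t item = mediametrics_create("sample.key");   // placeholder key
        mediametrics_setString(item, "codecName", codecName);             // wraps setCString(c_str())

        std::string readBack;
        if (mediametrics_getString(item, "codecName", &readBack)) {
            // readBack now equals codecName
        }
        mediametrics_delete(item);
    }
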
diff --git a/media/libmediametrics/include/media/MediaMetricsItem.h b/media/libmediametrics/include/media/MediaMetricsItem.h
index de56665..03834d4 100644
--- a/media/libmediametrics/include/media/MediaMetricsItem.h
+++ b/media/libmediametrics/include/media/MediaMetricsItem.h
@@ -1048,6 +1048,9 @@
         }
         return true;
     }
+    bool getString(const char *key, std::string *value) const {
+        return get(key, value);
+    }
     // Caller owns the returned string
     bool getCString(const char *key, char **value) const {
         std::string s;
@@ -1057,9 +1060,6 @@
         }
         return false;
     }
-    bool getString(const char *key, std::string *value) const {
-        return get(key, value);
-    }
 
     const Prop::Elem* get(const char *key) const {
         const Prop *prop = findProp(key);
diff --git a/media/libmediametrics/libmediametrics.map.txt b/media/libmediametrics/libmediametrics.map.txt
index c46281a..f37af64 100644
--- a/media/libmediametrics/libmediametrics.map.txt
+++ b/media/libmediametrics/libmediametrics.map.txt
@@ -1,29 +1,29 @@
 LIBMEDIAMETRICS_1 {
   global:
-    mediametrics_addDouble; # apex
-    mediametrics_addInt32; # apex
-    mediametrics_addInt64; # apex
-    mediametrics_addRate; # apex
-    mediametrics_count; # apex
-    mediametrics_create; # apex
-    mediametrics_delete; # apex
-    mediametrics_freeCString; # apex
-    mediametrics_getAttributes; # apex
-    mediametrics_getCString; # apex
-    mediametrics_getDouble; # apex
-    mediametrics_getInt32; # apex
-    mediametrics_getInt64; # apex
-    mediametrics_getKey; # apex
-    mediametrics_getRate; # apex
-    mediametrics_isEnabled; # apex
-    mediametrics_readable; # apex
-    mediametrics_selfRecord; # apex
-    mediametrics_setCString; # apex
-    mediametrics_setDouble; # apex
-    mediametrics_setInt32; # apex
-    mediametrics_setInt64; # apex
-    mediametrics_setRate; # apex
-    mediametrics_setUid; # apex
+    mediametrics_addDouble; # systemapi
+    mediametrics_addInt32; # systemapi
+    mediametrics_addInt64; # systemapi
+    mediametrics_addRate; # systemapi
+    mediametrics_count; # systemapi
+    mediametrics_create; # systemapi
+    mediametrics_delete; # systemapi
+    mediametrics_freeCString; # systemapi
+    mediametrics_getAttributes; # systemapi
+    mediametrics_getCString; # systemapi
+    mediametrics_getDouble; # systemapi
+    mediametrics_getInt32; # systemapi
+    mediametrics_getInt64; # systemapi
+    mediametrics_getKey; # systemapi
+    mediametrics_getRate; # systemapi
+    mediametrics_isEnabled; # systemapi
+    mediametrics_readable; # systemapi
+    mediametrics_selfRecord; # systemapi
+    mediametrics_setCString; # systemapi
+    mediametrics_setDouble; # systemapi
+    mediametrics_setInt32; # systemapi
+    mediametrics_setInt64; # systemapi
+    mediametrics_setRate; # systemapi
+    mediametrics_setUid; # systemapi
   local:
     *;
 };
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 8e19d02..9e9e9d8 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1805,7 +1805,8 @@
 MediaPlayerService::AudioOutput::AudioOutput(audio_session_t sessionId,
         const AttributionSourceState& attributionSource, const audio_attributes_t* attr,
         const sp<AudioSystem::AudioDeviceCallback>& deviceCallback)
-    : mCallback(NULL),
+    : mCachedPlayerIId(PLAYER_PIID_INVALID),
+      mCallback(NULL),
       mCallbackCookie(NULL),
       mCallbackData(NULL),
       mStreamType(AUDIO_STREAM_MUSIC),
@@ -1836,7 +1837,6 @@
     } else {
         mAttributes = NULL;
     }
-
     setMinBufferCount();
 }
 
@@ -2314,6 +2314,10 @@
         return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
     });
 
+    if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+        t->setPlayerIId(mCachedPlayerIId);
+    }
+
     mSampleRateHz = sampleRate;
     mFlags = flags;
     mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
@@ -2366,6 +2370,17 @@
     return NO_INIT;
 }
 
+void MediaPlayerService::AudioOutput::setPlayerIId(int32_t playerIId)
+{
+    ALOGV("setPlayerIId(%d)", playerIId);
+    Mutex::Autolock lock(mLock);
+    mCachedPlayerIId = playerIId;
+
+    if (mTrack != nullptr) {
+        mTrack->setPlayerIId(mCachedPlayerIId);
+    }
+}
+
 void MediaPlayerService::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) {
     Mutex::Autolock lock(mLock);
     mNextOutput = nextOutput;
@@ -2683,7 +2698,7 @@
     // This is a benign busy-wait, with the next data request generated 10 ms or more later;
     // nevertheless for power reasons, we don't want to see too many of these.
 
-    ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+    ALOGV_IF(actualSize == 0 && buffer.size() > 0, "callbackwrapper: empty buffer returned");
     unlock();
     return actualSize;
 }
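
The AudioOutput change above follows a cache-then-apply pattern: the player interface id is remembered even before an AudioTrack exists and pushed to the track once open() creates one. A standalone sketch of the same pattern (class and member names are illustrative, not the AudioOutput code):

    #include <cstdint>
    #include <mutex>

    class CachedIIdSink {
    public:
        static constexpr int32_t kInvalidIId = -1;     // stand-in for PLAYER_PIID_INVALID

        void setPlayerIId(int32_t iid) {
            std::lock_guard<std::mutex> lock(mLock);
            mCachedIId = iid;
            if (mTrackOpen) {
                applyIIdLocked();                      // track already exists: forward immediately
            }
        }

        void onTrackOpened() {                         // called from open() after track creation
            std::lock_guard<std::mutex> lock(mLock);
            mTrackOpen = true;
            if (mCachedIId != kInvalidIId) {
                applyIIdLocked();                      // replay the cached id onto the new track
            }
        }

    private:
        void applyIIdLocked() { /* mTrack->setPlayerIId(mCachedIId) in the real code */ }

        std::mutex mLock;
        int32_t mCachedIId = kInvalidIId;
        bool mTrackOpen = false;
    };
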
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 86be3fe..52c2f79 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -113,6 +113,8 @@
                 bool doNotReconnect = false,
                 uint32_t suggestedFrameCount = 0);
 
+        virtual void            setPlayerIId(int32_t playerIId);
+
         virtual status_t        start();
         virtual ssize_t         write(const void* buffer, size_t size, bool blocking = true);
         virtual void            stop();
@@ -160,6 +162,7 @@
         sp<AudioTrack>          mTrack;
         sp<AudioTrack>          mRecycledTrack;
         sp<AudioOutput>         mNextOutput;
+        int                     mCachedPlayerIId;
         AudioCallback           mCallback;
         void *                  mCallbackCookie;
         sp<CallbackData>        mCallbackData;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 4aa80be..58fc06d 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -563,7 +563,7 @@
 }
 
 status_t MediaRecorderClient::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones) {
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones) {
     ALOGV("getActiveMicrophones");
     Mutex::Autolock lock(mLock);
     if (mRecorder != NULL) {
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index dcb9f82..dec0c99 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -83,7 +83,7 @@
     virtual     status_t   getRoutedDeviceId(audio_port_handle_t* deviceId);
     virtual     status_t   enableAudioDeviceCallback(bool enabled);
     virtual     status_t   getActiveMicrophones(
-                              std::vector<media::MicrophoneInfo>* activeMicrophones);
+                              std::vector<media::MicrophoneInfoFw>* activeMicrophones);
     virtual     status_t   setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual     status_t   setPreferredMicrophoneFieldDimension(float zoom);
                 status_t   getPortId(audio_port_handle_t *portId) override;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index b3f7f25..db979d7 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -211,6 +211,7 @@
         mime = MEDIA_MIMETYPE_VIDEO_AV1;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+        isHeif = true;
     }
 
     sp<AMessage> format = new AMessage;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index ea1fdf4..0365085 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -129,6 +129,7 @@
       mRTPCVOExtMap(-1),
       mRTPCVODegrees(0),
       mRTPSockDscp(0),
+      mRTPSockOptEcn(0),
       mRTPSockNetwork(0),
       mLastSeqNo(0),
       mStarted(false),
@@ -910,6 +911,13 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpEcn(int32_t ecn) {
+    ALOGV("setParamRtpEcn: %d", ecn);
+
+    mRTPSockOptEcn = ecn;
+    return OK;
+}
+
 status_t StagefrightRecorder::requestIDRFrame() {
     status_t ret = BAD_VALUE;
     if (mVideoEncoderSource != NULL) {
@@ -1091,6 +1099,11 @@
         if (safe_strtoi32(value.string(), &dscp)) {
             return setParamRtpDscp(dscp);
         }
+    } else if (key == "rtp-param-set-socket-ecn") {
+        int32_t targetEcn;
+        if (safe_strtoi32(value.string(), &targetEcn)) {
+            return setParamRtpEcn(targetEcn);
+        }
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle;
         if (safe_strtoi64(value.string(), &networkHandle)) {
@@ -1272,6 +1285,9 @@
             if (mRTPSockDscp > 0) {
                 meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
             }
+            if (mRTPSockOptEcn > 0) {
+                meta->setInt32(kKeyRtpEcn, mRTPSockOptEcn);
+            }
 
             status = mWriter->start(meta.get());
             break;
@@ -1966,6 +1982,14 @@
             format->setString("mime", MEDIA_MIMETYPE_VIDEO_HEVC);
             break;
 
+        case VIDEO_ENCODER_DOLBY_VISION:
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
+            break;
+
+        case VIDEO_ENCODER_AV1:
+            format->setString("mime", MEDIA_MIMETYPE_VIDEO_AV1);
+            break;
+
         default:
             CHECK(!"Should not be here, unsupported video encoding.");
             break;
@@ -2556,7 +2580,7 @@
 }
 
 status_t StagefrightRecorder::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones) {
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones) {
     if (mAudioSourceNode != 0) {
         return mAudioSourceNode->getActiveMicrophones(activeMicrophones);
     }
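
A hedged example of how a client might hand the new ECN parameter to the recorder; StagefrightRecorder only attaches kKeyRtpEcn to the writer when the value is greater than zero, and the value 1 here is purely illustrative.

    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    // Forwarded to setParameter() above, which parses it via safe_strtoi32()
    // and stores it through setParamRtpEcn().
    static android::status_t enableRtpEcn(const android::sp<android::MediaRecorder>& recorder) {
        return recorder->setParameters(android::String8("rtp-param-set-socket-ecn=1"));
    }
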
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index d7785da..0b6a5bb 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -83,7 +83,7 @@
     virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
     virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
     virtual status_t enableAudioDeviceCallback(bool enabled);
-    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
     virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
             status_t getPortId(audio_port_handle_t *portId) const override;
@@ -153,6 +153,7 @@
     int32_t mRTPCVOExtMap;
     int32_t mRTPCVODegrees;
     int32_t mRTPSockDscp;
+    int32_t mRTPSockOptEcn;
     int64_t mRTPSockNetwork;
     uint32_t mLastSeqNo;
 
@@ -247,6 +248,7 @@
     status_t setRTPCVOExtMap(int32_t extmap);
     status_t setRTPCVODegrees(int32_t cvoDegrees);
     status_t setParamRtpDscp(int32_t dscp);
+    status_t setParamRtpEcn(int32_t ecn);
     status_t setSocketNetwork(int64_t networkHandle);
     status_t requestIDRFrame();
     void clipVideoBitRate();
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index a36f1d6..5e95c87 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -46,6 +46,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediaplayerservice",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -60,15 +68,51 @@
     static_libs: [
         "libstagefright_rtsp",
         "libbase",
+        "libstagefright_nuplayer",
+        "libplayerservice_datasource",
+        "libstagefright_timedtext",
+        "libaudioprocessing_base",
     ],
     shared_libs: [
+        "android.hardware.media.omx@1.0",
         "av-types-aidl-cpp",
         "media_permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
         "libandroid_net",
+        "libaudioclient",
         "libcamera_client",
+        "libcodec2_client",
+        "libcrypto",
+        "libdatasource",
+        "libdrmframework",
         "libgui",
+        "libhidlbase",
+        "liblog",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmediadrm",
         "libmediametrics",
+        "libmediautils",
+        "libmemunreachable",
+        "libnetd_client",
+        "libpowermanager",
+        "libstagefright_httplive",
+        "packagemanager_aidl-cpp",
+        "libfakeservicemanager",
+        "libvibrator",
+        "libnbaio",
+        "libnblog",
+        "libpowermanager",
+        "libaudioprocessing",
+        "libaudioflinger",
+        "libresourcemanagerservice",
+        "libmediametricsservice",
+        "mediametricsservice-aidl-cpp",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
     ],
 }
 
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index 7799f44..a189d04 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -26,6 +26,8 @@
 #include <media/IMediaRecorder.h>
 #include <media/IRemoteDisplay.h>
 #include <media/IRemoteDisplayClient.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
 #include <media/stagefright/RemoteDataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <thread>
@@ -102,6 +104,42 @@
     IBinder *onAsBinder() { return nullptr; };
 };
 
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    TestMediaHTTPConnection() {}
+    virtual ~TestMediaHTTPConnection() {}
+
+    virtual bool connect(const char* /*uri*/, const KeyedVector<String8, String8>* /*headers*/) {
+        return true;
+    }
+
+    virtual void disconnect() { return; }
+
+    virtual ssize_t readAt(off64_t /*offset*/, void* /*data*/, size_t size) { return size; }
+
+    virtual off64_t getSize() { return 0; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) { return NO_ERROR; }
+    virtual status_t getUri(String8* /*uri*/) { return NO_ERROR; }
+
+  private:
+    DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public BnInterface<IMediaHTTPService> {
+  public:
+    TestMediaHTTPService() {}
+    ~TestMediaHTTPService(){};
+
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mMediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+        return mMediaHTTPConnection;
+    }
+
+  private:
+    sp<TestMediaHTTPConnection> mMediaHTTPConnection = nullptr;
+    DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
 class BinderDeathNotifier : public IBinder::DeathRecipient {
    public:
     void binderDied(const wp<IBinder> &) { abort(); }
@@ -140,7 +178,9 @@
             AString out;
             encodeBase64(uriSuffix.data(), uriSuffix.size(), &out);
             uri += out.c_str();
-            status = mMediaPlayer->setDataSource(nullptr /*httpService*/, uri.c_str(), &headers);
+            sp<TestMediaHTTPService> testService = sp<TestMediaHTTPService>::make();
+            status =
+                    mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
             break;
         }
         case fd: {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index b0040fe..fdac1a1 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -18,6 +18,10 @@
 #include <media/stagefright/foundation/AString.h>
 #include "fuzzer/FuzzedDataProvider.h"
 
+#include <AudioFlinger.h>
+#include <MediaPlayerService.h>
+#include <ResourceManagerService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
 #include <StagefrightRecorder.h>
 #include <camera/Camera.h>
 #include <camera/android/hardware/ICamera.h>
@@ -25,6 +29,7 @@
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <mediametricsservice/MediaMetricsService.h>
 #include <thread>
 
 using namespace std;
@@ -193,7 +198,7 @@
     mStfRecorder->setInputDevice(deviceId);
     mStfRecorder->getRoutedDeviceId(&deviceId);
 
-    vector<android::media::MicrophoneInfo> activeMicrophones{};
+    vector<android::media::MicrophoneInfoFw> activeMicrophones{};
     mStfRecorder->getActiveMicrophones(&activeMicrophones);
 
     int32_t portId;
@@ -305,6 +310,21 @@
     mStfRecorder->reset();
 }
 
+extern "C" int LLVMFuzzerInitialize(int /* *argc */, char /* ***argv */) {
+    /**
+     * Initializing a FakeServiceManager and adding the instances
+     * of all the required services
+     */
+    sp<IServiceManager> fakeServiceManager = new FakeServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    MediaPlayerService::instantiate();
+    AudioFlinger::instantiate();
+    ResourceManagerService::instantiate();
+    fakeServiceManager->addService(String16(MediaMetricsService::kServiceName),
+                                    new MediaMetricsService());
+    return 0;
+}
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
     MediaRecorderClientFuzzer mrcFuzzer(data, size);
     mrcFuzzer.process();
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 1cbd8a0..fb20aab 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -123,6 +123,8 @@
                 bool doNotReconnect = false,
                 uint32_t suggestedFrameCount = 0) = 0;
 
+        virtual void        setPlayerIId(int32_t playerIId) = 0;
+
         virtual status_t    start() = 0;
 
         /* Input parameter |size| is in byte units stored in |buffer|.
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index c3bd207..e8556dd 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -28,11 +28,13 @@
 
 namespace android {
 
+const int32_t INVALID_DISPLAY_ID = -1;
+
 AWakeLock::AWakeLock() :
     mPowerManager(NULL),
     mWakeLockToken(NULL),
     mWakeLockCount(0),
-    mDeathRecipient(new PMDeathRecipient(this)) {}
+    mDeathRecipient(new PMDeathRecipient(this)){}
 
 AWakeLock::~AWakeLock() {
     if (mPowerManager != NULL) {
@@ -60,14 +62,19 @@
             sp<IBinder> binder = new BBinder();
             int64_t token = IPCThreadState::self()->clearCallingIdentity();
             binder::Status status = mPowerManager->acquireWakeLock(
-                    binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
-                    String16("AWakeLock"), String16("media"),
-                    {} /* workSource */, {} /* historyTag */, -1 /* displayId */,
-                    nullptr /* callback */);
+                binder,
+                /*flags= */ POWERMANAGER_PARTIAL_WAKE_LOCK,
+                /*tag=*/ String16("AWakeLock"),
+                /*packageName=*/ String16("media"),
+                /*ws=*/ {},
+                /*historyTag=*/ {},
+                /*displayId=*/ INVALID_DISPLAY_ID,
+                /*callback=*/NULL);
             IPCThreadState::self()->restoreCallingIdentity(token);
             if (status.isOk()) {
                 mWakeLockToken = binder;
                 mWakeLockCount++;
+                ALOGI("AwakeLock acquired");
                 return true;
             }
         }
@@ -94,6 +101,7 @@
             IPCThreadState::self()->restoreCallingIdentity(token);
         }
         mWakeLockToken.clear();
+        ALOGI("AwakeLock released");
     }
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 71a3168..911463b 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -17,6 +17,14 @@
     ],
 }
 
+cc_library_headers {
+    name: "libstagefright_nuplayer_headers",
+
+    export_include_dirs: [
+        "include",
+    ],
+}
+
 cc_library_static {
 
     srcs: [
@@ -81,6 +89,7 @@
 
     static_libs: [
         "libplayerservice_datasource",
+        "libstagefright_esds",
         "libstagefright_timedtext",
     ],
 
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 36e4d4a..1358faa 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -992,6 +992,11 @@
         format->setInt32("auto", !!isAutoselect);
         format->setInt32("default", !!isDefault);
         format->setInt32("forced", !!isForced);
+    } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+        int32_t hapticChannelCount;
+        if (meta->findInt32(kKeyHapticChannelCount, &hapticChannelCount)) {
+            format->setInt32("haptic-channel-count", hapticChannelCount);
+        }
     }
 
     return format;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 9b4fc8f..e5f2b2b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -60,7 +60,7 @@
 #include <gui/Surface.h>
 
 
-#include "ESDS.h"
+#include <media/esds/ESDS.h>
 #include <media/stagefright/Utils.h>
 
 namespace android {
@@ -555,6 +555,13 @@
         reply->writeInt32(isAuto);
         reply->writeInt32(isDefault);
         reply->writeInt32(isForced);
+    } else if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+        int32_t hapticChannelCount;
+        bool hasHapticChannels = format->findInt32("haptic-channel-count", &hapticChannelCount);
+        reply->writeInt32(hasHapticChannels);
+        if (hasHapticChannels) {
+            reply->writeInt32(hapticChannelCount);
+        }
     }
 }
 
@@ -1220,7 +1227,11 @@
                             notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                         } else {
                             // Only audio track has error. Video track could be still good to play.
-                            notifyListener(MEDIA_INFO, MEDIA_INFO_PLAY_AUDIO_ERROR, err);
+                            if (mVideoEOS) {
+                                notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                            } else {
+                                notifyListener(MEDIA_INFO, MEDIA_INFO_PLAY_AUDIO_ERROR, err);
+                            }
                         }
                         mAudioDecoderError = true;
                     } else {
@@ -1231,7 +1242,11 @@
                             notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                         } else {
                             // Only video track has error. Audio track could be still good to play.
-                            notifyListener(MEDIA_INFO, MEDIA_INFO_PLAY_VIDEO_ERROR, err);
+                            if (mAudioEOS) {
+                                notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                            } else {
+                                notifyListener(MEDIA_INFO, MEDIA_INFO_PLAY_VIDEO_ERROR, err);
+                            }
                         }
                         mVideoDecoderError = true;
                     }
@@ -3017,6 +3032,16 @@
     }
  }
 
+ void NuPlayer::dump(AString& logString) {
+    logString.append("renderer(");
+    if (mRenderer != nullptr) {
+        mRenderer->dump(logString);
+    } else {
+        logString.append("null");
+    }
+    logString.append(")");
+ }
+
 // Modular DRM begin
 status_t NuPlayer::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
 {
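
The two NuPlayer hunks above change how a single-stream decoder error is reported: if the other stream has already reached end of stream, the player now signals playback completion instead of a non-fatal decode error. A condensed sketch of that decision (the enum and function names are illustrative):

    enum class ErrorSignal { kPlaybackComplete, kPlayAudioError, kPlayVideoError };

    // audioFailed: the audio decoder reported the error; otherStreamAtEos: the other
    // stream (video for an audio error, audio for a video error) already hit EOS.
    static ErrorSignal classifyDecoderError(bool audioFailed, bool otherStreamAtEos) {
        if (otherStreamAtEos) {
            return ErrorSignal::kPlaybackComplete;   // nothing left to render anyway
        }
        return audioFailed ? ErrorSignal::kPlayAudioError : ErrorSignal::kPlayVideoError;
    }
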
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 52b2041..8da09c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1104,14 +1104,14 @@
                         static_cast<MediaBufferHolder*>(holder.get())->mediaBuffer() : nullptr;
                 }
                 if (mediaBuf != NULL) {
-                    if (mediaBuf->size() > codecBuffer->capacity()) {
+                    if (mediaBuf->range_length() > codecBuffer->capacity()) {
                         handleError(ERROR_BUFFER_TOO_SMALL);
                         mDequeuedInputBuffers.push_back(bufferIx);
                         return false;
                     }
 
-                    codecBuffer->setRange(0, mediaBuf->size());
-                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+                    codecBuffer->setRange(0, mediaBuf->range_length());
+                    memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->range_length());
 
                     MetaDataBase &meta_data = mediaBuf->meta_data();
                     cryptInfo = NuPlayerDrm::getSampleCryptoInfo(meta_data);
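
The fix above bounds the copy by the media buffer's valid range rather than its allocation size. A generic stand-in illustrating the distinction (RangedBuffer is not a real framework type):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct RangedBuffer {
        std::vector<uint8_t> storage;      // size() == allocation capacity
        size_t rangeOffset = 0;
        size_t rangeLength = 0;            // only this many bytes are valid

        const uint8_t* data() const { return storage.data() + rangeOffset; }
    };

    static bool copyValidBytes(const RangedBuffer& in, uint8_t* out, size_t outCapacity) {
        if (in.rangeLength > outCapacity) {
            return false;                              // ERROR_BUFFER_TOO_SMALL in the real code
        }
        std::memcpy(out, in.data(), in.rangeLength);   // bound by the valid range, not the capacity
        return true;
    }
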
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 2a50fc2..c6595ba 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,6 +26,7 @@
 #include "NuPlayer.h"
 #include "NuPlayerSource.h"
 
+#include <audiomanager/AudioManager.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AUtils.h>
@@ -85,6 +86,7 @@
       mMediaClock(new MediaClock),
       mPlayer(new NuPlayer(pid, mMediaClock)),
       mPlayerFlags(0),
+      mCachedPlayerIId(PLAYER_PIID_INVALID),
       mMetricsItem(NULL),
       mClientUid(-1),
       mAtEOS(false),
@@ -804,6 +806,16 @@
             return mPlayer->getSelectedTrack(type, reply);
         }
 
+        case INVOKE_ID_SET_PLAYER_IID:
+        {
+            Mutex::Autolock autoLock(mAudioSinkLock);
+            mCachedPlayerIId = request.readInt32();
+            if (mAudioSink != nullptr) {
+                mAudioSink->setPlayerIId(mCachedPlayerIId);
+            }
+            return OK;
+        }
+
         default:
         {
             return INVALID_OPERATION;
@@ -812,8 +824,12 @@
 }
 
 void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
+    Mutex::Autolock autoLock(mAudioSinkLock);
     mPlayer->setAudioSink(audioSink);
     mAudioSink = audioSink;
+    if (mCachedPlayerIId != PLAYER_PIID_INVALID) {
+        mAudioSink->setPlayerIId(mCachedPlayerIId);
+    }
 }
 
 status_t NuPlayerDriver::setParameter(
@@ -953,13 +969,16 @@
     }
 
     if (locked) {
-        snprintf(buf, sizeof(buf), "  state(%d), atEOS(%d), looping(%d), autoLoop(%d)\n",
+        snprintf(buf, sizeof(buf), "  state(%d), atEOS(%d), looping(%d), autoLoop(%d), ",
                 mState, mAtEOS, mLooping, mAutoLoop);
+        logString.append(buf);
+        mPlayer->dump(logString);
+        logString.append("\n");
         mLock.unlock();
     } else {
         snprintf(buf, sizeof(buf), "  NPD(%p) lock is taken\n", this);
+        logString.append(buf);
     }
-    logString.append(buf);
 
     for (size_t i = 0; i < trackStats.size(); ++i) {
         const sp<AMessage> &stats = trackStats.itemAt(i);
@@ -1027,6 +1046,7 @@
             if (mState != STATE_RESET_IN_PROGRESS) {
                 if (mAutoLoop) {
                     audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+                    Mutex::Autolock autoLock(mAudioSinkLock);
                     if (mAudioSink != NULL) {
                         streamType = mAudioSink->getAudioStreamType();
                     }
@@ -1037,6 +1057,7 @@
                 }
                 if (mLooping || mAutoLoop) {
                     mPlayer->seekToAsync(0);
+                    Mutex::Autolock autoLock(mAudioSinkLock);
                     if (mAudioSink != NULL) {
                         // The renderer has stopped the sink at the end in order to play out
                         // the last little bit of audio. If we're looping, we need to restart it.
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
index 6788b56..a964d4f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
@@ -22,14 +22,13 @@
 #include <mediadrm/DrmUtils.h>
 #include <utils/Log.h>
 
-
 namespace android {
 
 // static helpers - internal
 
 sp<IDrm> NuPlayerDrm::CreateDrm(status_t *pstatus)
 {
-    return DrmUtils::MakeDrm(pstatus);
+    return DrmUtils::MakeDrm(IDRM_NUPLAYER, pstatus);
 }
 
 sp<ICrypto> NuPlayerDrm::createCrypto(status_t *pstatus)
@@ -191,8 +190,8 @@
         uint8_t key[kBlockSize],
         uint8_t iv[kBlockSize],
         CryptoPlugin::Mode mode,
-        size_t *clearbytes,
-        size_t *encryptedbytes)
+        uint32_t *clearbytes,
+        uint32_t *encryptedbytes)
 {
     // size needed to store all the crypto data
     size_t cryptosize;
@@ -236,7 +235,7 @@
     if (!meta.findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
         return NULL;
     }
-    size_t numSubSamples = cryptedsize / sizeof(size_t);
+    size_t numSubSamples = cryptedsize / sizeof(uint32_t);
 
     if (numSubSamples <= 0) {
         ALOGE("getSampleCryptoInfo INVALID numSubSamples: %zu", numSubSamples);
@@ -285,8 +284,8 @@
             (uint8_t*) key,
             (uint8_t*) iv,
             (CryptoPlugin::Mode)mode,
-            (size_t*) cleardata,
-            (size_t*) crypteddata);
+            (uint32_t*) cleardata,
+            (uint32_t*) crypteddata);
 }
 
 }   // namespace android
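
The switch from size_t to uint32_t above matches the element width used when slicing the encrypted/clear size arrays out of the metadata blob. A small worked illustration of why the width matters on a 64-bit build (the 24-byte blob size is made up):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    static void subsampleCountExample() {
        const size_t blobBytes = 24;                           // made-up metadata blob size
        const size_t count32 = blobBytes / sizeof(uint32_t);   // 6 entries with 32-bit sizes
        const size_t count64 = blobBytes / sizeof(size_t);     // 3 on an LP64 build
        assert(count32 == 6);
        (void)count64;
    }
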
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 2828d44..9dae16e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -73,6 +73,10 @@
 // is closed to allow the audio DSP to power down.
 static const int64_t kOffloadPauseMaxUs = 10000000LL;
 
+// Additional delay after teardown before releasing the wake lock to allow time for the audio path
+// to be completely released
+static const int64_t kWakelockReleaseDelayUs = 2000000LL;
+
 // Maximum allowed delay from AudioSink, 1.5 seconds.
 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;
 
@@ -474,6 +478,23 @@
     msg->postAndAwaitResponse(&response);
 }
 
+void NuPlayer::Renderer::dump(AString& logString) {
+    Mutex::Autolock autoLock(mLock);
+    logString.append("paused(");
+    logString.append(mPaused);
+    logString.append("), offloading(");
+    logString.append(offloadingAudio());
+    logString.append("), wakelock(acquired=");
+    mWakelockAcquireEvent.dump(logString);
+    logString.append(", timeout=");
+    mWakelockTimeoutEvent.dump(logString);
+    logString.append(", release=");
+    mWakelockReleaseEvent.dump(logString);
+    logString.append(", cancel=");
+    mWakelockCancelEvent.dump(logString);
+    logString.append(")");
+}
+
 void NuPlayer::Renderer::changeAudioFormat(
         const sp<AMessage> &format,
         bool offloadOnly,
@@ -788,11 +809,33 @@
         {
             int32_t generation;
             CHECK(msg->findInt32("drainGeneration", &generation));
+            mWakelockTimeoutEvent.updateValues(
+                    uptimeMillis(),
+                    generation,
+                    mAudioOffloadPauseTimeoutGeneration);
             if (generation != mAudioOffloadPauseTimeoutGeneration) {
                 break;
             }
             ALOGV("Audio Offload tear down due to pause timeout.");
             onAudioTearDown(kDueToTimeout);
+            sp<AMessage> newMsg = new AMessage(kWhatReleaseWakeLock, this);
+            newMsg->setInt32("drainGeneration", generation);
+            newMsg->post(kWakelockReleaseDelayUs);
+            break;
+        }
+
+        case kWhatReleaseWakeLock:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            mWakelockReleaseEvent.updateValues(
+                uptimeMillis(),
+                generation,
+                mAudioOffloadPauseTimeoutGeneration);
+            if (generation != mAudioOffloadPauseTimeoutGeneration) {
+                break;
+            }
+            ALOGV("releasing audio offload pause wakelock.");
             mWakeLock->release();
             break;
         }
@@ -1785,6 +1828,8 @@
         return;
     }
 
+    startAudioOffloadPauseTimeout();
+
     {
         Mutex::Autolock autoLock(mLock);
         // we do not increment audio drain generation so that we fill audio buffer during pause.
@@ -1799,7 +1844,6 @@
 
     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
     mAudioSink->pause();
-    startAudioOffloadPauseTimeout();
 
     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
           mAudioQueue.size(), mVideoQueue.size());
@@ -1895,6 +1939,9 @@
 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
     if (offloadingAudio()) {
         mWakeLock->acquire();
+        mWakelockAcquireEvent.updateValues(uptimeMillis(),
+                                           mAudioOffloadPauseTimeoutGeneration,
+                                           mAudioOffloadPauseTimeoutGeneration);
         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
         msg->post(kOffloadPauseMaxUs);
@@ -1911,6 +1958,9 @@
     // Note: The acquired wakelock prevents the device from suspending
     // immediately after offload pause (in case a resume happens shortly thereafter).
     mWakeLock->release(true);
+    mWakelockCancelEvent.updateValues(uptimeMillis(),
+                                      mAudioOffloadPauseTimeoutGeneration,
+                                      mAudioOffloadPauseTimeoutGeneration);
     ++mAudioOffloadPauseTimeoutGeneration;
 }
 
@@ -1927,12 +1977,17 @@
     int32_t numChannels;
     CHECK(format->findInt32("channel-count", &numChannels));
 
-    int32_t rawChannelMask;
-    audio_channel_mask_t channelMask =
-            format->findInt32("channel-mask", &rawChannelMask) ?
-                    static_cast<audio_channel_mask_t>(rawChannelMask)
-                    // signal to the AudioSink to derive the mask from count.
-                    : CHANNEL_MASK_USE_CHANNEL_ORDER;
+    // channel mask info as read from the audio format
+    int32_t mediaFormatChannelMask;
+    // channel mask to use for native playback
+    audio_channel_mask_t channelMask;
+    if (format->findInt32("channel-mask", &mediaFormatChannelMask)) {
+        // KEY_CHANNEL_MASK follows the android.media.AudioFormat java mask
+        channelMask = audio_channel_mask_from_media_format_mask(mediaFormatChannelMask);
+    } else {
+        // no mask found: the mask will be derived from the channel count
+        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+    }
 
     int32_t sampleRate;
     CHECK(format->findInt32("sample-rate", &sampleRate));
@@ -2141,4 +2196,14 @@
     notify->post();
 }
 
+void NuPlayer::Renderer::WakeLockEvent::dump(AString& logString) {
+  logString.append("[");
+  logString.append(mTimeMs);
+  logString.append(",");
+  logString.append(mEventTimeoutGeneration);
+  logString.append(",");
+  logString.append(mRendererTimeoutGeneration);
+  logString.append("]");
+}
+
 }  // namespace android
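
The new kWhatReleaseWakeLock step above defers the wakelock release by kWakelockReleaseDelayUs and, like the pause timeout itself, only acts if the drain generation captured at post time still matches the current one, so a resume (which bumps the generation) cancels the pending release. A standalone sketch of that guard, with schedule() standing in for AMessage::post() and the struct name being illustrative:

    #include <cstdint>
    #include <functional>

    struct DelayedWakelockRelease {
        int32_t mCurrentGeneration = 0;

        // schedule(fn, delayUs) stands in for posting a delayed AMessage;
        // releaseWakeLock stands in for mWakeLock->release().
        void scheduleRelease(const std::function<void(std::function<void()>, int64_t)>& schedule,
                             const std::function<void()>& releaseWakeLock,
                             int64_t delayUs) {
            const int32_t generationAtPost = mCurrentGeneration;
            schedule([this, generationAtPost, releaseWakeLock] {
                if (generationAtPost != mCurrentGeneration) {
                    return;                  // a resume happened in the meantime; keep the wakelock
                }
                releaseWakeLock();
            }, delayUs);
        }

        void onResume() { ++mCurrentGeneration; }   // invalidates any pending timeout/release
    };
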
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 6a17972..fd03150 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -115,7 +115,7 @@
 
         int sockRtp, sockRtcp;
         ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
-                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);
 
         sp<AMessage> notify = new AMessage('accu', this);
 
@@ -125,6 +125,8 @@
         mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
         mRTPConn->setSelfID(info->mSelfID);
         mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+        mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
+        mRTPConn->setIsIPv6(info->mLocalIp);
 
         unsigned long PT;
         AString formatDesc, formatParams;
@@ -719,6 +721,8 @@
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle = atoll(value);
         setSocketNetwork(networkHandle);
+    } else if (key == "rtp-param-set-socket-ecn") {
+        info->mRtpSockOptEcn = atoi(value);
     } else if (key == "rtp-param-jitter-buffer-time") {
         // clamping min at 40, max at 3000
         info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
index adb7075..7dc97ea 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
@@ -104,6 +104,8 @@
 
     void setTargetBitrate(int bitrate /* bps */);
 
+    void dump(AString& logString);
+
 protected:
     virtual ~NuPlayer();
 
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
index 55a0fad..138cd6f 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
@@ -140,9 +140,12 @@
     sp<ALooper> mLooper;
     const sp<MediaClock> mMediaClock;
     const sp<NuPlayer> mPlayer;
-    sp<AudioSink> mAudioSink;
     uint32_t mPlayerFlags;
 
+    mutable Mutex mAudioSinkLock;
+    sp<AudioSink> mAudioSink GUARDED_BY(mAudioSinkLock);
+    int32_t mCachedPlayerIId GUARDED_BY(mAudioSinkLock);
+
     mediametrics::Item *mMetricsItem;
     mutable Mutex mMetricsLock;
     uid_t mClientUid;
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h
index 4360656..232638c 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h
@@ -106,8 +106,8 @@
                 uint8_t key[kBlockSize],
                 uint8_t iv[kBlockSize],
                 CryptoPlugin::Mode mode,
-                size_t *clearbytes,
-                size_t *encryptedbytes);
+                uint32_t *clearbytes,
+                uint32_t *encryptedbytes);
 
         static CryptoInfo *getSampleCryptoInfo(MetaDataBase &meta);
 
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 3d2b033..2ca040f 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -83,6 +83,8 @@
             bool isStreaming);
     void closeAudioSink();
 
+    void dump(AString& logString);
+
     // re-open audio sink after all pending audio buffers played.
     void changeAudioFormat(
             const sp<AMessage> &format,
@@ -100,6 +102,7 @@
         kWhatMediaRenderingStart      = 'mdrd',
         kWhatAudioTearDown            = 'adTD',
         kWhatAudioOffloadPauseTimeout = 'aOPT',
+        kWhatReleaseWakeLock          = 'adRL',
     };
 
     enum AudioTearDownReason {
@@ -234,6 +237,32 @@
     status_t getCurrentPositionFromAnchor(
             int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
 
+    struct WakeLockEvent{
+        int64_t mTimeMs;
+        int32_t mEventTimeoutGeneration;
+        int32_t mRendererTimeoutGeneration;
+
+        WakeLockEvent():
+            mTimeMs(0),
+            mEventTimeoutGeneration(0),
+            mRendererTimeoutGeneration(0) {}
+
+        void updateValues(int64_t timeMs,
+                          int32_t eventGeneration,
+                          int32_t rendererGeneration) {
+            mTimeMs = timeMs;
+            mEventTimeoutGeneration = eventGeneration;
+            mRendererTimeoutGeneration = rendererGeneration;
+        }
+
+        void dump(AString& logString);
+    };
+
+    WakeLockEvent mWakelockAcquireEvent;
+    WakeLockEvent mWakelockTimeoutEvent;
+    WakeLockEvent mWakelockReleaseEvent;
+    WakeLockEvent mWakelockCancelEvent;
+
     void notifyEOSCallback();
     size_t fillAudioBuffer(void *buffer, size_t size);
 
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 7d9bb8f..b2afe86 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -121,6 +121,8 @@
         uint32_t mSelfID;
         /* extmap:<value> for CVO will be set to here */
         int32_t mCVOExtMap;
+        /* Whether ECN is supported on the RTP socket */
+        int32_t mRtpSockOptEcn;
 
         /* a copy of TrackInfo in RTSPSource */
         sp<AnotherPacketSource> mSource;
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index 2beb47f..30f6a91 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -25,8 +25,8 @@
 #include <aidl/android/media/BnResourceManagerService.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
 #include <mediadrm/DrmSessionManager.h>
+#include <mediautils/ProcessInfoInterface.h>
 
 #include <algorithm>
 #include <iostream>
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
index 162c187..9514021 100644
--- a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -286,7 +286,7 @@
     // Record media for 4 secs
     std::this_thread::sleep_for(std::chrono::seconds(kClipDurationInSec));
 
-    std::vector<media::MicrophoneInfo> activeMicrophones{};
+    std::vector<media::MicrophoneInfoFw> activeMicrophones{};
     status = mStfRecorder->getActiveMicrophones(&activeMicrophones);
     ASSERT_EQ(status, OK) << "Failed to get Active Microphones";
     ASSERT_GT(activeMicrophones.size(), 0) << "No active microphones are found";
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index e9422cc..89e9806 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -68,6 +68,10 @@
     // ],
     // static_libs: ["libsndfile"],
 
+    shared_libs: [
+        "libmediautils",
+    ],
+
     header_libs: ["libaudiohal_headers"],
 
     export_include_dirs: ["include"],
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 581867f..0ab5874 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -50,6 +50,14 @@
         mFormat = Format_from_SR_C(config.sample_rate,
                 audio_channel_count_from_out_mask(config.channel_mask), config.format);
         mFrameSize = Format_frameSize(mFormat);
+
+        // update format for MEL computation
+        auto processor = mMelProcessor.load();
+        if (processor) {
+            processor->updateAudioFormat(config.sample_rate,
+                                         audio_channel_count_from_out_mask(config.channel_mask),
+                                         config.format);
+        }
     }
     return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
 }
@@ -63,8 +71,15 @@
     size_t written;
     status_t ret = mStream->write(buffer, count * mFrameSize, &written);
     if (ret == OK && written > 0) {
+        // Send to MelProcessor for sound dose measurement.
+        auto processor = mMelProcessor.load();
+        if (processor) {
+            processor->process(buffer, written);
+        }
+
         written /= mFrameSize;
         mFramesWritten += written;
+
         return written;
     } else {
         // FIXME verify HAL implementations are returning the correct error codes e.g. WOULD_BLOCK
@@ -85,4 +100,28 @@
     return OK;
 }
 
+void AudioStreamOutSink::startMelComputation(const sp<audio_utils::MelProcessor>& processor)
+{
+    ALOGV("%s start mel computation for device %d", __func__, processor->getDeviceId());
+
+    mMelProcessor.store(processor);
+    if (processor) {
+        // update format for MEL computation
+        processor->updateAudioFormat(mFormat.mSampleRate,
+                                     mFormat.mChannelCount,
+                                     mFormat.mFormat);
+        processor->resume();
+    }
+
+}
+
+void AudioStreamOutSink::stopMelComputation()
+{
+    auto melProcessor = mMelProcessor.load();
+    if (melProcessor != nullptr) {
+        ALOGV("%s pause mel computation for device %d", __func__, melProcessor->getDeviceId());
+        melProcessor->pause();
+    }
+}
+
 }   // namespace android
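
The hooks added above route every write() through an optional audio_utils::MelProcessor. A minimal sketch of how a caller could wire them up, assuming the sink and the processor are created elsewhere; the function name and parameters below are illustrative, not part of this change.

    #include <audio_utils/MelProcessor.h>
    #include <media/nbaio/AudioStreamOutSink.h>

    using namespace android;

    // Sketch only: feed one block of PCM through the sink while MEL computation is active.
    void writeWithMelComputation(const sp<AudioStreamOutSink>& sink,
                                 const sp<audio_utils::MelProcessor>& processor,
                                 const void* pcm, size_t frames) {
        sink->startMelComputation(processor);  // pushes the negotiated format and resumes the processor
        sink->write(pcm, frames);              // write() now also forwards the written bytes to process()
        sink->stopMelComputation();            // pauses MEL accumulation; the processor stays attached
    }
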
diff --git a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
index 635f67f..7b5aa06 100644
--- a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
+++ b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
@@ -17,7 +17,9 @@
 #ifndef ANDROID_AUDIO_STREAM_OUT_SINK_H
 #define ANDROID_AUDIO_STREAM_OUT_SINK_H
 
+#include <audio_utils/MelProcessor.h>
 #include <media/nbaio/NBAIO.h>
+#include <mediautils/Synchronization.h>
 
 namespace android {
 
@@ -48,6 +50,10 @@
 
     // NBAIO_Sink end
 
+    void startMelComputation(const sp<audio_utils::MelProcessor>& processor);
+
+    void stopMelComputation();
+
 #if 0   // until necessary
     sp<StreamOutHalInterface> stream() const { return mStream; }
 #endif
@@ -55,6 +61,7 @@
 private:
     sp<StreamOutHalInterface> mStream;
     size_t              mStreamBufferSizeBytes; // as reported by get_buffer_size()
+    mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
 };
 
 }   // namespace android
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 67d028d..d6232d4 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -208,11 +208,14 @@
     }
     while (back + Entry::kPreviousLengthOffset >= front) {
         const uint8_t *prev = back - back[Entry::kPreviousLengthOffset] - Entry::kOverhead;
-        const Event type = (const Event)prev[offsetof(entry, type)];
         if (prev < front
-                || prev + prev[offsetof(entry, length)] + Entry::kOverhead != back
-                || type <= EVENT_RESERVED || type >= EVENT_UPPER_BOUND) {
-            // prev points to an out of limits or inconsistent entry
+                || prev + prev[offsetof(entry, length)] + Entry::kOverhead != back) {
+            // prev points to an out of limits entry
+            return nullptr;
+        }
+        const Event type = (const Event)prev[offsetof(entry, type)];
+        if (type <= EVENT_RESERVED || type >= EVENT_UPPER_BOUND) {
+            // prev points to an inconsistent entry
             return nullptr;
         }
         // if invalidTypes does not contain the type, then the type is valid.
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 52c4c0f..505775b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -785,7 +785,7 @@
 
     // we cannot change the number of output buffers while OMX is running
     // set up surface to the same count
-    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
+    std::vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
     ALOGV("setting up surface for %zu buffers", buffers.size());
 
     err = native_window_set_buffer_count(nativeWindow, buffers.size());
@@ -825,7 +825,7 @@
     // cancel undequeued buffers to new surface
     if (!storingMetadataInDecodedBuffers()) {
         for (size_t i = 0; i < buffers.size(); ++i) {
-            BufferInfo &info = buffers.editItemAt(i);
+            BufferInfo &info = buffers[i];
             if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                 err = nativeWindow->cancelBuffer(
@@ -872,7 +872,7 @@
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
     CHECK(mAllocator[portIndex] == NULL);
-    CHECK(mBuffers[portIndex].isEmpty());
+    CHECK(mBuffers[portIndex].empty());
 
     status_t err;
     if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
@@ -951,6 +951,7 @@
 
             const sp<AMessage> &format =
                     portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
+            mBuffers[portIndex].reserve(def.nBufferCountActual);
             for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                 hidl_memory hidlMemToken;
                 sp<TMemory> hidlMem;
@@ -1039,7 +1040,7 @@
                     }
                 }
 
-                mBuffers[portIndex].push(info);
+                mBuffers[portIndex].push_back(info);
             }
         }
     }
@@ -1250,6 +1251,7 @@
          mComponentName.c_str(), bufferCount, bufferSize);
 
     // Dequeue buffers and send them to OMX
+    mBuffers[kPortIndexOutput].reserve(bufferCount);
     for (OMX_U32 i = 0; i < bufferCount; i++) {
         ANativeWindowBuffer *buf;
         int fenceFd;
@@ -1275,7 +1277,7 @@
         info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));
         info.mCodecData = info.mData;
 
-        mBuffers[kPortIndexOutput].push(info);
+        mBuffers[kPortIndexOutput].push_back(info);
 
         IOMX::buffer_id bufferId;
         err = mOMXNode->useBuffer(kPortIndexOutput, graphicBuffer, &bufferId);
@@ -1285,7 +1287,7 @@
             break;
         }
 
-        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
+        mBuffers[kPortIndexOutput][i].mBufferID = bufferId;
 
         ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
              mComponentName.c_str(),
@@ -1307,7 +1309,7 @@
     }
 
     for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
-        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+        BufferInfo *info = &mBuffers[kPortIndexOutput][i];
         if (info->mStatus == BufferInfo::OWNED_BY_US) {
             status_t error = cancelBufferToNativeWindow(info);
             if (err == 0) {
@@ -1336,6 +1338,7 @@
     ALOGV("[%s] Allocating %u meta buffers on output port",
          mComponentName.c_str(), bufferCount);
 
+    mBuffers[kPortIndexOutput].reserve(bufferCount);
     for (OMX_U32 i = 0; i < bufferCount; i++) {
         BufferInfo info;
         info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -1353,7 +1356,7 @@
         info.mCodecData = info.mData;
 
         err = mOMXNode->useBuffer(kPortIndexOutput, OMXBuffer::sPreset, &info.mBufferID);
-        mBuffers[kPortIndexOutput].push(info);
+        mBuffers[kPortIndexOutput].push_back(info);
 
         ALOGV("[%s] allocated meta buffer with ID %u",
                 mComponentName.c_str(), info.mBufferID);
@@ -1462,7 +1465,7 @@
             it != done.cend(); ++it) {
         ssize_t index = it->getIndex();
         if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
-            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
+            mBuffers[kPortIndexOutput][index].mRenderInfo = NULL;
         } else if (index >= 0) {
             // THIS SHOULD NEVER HAPPEN
             ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
@@ -1502,7 +1505,7 @@
         bool stale = false;
         for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
             i--;
-            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+            BufferInfo *info = &mBuffers[kPortIndexOutput][i];
 
             if (info->mGraphicBuffer != NULL &&
                     info->mGraphicBuffer->handle == buf->handle) {
@@ -1550,8 +1553,7 @@
     BufferInfo *oldest = NULL;
     for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
         i--;
-        BufferInfo *info =
-            &mBuffers[kPortIndexOutput].editItemAt(i);
+        BufferInfo *info = &mBuffers[kPortIndexOutput][i];
         if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
             (oldest == NULL ||
              // avoid potential issues from counter rolling over
@@ -1608,8 +1610,7 @@
     status_t err = OK;
     for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
         i--;
-        BufferInfo *info =
-            &mBuffers[kPortIndexOutput].editItemAt(i);
+        BufferInfo *info = &mBuffers[kPortIndexOutput][i];
 
         // At this time some buffers may still be with the component
         // or being drained.
@@ -1626,7 +1627,7 @@
 }
 
 status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
-    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+    BufferInfo *info = &mBuffers[portIndex][i];
     status_t err = OK;
 
     // there should not be any fences in the metadata
@@ -1666,14 +1667,14 @@
     }
 
     // remove buffer even if mOMXNode->freeBuffer fails
-    mBuffers[portIndex].removeAt(i);
+    mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
     return err;
 }
 
 ACodec::BufferInfo *ACodec::findBufferByID(
         uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
     for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
-        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+        BufferInfo *info = &mBuffers[portIndex][i];
 
         if (info->mBufferID == bufferID) {
             if (index != NULL) {
@@ -5102,7 +5103,7 @@
     size_t n = 0;
 
     for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
-        const BufferInfo &info = mBuffers[portIndex].itemAt(i);
+        const BufferInfo &info = mBuffers[portIndex][i];
 
         if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
             ++n;
@@ -5116,7 +5117,7 @@
     size_t n = 0;
 
     for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
-        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
+        const BufferInfo &info = mBuffers[kPortIndexOutput][i];
 
         if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
             ++n;
@@ -5143,7 +5144,7 @@
 bool ACodec::allYourBuffersAreBelongToUs(
         OMX_U32 portIndex) {
     for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
-        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+        BufferInfo *info = &mBuffers[portIndex][i];
 
         if (info->mStatus != BufferInfo::OWNED_BY_US
                 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
@@ -5167,12 +5168,11 @@
 }
 
 void ACodec::processDeferredMessages() {
-    List<sp<AMessage> > queue = mDeferredQueue;
+    std::list<sp<AMessage>> queue = mDeferredQueue;
     mDeferredQueue.clear();
 
-    List<sp<AMessage> >::iterator it = queue.begin();
-    while (it != queue.end()) {
-        onMessageReceived(*it++);
+    for (const sp<AMessage> &msg : queue) {
+        onMessageReceived(msg);
     }
 }
 
@@ -5928,19 +5928,17 @@
 
         case ACodec::kWhatSetSurface:
         {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
             sp<RefBase> obj;
             CHECK(msg->findObject("surface", &obj));
 
             status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
 
-            sp<AReplyToken> replyID;
-            if (msg->senderAwaitsResponse(&replyID)) {
-                sp<AMessage> response = new AMessage;
-                response->setInt32("err", err);
-                response->postReply(replyID);
-            } else if (err != OK) {
-                mCodec->signalError(OMX_ErrorUndefined, err);
-            }
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
             break;
         }
 
@@ -6317,6 +6315,11 @@
                     flags |= OMX_BUFFERFLAG_EOS;
                 }
 
+                int32_t isDecodeOnly = 0;
+                if (buffer->meta()->findInt32("decode-only", &isDecodeOnly) && isDecodeOnly != 0) {
+                    flags |= OMX_BUFFERFLAG_DECODEONLY;
+                    mCodec->mDecodeOnlyTimesUs.emplace(timeUs);
+                }
                 size_t size = buffer->size();
                 size_t offset = buffer->offset();
                 if (buffer->base() != info->mCodecData->base()) {
@@ -6346,6 +6349,10 @@
                     ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                          mCodec->mComponentName.c_str(), bufferID);
                 } else {
+                    if (flags & OMX_BUFFERFLAG_DECODEONLY) {
+                        ALOGV("[%s] calling emptyBuffer %u w/ decode only flag",
+                            mCodec->mComponentName.c_str(), bufferID);
+                    }
 #if TRACK_BUFFER_TIMING
                     ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                          mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
@@ -6483,7 +6490,7 @@
     BufferInfo *eligible = NULL;
 
     for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
-        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
 
 #if 0
         if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
@@ -6636,6 +6643,39 @@
 
             info->mData.clear();
 
+            // Workaround: if OMX_BUFFERFLAG_DECODEONLY is not implemented in the
+            // HAL, the flag is missing from the corresponding output buffer, so restore it here.
+
+            // for all buffers that were marked as DECODE_ONLY, remove their timestamp
+            // if it is smaller than the timestamp of the buffer that was
+            // just received
+            while (!mCodec->mDecodeOnlyTimesUs.empty() &&
+                   *mCodec->mDecodeOnlyTimesUs.begin() < timeUs) {
+                    mCodec->mDecodeOnlyTimesUs.erase(mCodec->mDecodeOnlyTimesUs.begin());
+            }
+            // If OMX_BUFFERFLAG_DECODEONLY is not implemented in the HAL, restore the
+            // flag on the frames whose timestamps we saved in the set; the set contains
+            // the timestamps of buffers that were marked as DECODE_ONLY by the app.
+            if (!mCodec->mDecodeOnlyTimesUs.empty() &&
+                *mCodec->mDecodeOnlyTimesUs.begin() == timeUs) {
+                mCodec->mDecodeOnlyTimesUs.erase(timeUs);
+                // If the app queued the last valid buffer as DECODE_ONLY and queued an additional
+                // empty buffer as EOS, it's possible that HAL sets the last valid frame as EOS
+                // instead and drops the empty buffer. In such a case, we should not add back
+                // the OMX_BUFFERFLAG_DECODEONLY flag to it, as doing so will make it so that the
+                // app does not receive the EOS buffer, which breaks the contract of EOS buffers
+                if (flags & OMX_BUFFERFLAG_EOS) {
+                    // Set buffer size to 0, as described by
+                    // https://developer.android.com/reference/android/media/MediaCodec.BufferInfo?hl=en#size
+                    // a buffer of size 0 should only be used to carry the EOS flag and should
+                    // be discarded by the app as it has no data
+                    buffer->setRange(0, 0);
+                } else {
+                    // re-add the OMX_BUFFERFLAG_DECODEONLY flag to the buffer in case it is
+                    // not the end of stream buffer
+                    flags |= OMX_BUFFERFLAG_DECODEONLY;
+                }
+            }
             mCodec->mBufferChannel->drainThisBuffer(info->mBufferID, flags);
 
             info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
@@ -6753,6 +6793,8 @@
         info->checkReadFence("onOutputBufferDrained before queueBuffer");
         err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+        // TODO(b/266211548): Poll the native window for rendered buffers, since when queueing
+        // buffers, the frame event history delta is retrieved.
         info->mFenceFd = -1;
         if (err == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -6856,6 +6898,7 @@
     mCodec->mConverter[0].clear();
     mCodec->mConverter[1].clear();
     mCodec->mComponentName.clear();
+    mCodec->mDecodeOnlyTimesUs.clear();
 }
 
 bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
@@ -7515,7 +7558,7 @@
     // submit as many buffers as there are input buffers with the codec
     // in case we are in port reconfiguring
     for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
-        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
 
         if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
             if (mCodec->submitOutputMetadataBuffer() != OK)
@@ -7533,7 +7576,7 @@
 void ACodec::ExecutingState::submitRegularOutputBuffers() {
     bool failed = false;
     for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
-        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput][i];
 
         if (mCodec->mNativeWindow != NULL) {
             if (info->mStatus != BufferInfo::OWNED_BY_US
@@ -7590,7 +7633,7 @@
     }
 
     for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
-        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput][i];
         if (info->mStatus == BufferInfo::OWNED_BY_US) {
             postFillThisBuffer(info);
         }
@@ -8529,17 +8572,11 @@
 
         case kWhatSetSurface:
         {
-            ALOGV("[%s] Deferring setSurface", mCodec->mComponentName.c_str());
-
-            sp<AReplyToken> replyID;
-            CHECK(msg->senderAwaitsResponse(&replyID));
+            ALOGD("[%s] Deferring setSurface from OutputPortSettingsChangedState",
+                  mCodec->mComponentName.c_str());
 
             mCodec->deferMessage(msg);
 
-            sp<AMessage> response = new AMessage;
-            response->setInt32("err", OK);
-            response->postReply(replyID);
-
             handled = true;
             break;
         }
@@ -8594,7 +8631,7 @@
                 ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());
 
                 status_t err = OK;
-                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
+                if (!mCodec->mBuffers[kPortIndexOutput].empty()) {
                     ALOGE("disabled port should be empty, but has %zu buffers",
                             mCodec->mBuffers[kPortIndexOutput].size());
                     err = FAILED_TRANSACTION;
@@ -8847,6 +8884,7 @@
     ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
 
     mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
+    mCodec->mDecodeOnlyTimesUs.clear();
 
     // If we haven't transitioned after 3 seconds, we're probably stuck.
     sp<AMessage> msg = new AMessage(ACodec::kWhatCheckIfStuck, mCodec);
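
The workaround above keeps the presentation times of decode-only input buffers in a set so the flag can be restored on output when the HAL drops it. A standalone sketch of that bookkeeping, with the EOS special case omitted; DecodeOnlyTracker is illustrative, not a class in this change.

    #include <cstdint>
    #include <set>

    // Models mDecodeOnlyTimesUs: remember decode-only timestamps on input, decide on output
    // whether the decode-only flag has to be re-added.
    struct DecodeOnlyTracker {
        std::set<int64_t> timesUs;

        void onQueuedDecodeOnly(int64_t timeUs) { timesUs.emplace(timeUs); }

        bool shouldRestoreFlag(int64_t outputTimeUs) {
            // Drop entries older than the buffer just received; those frames are gone.
            while (!timesUs.empty() && *timesUs.begin() < outputTimeUs) {
                timesUs.erase(timesUs.begin());
            }
            if (!timesUs.empty() && *timesUs.begin() == outputTimeUs) {
                timesUs.erase(outputTimeUs);
                return true;  // this output frame was queued as decode-only
            }
            return false;
        }
    };
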
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 88b15ae..8f2bed2 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -114,6 +114,10 @@
         if (it->mClientBuffer->meta()->findInt32("csd", &csd)) {
             it->mCodecBuffer->meta()->setInt32("csd", csd);
         }
+        int32_t decodeOnly;
+        if (it->mClientBuffer->meta()->findInt32("decode-only", &decodeOnly)) {
+            it->mCodecBuffer->meta()->setInt32("decode-only", decodeOnly);
+        }
     }
     ALOGV("queueInputBuffer #%d", it->mBufferId);
     sp<AMessage> msg = mInputBufferFilled->dup();
@@ -263,6 +267,10 @@
     if (it->mClientBuffer->meta()->findInt32("csd", &csd)) {
         it->mCodecBuffer->meta()->setInt32("csd", csd);
     }
+    int32_t decodeOnly;
+    if (it->mClientBuffer->meta()->findInt32("decode-only", &decodeOnly)) {
+        it->mCodecBuffer->meta()->setInt32("decode-only", decodeOnly);
+    }
 
     ALOGV("queueSecureInputBuffer #%d", it->mBufferId);
     sp<AMessage> msg = mInputBufferFilled->dup();
@@ -339,7 +347,8 @@
         size_t offset,
         const CryptoPlugin::SubSample *subSamples,
         size_t numSubSamples,
-        const sp<MediaCodecBuffer> &buffer) {
+        const sp<MediaCodecBuffer> &buffer,
+        AString* errorDetailMsg) {
     std::shared_ptr<const std::vector<const BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
@@ -363,7 +372,6 @@
     ssize_t result = -1;
     ssize_t codecDataOffset = 0;
     if (mCrypto != NULL) {
-        AString errorDetailMsg;
         hardware::drm::V1_0::DestinationBuffer destination;
         if (secure) {
             destination.type = DrmBufferType::NATIVE_HANDLE;
@@ -379,7 +387,7 @@
 
         result = mCrypto->decrypt(key, iv, mode, pattern,
                 source, it->mClientBuffer->offset(),
-                subSamples, numSubSamples, destination, &errorDetailMsg);
+                subSamples, numSubSamples, destination, errorDetailMsg);
 
         if (result < 0) {
             return result;
@@ -433,7 +441,9 @@
                     result = (ssize_t)_bytesWritten;
                     detailedError = _detailedError;
                 });
-
+        if (errorDetailMsg) {
+            errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+        }
         if (!returnVoid.isOk() || status != Status::OK || result < 0) {
             ALOGE("descramble failed, trans=%s, status=%d, result=%zd",
                     returnVoid.description().c_str(), status, result);
@@ -477,6 +487,10 @@
     return OK;
 }
 
+void ACodecBufferChannel::pollForRenderedBuffers() {
+    // TODO(b/266211548): Poll the native window for rendered buffers.
+}
+
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     std::shared_ptr<const std::vector<const BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
@@ -634,6 +648,9 @@
     if (omxFlags & OMX_BUFFERFLAG_EOS) {
         flags |= MediaCodec::BUFFER_FLAG_EOS;
     }
+    if (omxFlags & OMX_BUFFERFLAG_DECODEONLY) {
+        flags |= MediaCodec::BUFFER_FLAG_DECODE_ONLY;
+    }
     it->mClientBuffer->meta()->setInt32("flags", flags);
 
     mCallback->onOutputBufferAvailable(
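
The two hunks above copy a client-set "decode-only" meta entry onto the codec buffer and later surface it as MediaCodec::BUFFER_FLAG_DECODE_ONLY on the output. A minimal sketch of how framework code could tag an input buffer before queueing it; this is internal plumbing, not the public MediaCodec API.

    #include <media/MediaCodecBuffer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Sketch only: mark an input buffer so ACodec sets OMX_BUFFERFLAG_DECODEONLY for it.
    void markDecodeOnly(const sp<MediaCodecBuffer>& clientBuffer) {
        clientBuffer->meta()->setInt32("decode-only", 1);
    }
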
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 10baec4..a26fcbe 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -33,75 +33,6 @@
     },
 }
 
-cc_library_static {
-    name: "libstagefright_esds",
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media",
-    ],
-    min_sdk_version: "29",
-
-    srcs: ["ESDS.cpp"],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-    shared_libs: [
-        "libstagefright_foundation",
-        "libutils"
-    ],
-    host_supported: true,
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-}
-
-cc_library_static {
-    name: "libstagefright_metadatautils",
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media",
-    ],
-    min_sdk_version: "29",
-
-    srcs: ["MetaDataUtils.cpp"],
-
-    cflags: [
-        "-Werror",
-        "-Wall",
-    ],
-    sanitize: {
-        misc_undefined: [
-            "signed-integer-overflow",
-        ],
-        cfi: true,
-    },
-
-    header_libs: [
-        "libaudioclient_headers",
-        "libstagefright_foundation_headers",
-        "media_ndk_headers",
-    ],
-
-    host_supported: true,
-    target: {
-        darwin: {
-            enabled: false,
-        },
-    },
-
-    export_include_dirs: ["include"],
-}
-
 cc_library_shared {
     name: "libstagefright_codecbase",
 
@@ -166,6 +97,10 @@
         "liblog",
     ],
 
+    static_libs: [
+        "libstagefright_esds",
+    ],
+
     export_include_dirs: [
         "include",
     ],
@@ -220,6 +155,7 @@
         "libEGL",
         "libGLESv1_CM",
         "libGLESv2",
+        "libvulkan",
         "libgui",
         "liblog",
         "libprocessgroup",
@@ -302,6 +238,8 @@
         "CallbackMediaSource.cpp",
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
+        "CodecErrorLog.cpp",
+        "CryptoAsync.cpp",
         "FrameDecoder.cpp",
         "HevcUtils.cpp",
         "InterfaceUtils.cpp",
@@ -332,6 +270,7 @@
         "SurfaceUtils.cpp",
         "ThrottledSource.cpp",
         "Utils.cpp",
+        "VideoRenderQualityTracker.cpp",
         "VideoFrameSchedulerBase.cpp",
         "VideoFrameScheduler.cpp",
     ],
@@ -365,7 +304,6 @@
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx_utils",
-        "libRScpp",
         "libhidlallocatorutils",
         "libhidlbase",
         "libhidlmemory",
@@ -376,17 +314,16 @@
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
+        "server_configurable_flags",
     ],
 
     static_libs: [
         "libstagefright_esds",
         "libstagefright_color_conversion",
         "libyuv_static",
-        "libstagefright_mediafilter",
         "libstagefright_webm",
         "libstagefright_timedtext",
         "libogg",
-        "libwebm",
         "libstagefright_id3",
         "framework-permission-aidl-cpp",
         "libmediandk_format",
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index bfe8538..584dad6 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -522,7 +522,7 @@
 }
 
 status_t AudioSource::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones) {
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones) {
     if (mRecord != 0) {
         return mRecord->getActiveMicrophones(activeMicrophones);
     }
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9607425..967c316 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,8 @@
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                /*forceSlowJpegMode*/false);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/CodecErrorLog.cpp b/media/libstagefright/CodecErrorLog.cpp
new file mode 100644
index 0000000..9785623
--- /dev/null
+++ b/media/libstagefright/CodecErrorLog.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecErrorLog"
+
+#include <log/log.h>
+#include <media/stagefright/CodecErrorLog.h>
+
+namespace android {
+
+void CodecErrorLog::log(const char *tag, const char *message) {
+    std::unique_lock lock(mLock);
+    ALOG(LOG_ERROR, tag, "%s", message);
+    mStream << message << std::endl;
+}
+
+void CodecErrorLog::log(const char *tag, const std::string &message) {
+    log(tag, message.c_str());
+}
+
+std::string CodecErrorLog::extract() {
+    std::unique_lock lock(mLock);
+    std::string msg = mStream.str();
+    mStream.str("");
+    return msg;
+}
+
+void CodecErrorLog::clear() {
+    std::unique_lock lock(mLock);
+    mStream.str("");
+}
+
+}  // namespace android
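
A brief usage sketch of the new class, assuming the owning codec holds a CodecErrorLog member; the function name and tag below are illustrative.

    #include <media/stagefright/CodecErrorLog.h>
    #include <string>

    // Sketch only: record an error and later pull the accumulated text for reporting.
    void reportCodecError(android::CodecErrorLog& errorLog) {
        errorLog.log("SampleCodec", "no output buffer within the configured timeout");
        std::string details = errorLog.extract();  // returns the buffered messages and clears the stream
        (void)details;  // would typically be attached to the error surfaced to the client
    }
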
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
new file mode 100644
index 0000000..8b5c8ed
--- /dev/null
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -0,0 +1,289 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CryptoAsync"
+
+#include <log/log.h>
+
+#include "hidl/HidlSupport.h"
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/CryptoAsync.h>
+
+namespace android {
+
+CryptoAsync::~CryptoAsync() {
+}
+
+status_t CryptoAsync::decrypt(sp<AMessage> &msg) {
+    int32_t decryptAction;
+    CHECK(msg->findInt32("action", &decryptAction));
+    if (mCallback == nullptr) {
+       ALOGE("Crypto callback channel is not set");
+       return -ENOSYS;
+    }
+    bool shouldPost = false;
+    Mutexed<std::list<sp<AMessage>>>::Locked pendingBuffers(mPendingBuffers);
+    if (mState != kCryptoAsyncActive) {
+       ALOGE("Cannot decrypt in errored state");
+       return -ENOSYS;
+    }
+    shouldPost = pendingBuffers->size() == 0;
+    pendingBuffers->push_back(std::move(msg));
+    if (shouldPost) {
+       sp<AMessage> decryptMsg = new AMessage(kWhatDecrypt, this);
+       decryptMsg->post();
+    }
+    return OK;
+}
+
+void CryptoAsync::stop(std::list<sp<AMessage>> * const buffers) {
+    sp<AMessage>  stopMsg = new AMessage(kWhatStop, this);
+    stopMsg->setPointer("remaining", static_cast<void*>(buffers));
+    sp<AMessage> response;
+    status_t err = stopMsg->postAndAwaitResponse(&response);
+    if (err == OK && response != NULL) {
+        CHECK(response->findInt32("err", &err));
+    } else {
+        ALOGE("Error handling stop in CryptoAsync");
+        //TODO: handle the error here.
+    }
+}
+
+status_t CryptoAsync::decryptAndQueue(sp<AMessage> & msg) {
+    std::shared_ptr<BufferChannelBase> channel = mBufferChannel.lock();
+    status_t err = OK;
+    sp<RefBase> obj;
+    size_t numSubSamples = 0;
+    int32_t secure = 0;
+    CryptoPlugin::Mode mode;
+    CryptoPlugin::Pattern pattern;
+    sp<ABuffer> keyBuffer;
+    sp<ABuffer> ivBuffer;
+    sp<ABuffer> subSamplesBuffer;
+    msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
+    msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
+    msg->findBuffer("key", &keyBuffer);
+    msg->findBuffer("iv", &ivBuffer);
+    msg->findBuffer("subSamples", &subSamplesBuffer);
+    msg->findInt32("secure", &secure);
+    msg->findSize("numSubSamples", &numSubSamples);
+    msg->findObject("buffer", &obj);
+    msg->findInt32("mode", (int32_t*)&mode);
+    AString errorDetailMsg;
+    const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
+    const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
+    const CryptoPlugin::SubSample * subSamples =
+       (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+    err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
+        pattern, subSamples, numSubSamples, &errorDetailMsg);
+    if (err != OK) {
+        std::list<sp<AMessage>> errorList;
+        msg->removeEntryByName("buffer");
+        msg->setInt32("err", err);
+        msg->setInt32("actionCode", ACTION_CODE_FATAL);
+        msg->setString("errorDetail", errorDetailMsg);
+        errorList.push_back(std::move(msg));
+        mCallback->onDecryptError(errorList);
+    }
+    return err;
+}
+
+status_t CryptoAsync::attachEncryptedBufferAndQueue(sp<AMessage> & msg) {
+    std::shared_ptr<BufferChannelBase> channel = mBufferChannel.lock();
+    status_t err = OK;
+    sp<RefBase> obj;
+    sp<RefBase> mem_obj;
+    sp<hardware::HidlMemory> memory;
+    size_t numSubSamples = 0;
+    int32_t secure = 0;
+    size_t offset;
+    size_t size;
+    CryptoPlugin::Mode mode;
+    CryptoPlugin::Pattern pattern;
+    sp<ABuffer> keyBuffer;
+    sp<ABuffer> ivBuffer;
+    sp<ABuffer> subSamplesBuffer;
+    msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
+    msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
+    msg->findBuffer("key", &keyBuffer);
+    msg->findBuffer("iv", &ivBuffer);
+    msg->findBuffer("subSamples", &subSamplesBuffer);
+    msg->findInt32("secure", &secure);
+    msg->findSize("numSubSamples", &numSubSamples);
+    msg->findObject("buffer", &obj);
+    msg->findInt32("mode", (int32_t*)&mode);
+    CHECK(msg->findObject("memory", &mem_obj));
+    CHECK(msg->findSize("offset", (size_t*)&offset));
+    AString errorDetailMsg;
+    // get key info
+    const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
+    // get iv info
+    const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
+
+    const CryptoPlugin::SubSample * subSamples =
+     (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+
+    // get MediaCodecBuffer
+    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+
+    // get HidlMemory
+    memory = static_cast<MediaCodec::WrapperObject<sp<hardware::HidlMemory>> *>
+        (mem_obj.get())->value;
+
+    // attach buffer
+    err = channel->attachEncryptedBuffer(
+        memory, secure, key, iv, mode, pattern,
+        offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+
+    // a generic error
+    auto handleError = [this, &err, &msg]() {
+        std::list<sp<AMessage>> errorList;
+        msg->removeEntryByName("buffer");
+        msg->setInt32("err", err);
+        msg->setInt32("actionCode", ACTION_CODE_FATAL);
+        errorList.push_back(std::move(msg));
+        mCallback->onDecryptError(errorList);
+    };
+    if (err != OK) {
+        handleError();
+        return err;
+    }
+    offset = buffer->offset();
+    size = buffer->size();
+
+    if (offset + size > buffer->capacity()) {
+        err = -ENOSYS;
+        handleError();
+        return err;
+    }
+    buffer->setRange(offset, size);
+    err = channel->queueInputBuffer(buffer);
+    if (err != OK) {
+        handleError();
+        return err;
+    }
+    return err;
+}
+
+void CryptoAsync::onMessageReceived(const sp<AMessage> & msg) {
+    status_t err = OK;
+    auto getCurrentAndNextTask =
+        [this](sp<AMessage> * const  current, uint32_t & nextTask) -> status_t {
+        sp<AMessage> obj;
+        Mutexed<std::list<sp<AMessage>>>::Locked pendingBuffers(mPendingBuffers);
+        if ((pendingBuffers->size() == 0) || (mState != kCryptoAsyncActive)) {
+           return -ENOMSG;
+        }
+        *current = std::move(*(pendingBuffers->begin()));
+        pendingBuffers->pop_front();
+        // Check whether we will be able to process the next buffer.
+        while ((nextTask == kWhatDoNothing) && pendingBuffers->size() > 0)
+        {
+            sp<AMessage> & nextBuffer = pendingBuffers->front();
+            if (nextBuffer == nullptr) {
+                pendingBuffers->pop_front();
+                continue;
+            }
+            nextTask = kWhatDecrypt;
+        }
+        return OK;
+    };
+    switch(msg->what()) {
+        case kWhatDecrypt:
+        {
+            sp<AMessage> thisMsg;
+            uint32_t nextTask = kWhatDoNothing;
+            if (OK != getCurrentAndNextTask(&thisMsg, nextTask)) {
+                return;
+            }
+            if (thisMsg != nullptr) {
+                int32_t action;
+                err = OK;
+                CHECK(thisMsg->findInt32("action", &action));
+                switch(action) {
+                    case kActionDecrypt:
+                    {
+                        err = decryptAndQueue(thisMsg);
+                        break;
+                    }
+
+                    case kActionAttachEncryptedBuffer:
+                    {
+                        err = attachEncryptedBufferAndQueue(thisMsg);
+                        break;
+                    }
+
+                    default:
+                    {
+                        ALOGE("Unrecognized action in decrypt");
+                    }
+                }
+                if (err != OK) {
+                    Mutexed<std::list<sp<AMessage>>>::Locked pendingBuffers(mPendingBuffers);
+                    mState = kCryptoAsyncError;
+                }
+            }
+            // We won't take the next buffers if this buffer caused an error.
+            // We want the caller to deal with the error first. The expected
+            // behaviour is that the caller acknowledges the error with a call
+            // to stop(), which clears the queues, and then moves forward with
+            // processing the next set of buffers.
+            if (mState == kCryptoAsyncActive && nextTask != kWhatDoNothing) {
+                sp<AMessage> nextMsg = new AMessage(nextTask, this);
+                nextMsg->post();
+            }
+            break;
+        }
+
+        case kWhatStop:
+        {
+            typedef std::list<sp<AMessage>> ReturnListType;
+            ReturnListType * returnList = nullptr;
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            sp<AMessage> response = new AMessage;
+            msg->findPointer("remaining", (void**)(&returnList));
+            Mutexed<std::list<sp<AMessage>>>::Locked pendingBuffers(mPendingBuffers);
+            if (returnList) {
+                returnList->clear();
+                returnList->splice(returnList->end(), std::move(*pendingBuffers));
+            }
+            pendingBuffers->clear();
+            mState = kCryptoAsyncActive;
+            response->setInt32("err", OK);
+            response->postReply(replyID);
+
+            break;
+        }
+
+        default:
+        {
+            status_t err = OK;
+            //TODO: do something with error here.
+            (void)err;
+            break;
+        }
+    }
+}
+
+}  // namespace android
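
decryptAndQueue() above reads all of its parameters out of an AMessage. A sketch of how a caller might assemble such a request before handing it to CryptoAsync::decrypt(); the action constant, the crypto mode, and the buffer objects are assumptions taken from this file, not a documented contract.

    #include <media/MediaCodecBuffer.h>
    #include <media/hardware/CryptoAPI.h>
    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Sketch only: build a decrypt request carrying the fields decryptAndQueue() expects.
    sp<AMessage> makeDecryptRequest(int32_t decryptAction,  // e.g. CryptoAsync::kActionDecrypt
                                    const sp<MediaCodecBuffer>& buffer,
                                    const sp<ABuffer>& key, const sp<ABuffer>& iv,
                                    const sp<ABuffer>& subSamples, size_t numSubSamples) {
        sp<AMessage> msg = new AMessage();
        msg->setInt32("action", decryptAction);
        msg->setInt32("secure", 0);
        msg->setInt32("mode", CryptoPlugin::kMode_AES_CTR);  // assumption: CTR chosen for illustration
        msg->setInt32("encryptBlocks", 0);
        msg->setInt32("skipBlocks", 0);
        msg->setBuffer("key", key);
        msg->setBuffer("iv", iv);
        msg->setBuffer("subSamples", subSamples);
        msg->setSize("numSubSamples", numSubSamples);
        msg->setObject("buffer", buffer);
        return msg;
    }
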
diff --git a/media/libstagefright/DataConverter.cpp b/media/libstagefright/DataConverter.cpp
index 52be054..b53ac77 100644
--- a/media/libstagefright/DataConverter.cpp
+++ b/media/libstagefright/DataConverter.cpp
@@ -24,6 +24,10 @@
 #include <media/MediaCodecBuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <system/audio.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
+
 
 namespace android {
 
@@ -81,12 +85,38 @@
     return numSamples * mTargetSampleSize;
 }
 
+static audio_format_t getAudioFormat(AudioEncoding e) {
+    audio_format_t format = AUDIO_FORMAT_INVALID;
+    switch (e) {
+        case kAudioEncodingPcm16bit:
+            format = AUDIO_FORMAT_PCM_16_BIT;
+            break;
+        case kAudioEncodingPcm8bit:
+            format = AUDIO_FORMAT_PCM_8_BIT;
+            break;
+        case kAudioEncodingPcmFloat:
+            format = AUDIO_FORMAT_PCM_FLOAT;
+            break;
+        case kAudioEncodingPcm24bitPacked:
+            format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+            break;
+        case kAudioEncodingPcm32bit:
+            format = AUDIO_FORMAT_PCM_32_BIT;
+            break;
+        default:
+            ALOGE("Invalid AudioEncoding %d", e);
+    }
+    return format;
+}
 
 static size_t getAudioSampleSize(AudioEncoding e) {
     switch (e) {
-        case kAudioEncodingPcm16bit: return 2;
-        case kAudioEncodingPcm8bit:  return 1;
-        case kAudioEncodingPcmFloat: return 4;
+        case kAudioEncodingPcm16bit:
+        case kAudioEncodingPcm8bit:
+        case kAudioEncodingPcmFloat:
+        case kAudioEncodingPcm24bitPacked:
+        case kAudioEncodingPcm32bit:
+            return audio_bytes_per_sample(getAudioFormat(e));
         default: return 0;
     }
 }
@@ -116,7 +146,15 @@
     } else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm16bit) {
         memcpy_to_float_from_i16((float*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
     } else {
-        return INVALID_OPERATION;
+        audio_format_t srcFormat = getAudioFormat(mFrom);
+        audio_format_t dstFormat = getAudioFormat(mTo);
+
+        if ((srcFormat == AUDIO_FORMAT_INVALID) || (dstFormat == AUDIO_FORMAT_INVALID))
+            return INVALID_OPERATION;
+
+        size_t frames = src->size() / audio_bytes_per_sample(srcFormat);
+        memcpy_by_audio_format((void*)tgt->base(), dstFormat, (void*)src->data(),
+                srcFormat, frames);
     }
     return OK;
 }
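
The new fallback path above delegates to audio_utils' generic sample copy. A standalone sketch of the same call for one of the newly supported encodings; the helper function is illustrative.

    #include <audio_utils/format.h>
    #include <cstdint>
    #include <system/audio.h>
    #include <vector>

    // Sketch only: convert packed 24-bit PCM samples to float, as the generic fallback does.
    std::vector<float> pcm24PackedToFloat(const uint8_t* src, size_t sampleCount) {
        std::vector<float> out(sampleCount);
        memcpy_by_audio_format(out.data(), AUDIO_FORMAT_PCM_FLOAT,
                               src, AUDIO_FORMAT_PCM_24_BIT_PACKED, sampleCount);
        return out;
    }
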
diff --git a/media/libstagefright/FrameCaptureLayer.cpp b/media/libstagefright/FrameCaptureLayer.cpp
index d2cfd41..4e71943 100644
--- a/media/libstagefright/FrameCaptureLayer.cpp
+++ b/media/libstagefright/FrameCaptureLayer.cpp
@@ -64,14 +64,6 @@
     return updatedDataspace;
 }
 
-bool isHdrY410(const BufferItem &bi) {
-    ui::Dataspace dataspace = translateDataspace(static_cast<ui::Dataspace>(bi.mDataSpace));
-    // pixel format is HDR Y410 masquerading as RGBA_1010102
-    return ((dataspace == ui::Dataspace::BT2020_ITU_PQ ||
-            dataspace == ui::Dataspace::BT2020_ITU_HLG) &&
-            bi.mGraphicBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102);
-}
-
 struct FrameCaptureLayer::BufferLayer : public FrameCaptureProcessor::Layer {
     BufferLayer(const BufferItem &bi) : mBufferItem(bi) {}
     void getLayerSettings(
@@ -95,7 +87,6 @@
     layerSettings->source.buffer.fence = mBufferItem.mFence;
     layerSettings->source.buffer.textureName = textureName;
     layerSettings->source.buffer.usePremultipliedAlpha = false;
-    layerSettings->source.buffer.isY410BT2020 = isHdrY410(mBufferItem);
     bool hasSmpte2086 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::SMPTE2086;
     bool hasCta861_3 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::CTA861_3;
     layerSettings->source.buffer.maxMasteringLuminance = hasSmpte2086
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 3df8766..b5bd975 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -86,11 +86,14 @@
         displayHeight = height;
     }
 
-    if (allocRotated && (rotationAngle == 90 || rotationAngle == 270)) {
-        int32_t tmp;
-        tmp = width; width = height; height = tmp;
-        tmp = displayWidth; displayWidth = displayHeight; displayHeight = tmp;
-        tmp = tileWidth; tileWidth = tileHeight; tileHeight = tmp;
+    if (allocRotated) {
+        if (rotationAngle == 90 || rotationAngle == 270) {
+            // swap width and height for 90 & 270 degrees rotation
+            std::swap(width, height);
+            std::swap(displayWidth, displayHeight);
+            std::swap(tileWidth, tileHeight);
+        }
+        // Rotation is already applied.
         rotationAngle = 0;
     }
 
@@ -237,6 +240,9 @@
 
     sp<IMemory> metaMem =
             allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth);
+    if (metaMem == nullptr) {
+        return NULL;
+    }
 
     // try to fill sequence meta's duration based on average frame rate,
     // default to 33ms if frame rate is unavailable.
@@ -349,6 +355,10 @@
     status_t err = OK;
     bool done = false;
     size_t retriesLeft = kRetryCount;
+    if (!mDecoder) {
+        ALOGE("decoder is not initialized");
+        return NO_INIT;
+    }
     do {
         size_t index;
         int64_t ptsUs = 0LL;
@@ -535,7 +545,7 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+        videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
     }
 
     // For the thumbnail extraction case, try to allocate single buffer in both
@@ -678,7 +688,6 @@
     if (mCaptureLayer != nullptr) {
         return captureSurface();
     }
-
     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
@@ -691,8 +700,18 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
+    sp<ABuffer> imgObj;
+    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+        MediaImage2 *imageData = nullptr;
+        imageData = (MediaImage2 *)(imgObj.get()->data());
+        if (imageData != nullptr) {
+            converter.setSrcMediaImage2(*imageData);
+        }
+    }
+    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
     converter.setSrcColorSpace(standard, range, transfer);
-
     if (converter.isValid()) {
         converter.convert(
                 (const uint8_t *)videoFrameBuffer->data(),
@@ -857,7 +876,7 @@
     if (dstFormat() == COLOR_Format32bitABGR2101010) {
         videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
     } else {
-        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+        videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
     }
 
     if ((mGridRows == 1) && (mGridCols == 1)) {
@@ -913,7 +932,7 @@
         return ERROR_MALFORMED;
     }
 
-    int32_t width, height, stride, srcFormat;
+    int32_t width, height, stride;
     if (outputFormat->findInt32("width", &width) == false) {
         ALOGE("MediaImageDecoder::onOutputReceived:width is missing in outputFormat");
         return ERROR_MALFORMED;
@@ -926,10 +945,9 @@
         ALOGE("MediaImageDecoder::onOutputReceived:stride is missing in outputFormat");
         return ERROR_MALFORMED;
     }
-    if (outputFormat->findInt32("color-format", &srcFormat) == false) {
-        ALOGE("MediaImageDecoder::onOutputReceived: color format is missing in outputFormat");
-        return ERROR_MALFORMED;
-    }
+
+    int32_t srcFormat;
+    CHECK(outputFormat->findInt32("color-format", &srcFormat));
 
     uint32_t bitDepth = 8;
     if (COLOR_FormatYUVP010 == srcFormat) {
@@ -961,6 +979,17 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
+    sp<ABuffer> imgObj;
+    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+        MediaImage2 *imageData = nullptr;
+        imageData = (MediaImage2 *)(imgObj.get()->data());
+        if (imageData != nullptr) {
+            converter.setSrcMediaImage2(*imageData);
+        }
+    }
+    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
     converter.setSrcColorSpace(standard, range, transfer);
 
     int32_t crop_left, crop_top, crop_right, crop_bottom;
diff --git a/media/libstagefright/HevcUtils.cpp b/media/libstagefright/HevcUtils.cpp
index 5f9c20e..60df162 100644
--- a/media/libstagefright/HevcUtils.cpp
+++ b/media/libstagefright/HevcUtils.cpp
@@ -102,10 +102,11 @@
 static bool findParam(uint32_t key, T *param,
         KeyedVector<uint32_t, uint64_t> &params) {
     CHECK(param);
-    if (params.indexOfKey(key) < 0) {
+    ssize_t index = params.indexOfKey(key);
+    if (index < 0) {
         return false;
     }
-    *param = (T) params[key];
+    *param = (T) params[index];
     return true;
 }
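
The change above matters because KeyedVector::operator[] indexes by position, not by key, so the old params[key] read the wrong slot. A small sketch of the corrected lookup pattern.

    #include <cstdint>
    #include <sys/types.h>
    #include <utils/KeyedVector.h>

    // Sketch only: look a value up by key, then read it by the returned position.
    bool lookupParam(const android::KeyedVector<uint32_t, uint64_t>& params,
                     uint32_t key, uint64_t* out) {
        ssize_t index = params.indexOfKey(key);
        if (index < 0) {
            return false;
        }
        *out = params.valueAt(index);  // positional access; params[key] would be wrong
        return true;
    }
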
 
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 34b840e..fbc684b 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -30,7 +30,7 @@
 #include <media/stagefright/MetaData.h>
 #include <arpa/inet.h>
 
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
 
 namespace android {
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 63d3180..b84dc27 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -51,7 +51,7 @@
 #include <media/mediarecorder.h>
 #include <cutils/properties.h>
 
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
 #include "include/HevcUtils.h"
 
 #ifndef __predict_false
@@ -155,7 +155,10 @@
     void bufferChunk(int64_t timestampUs);
     bool isAvc() const { return mIsAvc; }
     bool isHevc() const { return mIsHevc; }
+    bool isAv1() const { return mIsAv1; }
     bool isHeic() const { return mIsHeic; }
+    bool isAvif() const { return mIsAvif; }
+    bool isHeif() const { return mIsHeif; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
     bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
@@ -319,10 +322,13 @@
     volatile bool mStarted;
     bool mIsAvc;
     bool mIsHevc;
+    bool mIsAv1;
     bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
     bool mIsHeic;
+    bool mIsAvif;
+    bool mIsHeif;
     bool mIsMPEG4;
     bool mGotStartKeyFrame;
     bool mIsMalformed;
@@ -467,6 +473,7 @@
     void writePaspBox();
     void writeAvccBox();
     void writeHvccBox();
+    void writeAv1cBox();
     void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
@@ -547,6 +554,7 @@
     mStreamableFile = false;
     mTimeScale = -1;
     mHasFileLevelMeta = false;
+    mIsAvif = false;
     mFileLevelMetaDataSize = 0;
     mPrimaryItemId = 0;
     mAssociationEntryCount = 0;
@@ -660,11 +668,15 @@
             return "avc1";
         } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
             return "hvc1";
+        } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
+            return "av01";
         }
     } else if (!strncasecmp(mime, "application/", 12)) {
         return "mett";
     } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         return "heic";
+    } else if (!strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
+        return "avif";
     } else {
         ALOGE("Track (%s) other than video/audio/metadata is not supported", mime);
     }
@@ -709,8 +721,9 @@
     Track *track = new Track(this, source, 1 + mTracks.size());
     mTracks.push_back(track);
 
-    mHasMoovBox |= !track->isHeic();
-    mHasFileLevelMeta |= track->isHeic();
+    mHasMoovBox |= !track->isHeif();
+    mHasFileLevelMeta |= track->isHeif();
+    mIsAvif |= track->isAvif();
 
     return OK;
 }
@@ -792,7 +805,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if ((*it)->isHeic()) {
+        if ((*it)->isHeif()) {
             metaSize += (*it)->getMetaSizeIncrease(rotation, mTracks.size());
         }
     }
@@ -994,8 +1007,8 @@
         return err;
     }
 
-    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d",
-            mHasMoovBox, mHasFileLevelMeta);
+    ALOGV("muxer starting: mHasMoovBox %d, mHasFileLevelMeta %d, mIsAvif %d",
+            mHasMoovBox, mHasFileLevelMeta, mIsAvif);
 
     err = startWriterThread();
     if (err != OK) {
@@ -1311,7 +1324,7 @@
         }
 
         // skip image tracks
-        if ((*it)->isHeic()) continue;
+        if ((*it)->isHeif()) continue;
         nonImageTrackCount++;
 
         int64_t durationUs = (*it)->getDurationUs();
@@ -1489,7 +1502,7 @@
     int64_t minCttsOffsetTimeUs = kMaxCttsOffsetTimeUs;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic()) {
+        if (!(*it)->isHeif()) {
             minCttsOffsetTimeUs =
                 std::min(minCttsOffsetTimeUs, (*it)->getMinCttsOffsetTimeUs());
         }
@@ -1505,7 +1518,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic()) {
+        if (!(*it)->isHeif()) {
             (*it)->writeTrackHeader();
         }
     }
@@ -1525,22 +1538,41 @@
         writeFourcc("isom");
         writeFourcc("3gp4");
     } else {
-        // Only write "heic" as major brand if the client specified HEIF
-        // AND we indeed receive some image heic tracks.
+        // Only write "heic"/"avif" as major brand if the client specified HEIF/AVIF
+        // AND we indeed receive some image heic/avif tracks.
         if (fileType == OUTPUT_FORMAT_HEIF && mHasFileLevelMeta) {
-            writeFourcc("heic");
+            if (mIsAvif) {
+                writeFourcc("avif");
+            } else {
+                writeFourcc("heic");
+            }
         } else {
             writeFourcc("mp42");
         }
         writeInt32(0);
         if (mHasFileLevelMeta) {
-            writeFourcc("mif1");
-            writeFourcc("heic");
+            if (mIsAvif) {
+                writeFourcc("mif1");
+                writeFourcc("miaf");
+                writeFourcc("avif");
+            } else {
+                writeFourcc("mif1");
+                writeFourcc("heic");
+            }
         }
         if (mHasMoovBox) {
             writeFourcc("isom");
             writeFourcc("mp42");
         }
+        // If an AV1 video track is present, write "av01" as one of the
+        // compatible brands.
+        for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end();
+             ++it) {
+            if ((*it)->isAv1()) {
+                writeFourcc("av01");
+                break;
+            }
+        }
     }
 
     endBox();
@@ -2103,7 +2135,8 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
          it != mTracks.end(); ++it) {
-        if (!(*it)->isHeic() && (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
+        if (!(*it)->isHeif() &&
+                (*it)->getDurationUs() >= mMaxFileDurationLimitUs) {
             return true;
         }
     }
@@ -2205,10 +2238,13 @@
     mMeta->findCString(kKeyMIMEType, &mime);
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsAv1 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1);
     mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
     mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    mIsAvif = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF);
+    mIsHeif = mIsHeic || mIsAvif;
     mIsMPEG4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC);
 
@@ -2220,7 +2256,7 @@
         }
     }
 
-    if (!mIsHeic) {
+    if (!mIsHeif) {
         setTimeScale();
     } else {
         CHECK(mMeta->findInt32(kKeyWidth, &mWidth) && (mWidth > 0));
@@ -2301,7 +2337,7 @@
 
 void MPEG4Writer::Track::updateTrackSizeEstimate() {
     mEstimatedTrackSizeBytes = mMdatSizeBytes;  // media data size
-    if (!isHeic() && !mOwner->isFileStreamable()) {
+    if (!isHeif() && !mOwner->isFileStreamable()) {
         mEstimatedTrackSizeBytes += trackMetaDataSize();
     }
 }
@@ -2384,7 +2420,7 @@
 
 bool MPEG4Writer::Track::isExifData(
         MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
-    if (!mIsHeic) {
+    if (!mIsHeif) {
         return false;
     }
 
@@ -2413,12 +2449,12 @@
 }
 
 void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
-    CHECK(!mIsHeic);
+    CHECK(!mIsHeif);
     mCo64TableEntries->add(hton64(offset));
 }
 
 void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     if (offset > UINT32_MAX || size > UINT32_MAX) {
         ALOGE("offset or size is out of range: %lld, %lld",
@@ -2464,8 +2500,10 @@
 
     if (mProperties.empty()) {
         mProperties.push_back(mOwner->addProperty_l({
-            .type = FOURCC('h', 'v', 'c', 'C'),
-            .hvcc = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
+            .type = static_cast<uint32_t>(mIsAvif ?
+                  FOURCC('a', 'v', '1', 'C') :
+                  FOURCC('h', 'v', 'c', 'C')),
+            .data = ABuffer::CreateAsCopy(mCodecSpecificData, mCodecSpecificDataSize)
         }));
 
         mProperties.push_back(mOwner->addProperty_l({
@@ -2485,7 +2523,7 @@
     mTileIndex++;
     if (hasGrid) {
         mDimgRefs.value.push_back(mOwner->addItem_l({
-            .itemType = "hvc1",
+            .itemType = mIsAvif ? "av01" : "hvc1",
             .itemId = mItemIdBase++,
             .isPrimary = false,
             .isHidden = true,
@@ -2521,7 +2559,7 @@
         }
     } else {
         mImageItemId = mOwner->addItem_l({
-            .itemType = "hvc1",
+            .itemType = mIsAvif ? "av01" : "hvc1",
             .itemId = mItemIdBase++,
             .isPrimary = (mIsPrimary != 0),
             .isHidden = false,
@@ -2538,7 +2576,7 @@
 // it affects the 'dimg' refs for tiled image, as we only have the refs after the
 // last tile sample is written.
 void MPEG4Writer::Track::flushItemRefs() {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     if (mImageItemId > 0) {
         mOwner->addRefs_l(mImageItemId, mDimgRefs);
@@ -2639,6 +2677,9 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1) ||
+               !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
+        mMeta->findData(kKeyAV1C, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         getDolbyVisionProfile();
         if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
@@ -2749,7 +2790,7 @@
         size_t bytesWritten;
         off64_t offset = addSample_l(*it, usePrefix, tiffHdrOffset, &bytesWritten);
 
-        if (chunk->mTrack->isHeic()) {
+        if (chunk->mTrack->isHeif()) {
             chunk->mTrack->addItemOffsetAndSize(offset, bytesWritten, isExif);
         } else if (isFirstSample) {
             chunk->mTrack->addChunkOffset(offset);
@@ -2901,11 +2942,11 @@
     mStartTimeRealUs = startTimeUs;
 
     int32_t rotationDegrees;
-    if ((mIsVideo || mIsHeic) && params &&
+    if ((mIsVideo || mIsHeif) && params &&
             params->findInt32(kKeyRotation, &rotationDegrees)) {
         mRotation = rotationDegrees;
     }
-    if (mIsHeic) {
+    if (mIsHeif) {
         // Reserve the item ids, so that the item ids are ordered in the same
         // order that the image tracks are added.
         // If we leave the item ids to be assigned when the sample is written out,
@@ -3540,7 +3581,7 @@
                             (const uint8_t *)buffer->data()
                                 + buffer->range_offset(),
                             buffer->range_length());
-                } else if (mIsMPEG4) {
+                } else if (mIsMPEG4 || mIsAv1) {
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
@@ -3581,7 +3622,7 @@
         }
 
         // Per-frame metadata sample's size must be smaller than max allowed.
-        if (!mIsVideo && !mIsAudio && !mIsHeic &&
+        if (!mIsVideo && !mIsAudio && !mIsHeif &&
                 buffer->range_length() >= kMaxMetadataSize) {
             ALOGW("Buffer size is %zu. Maximum metadata buffer size is %lld for %s track",
                     buffer->range_length(), (long long)kMaxMetadataSize, trackName);
@@ -3705,7 +3746,7 @@
             mGotStartKeyFrame = true;
         }
 ////////////////////////////////////////////////////////////////////////////////
-        if (!mIsHeic) {
+        if (!mIsHeif) {
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
                 if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
@@ -3925,7 +3966,7 @@
             off64_t offset = mOwner->addSample_l(
                     copy, usePrefix, tiffHdrOffset, &bytesWritten);
 
-            if (mIsHeic) {
+            if (mIsHeif) {
                 addItemOffsetAndSize(offset, bytesWritten, isExif);
             } else {
                 if (mCo64TableEntries->count() == 0) {
@@ -3938,7 +3979,7 @@
         }
 
         mChunkSamples.push_back(copy);
-        if (mIsHeic) {
+        if (mIsHeif) {
             bufferChunk(0 /*timestampUs*/);
             ++nChunks;
         } else if (interleaveDurationUs == 0) {
@@ -3976,7 +4017,7 @@
 
     // Add final entries only for non-empty tracks.
     if (mStszTableEntries->count() > 0) {
-        if (mIsHeic) {
+        if (mIsHeif) {
             if (!mChunkSamples.empty()) {
                 bufferChunk(0);
                 ++nChunks;
@@ -4049,7 +4090,7 @@
         mOwner->mStartMeta->findInt32(kKeyEmptyTrackMalFormed, &emptyTrackMalformed) &&
         emptyTrackMalformed) {
         // MediaRecorder (which sets kKeyEmptyTrackMalFormed by default) reports empty tracks as malformed.
-        if (!mIsHeic && mStszTableEntries->count() == 0) {  // no samples written
+        if (!mIsHeif && mStszTableEntries->count() == 0) {  // no samples written
             ALOGE("The number of recorded samples is 0");
             mIsMalformed = true;
             return true;
@@ -4212,7 +4253,7 @@
 
 int32_t MPEG4Writer::Track::getMetaSizeIncrease(
         int32_t angle, int32_t trackCount) const {
-    CHECK(mIsHeic);
+    CHECK(mIsHeif);
 
     int32_t grid = (mTileWidth > 0);
     int32_t rotate = (angle > 0);
@@ -4262,8 +4303,10 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
-        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_IMAGE_AVIF, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
             ALOGE("Missing codec specific data");
@@ -4282,7 +4325,7 @@
 const char *MPEG4Writer::Track::getTrackType() const {
     return mIsAudio ? "Audio" :
            mIsVideo ? "Video" :
-           mIsHeic  ? "Image" :
+           mIsHeif  ? "Image" :
                       "Metadata";
 }
 
@@ -4433,6 +4476,8 @@
         writeAvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
         writeHvccBox();
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AV1, mime)) {
+        writeAv1cBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
         if (mDoviProfile <= DolbyVisionProfileDvheSt) {
             writeHvccBox();
@@ -5000,6 +5045,15 @@
     mOwner->endBox();  // hvcC
 }
 
+void MPEG4Writer::Track::writeAv1cBox() {
+    CHECK(mCodecSpecificData);
+    CHECK_GE(mCodecSpecificDataSize, 4u);
+
+    mOwner->beginBox("av1C");
+    mOwner->write(mCodecSpecificData, mCodecSpecificDataSize);
+    mOwner->endBox();  // av1C
+}
+
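writeAv1cBox() above copies the codec-specific data stored under kKeyAV1C verbatim as the box payload; the 4-byte minimum it checks corresponds to the fixed header of the AV1CodecConfigurationRecord. The layout sketched below is a reading of the AV1 ISOBMFF binding, not something defined by this patch:

// Approximate AV1CodecConfigurationRecord layout (for orientation only):
//   byte 0 : marker (1 bit, always 1) | version (7 bits, = 1)
//   byte 1 : seq_profile (3 bits) | seq_level_idx_0 (5 bits)
//   byte 2 : seq_tier_0, high_bitdepth, twelve_bit, monochrome,
//            chroma_subsampling_x, chroma_subsampling_y, chroma_sample_position
//   byte 3 : reserved | initial_presentation_delay fields
//   byte 4+: configOBUs (typically the sequence header OBU)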
 void MPEG4Writer::Track::writeDoviConfigBox() {
     CHECK_NE(mDoviProfile, 0u);
 
@@ -5384,7 +5438,7 @@
             case FOURCC('h', 'v', 'c', 'C'):
             {
                 beginBox("hvcC");
-                sp<ABuffer> hvcc = mProperties[propIndex].hvcc;
+                sp<ABuffer> hvcc = mProperties[propIndex].data;
                 // Patch the hvcC's lengthSize field to match the number
                 // of bytes we use to indicate the size of a nal unit.
                 uint8_t *ptr = (uint8_t *)hvcc->data();
@@ -5393,6 +5447,14 @@
                 endBox();
                 break;
             }
+            case FOURCC('a', 'v', '1', 'C'):
+            {
+                beginBox("av1C");
+                sp<ABuffer> av1c = mProperties[propIndex].data;
+                write(av1c->data(), av1c->size());
+                endBox();
+                break;
+            }
             case FOURCC('i', 's', 'p', 'e'):
             {
                 beginBox("ispe");
@@ -5496,7 +5558,7 @@
 
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it) {
-        if ((*it)->isHeic()) {
+        if ((*it)->isHeif()) {
             (*it)->flushItemRefs();
         }
     }
diff --git a/media/libstagefright/MediaAppender.cpp b/media/libstagefright/MediaAppender.cpp
index 21dcfa1..2d9c651 100644
--- a/media/libstagefright/MediaAppender.cpp
+++ b/media/libstagefright/MediaAppender.cpp
@@ -308,7 +308,11 @@
         ALOGE("MediaAppender::start() is called in invalid state %d", mState);
         return INVALID_OPERATION;
     }
-    mMuxer = new (std::nothrow) MediaMuxer(mFd, mFormat);
+    mMuxer = MediaMuxer::create(mFd, mFormat);
+    if (mMuxer == nullptr) {
+        ALOGE("MediaMuxer::create failed");
+        return INVALID_OPERATION;
+    }
     for (const auto& n : mFmtIndexMap) {
         ssize_t muxIndex = mMuxer->addTrack(n.second);
         if (muxIndex < 0) {
diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp
index 24608a7..ed0819d 100644
--- a/media/libstagefright/MediaClock.cpp
+++ b/media/libstagefright/MediaClock.cpp
@@ -110,8 +110,12 @@
     if (mAnchorTimeRealUs != -1) {
         int64_t oldNowMediaUs =
             mAnchorTimeMediaUs + (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
-        if (nowMediaUs < oldNowMediaUs + kAnchorFluctuationAllowedUs
-                && nowMediaUs > oldNowMediaUs - kAnchorFluctuationAllowedUs) {
+        // Earlier, we ensured that the anchor times are non-negative and that
+        // the math computing the now/oldNow times stays non-negative.
+        // Casting to uint64_t gives us headroom to avoid overflow at the upper
+        // end when adding the fluctuation allowance.
+        if ((uint64_t)nowMediaUs < (uint64_t)oldNowMediaUs + kAnchorFluctuationAllowedUs
+                && (uint64_t)nowMediaUs + kAnchorFluctuationAllowedUs > (uint64_t)oldNowMediaUs) {
             return;
         }
     }
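A minimal standalone sketch of the overflow argument in the comment above; withinAllowance() is a hypothetical helper, not part of this change:

#include <cstdint>

// For non-negative media times, doing the comparison in uint64_t avoids the
// signed overflow that oldNowMediaUs + allowanceUs could hit near INT64_MAX,
// while preserving the ordering of the values being compared.
static bool withinAllowance(int64_t nowMediaUs, int64_t oldNowMediaUs,
                            uint64_t allowanceUs) {
    return (uint64_t)nowMediaUs < (uint64_t)oldNowMediaUs + allowanceUs
        && (uint64_t)nowMediaUs + allowanceUs > (uint64_t)oldNowMediaUs;
}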
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e50880a..e0ebc11 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,18 +19,18 @@
 #define LOG_TAG "MediaCodec"
 #include <utils/Log.h>
 
+#include <dlfcn.h>
+#include <inttypes.h>
+#include <random>
 #include <set>
 #include <stdlib.h>
-
-#include <inttypes.h>
-#include <stdlib.h>
-#include <dlfcn.h>
+#include <string>
 
 #include <C2Buffer.h>
 
 #include "include/SoftwareRenderer.h"
-#include "PlaybackDurationAccumulator.h"
 
+#include <android/api-level.h>
 #include <android/binder_manager.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -41,6 +41,7 @@
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
 #include <android/dlext.h>
+#include <android-base/stringprintf.h>
 #include <binder/IMemory.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
@@ -69,18 +70,19 @@
 #include <media/stagefright/BatteryChecker.h>
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/CCodec.h>
+#include <media/stagefright/CryptoAsync.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaFilter.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
+#include <server_configurable_flags/get_flags.h>
 #include <utils/Singleton.h>
 
 namespace android {
@@ -89,6 +91,9 @@
 using aidl::android::media::BnResourceManagerClient;
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
+using aidl::android::media::ClientInfoParcel;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
 
 // key for media statistics
 static const char *kCodecKeyName = "codec";
@@ -98,6 +103,7 @@
 // These must be kept synchronized with the constants there.
 static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
 static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
+static const char *kCodecId = "android.media.mediacodec.id";
 static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
@@ -105,7 +111,9 @@
 static const char *kCodecModeImage = "image";
 static const char *kCodecModeUnknown = "unknown";
 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
+static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
+static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
 static const char *kCodecHeight = "android.media.mediacodec.height";   /* 0..n */
 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
@@ -114,15 +122,6 @@
 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
 static const char *kCodecPriority = "android.media.mediacodec.priority";
-static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
-static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
-static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
-static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
-static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
-static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
-static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
-static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -160,6 +159,7 @@
 static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
 static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
 static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
+static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
 
 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
@@ -171,6 +171,29 @@
 static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
 static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
 static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
+// HDR metrics
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
+// array/sync/async/block modes
+static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
+static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
+static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
+// max size configured by the app
+static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
+// max size actually used
+static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
+// max size suggested by the codec
+static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
+static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
+static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
+static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
 
 // the kCodecRecent* fields appear only in getMetrics() results
 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
@@ -178,13 +201,72 @@
 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
-static const char *kCodecPlaybackDurationSec =
-        "android.media.mediacodec.playback-duration-sec"; /* in sec */
 
 /* -1: shaper disabled
    >=0: number of fields changed */
 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
 
+// Render metrics
+static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
+static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
+static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
+static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
+static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
+static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
+static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
+static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
+// Freeze
+static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
+static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
+static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
+static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
+static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
+static const char *kCodecFreezeDurationMsHistogram =
+        "android.media.mediacodec.freeze-duration-ms-histogram";
+static const char *kCodecFreezeDurationMsHistogramBuckets =
+        "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
+static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
+static const char *kCodecFreezeDistanceMsHistogram =
+        "android.media.mediacodec.freeze-distance-ms-histogram";
+static const char *kCodecFreezeDistanceMsHistogramBuckets =
+        "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
+// Judder
+static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
+static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
+static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
+static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
+static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
+static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
+static const char *kCodecJudderScoreHistogramBuckets =
+        "android.media.mediacodec.judder-score-histogram-buckets";
+// Freeze event
+static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
+static const char *kFreezeEventKeyName = "freeze";
+static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
+static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
+static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
+static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
+static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
+static const char *kFreezeEventDetailsDurationMs =
+        "android.media.mediacodec.freeze.details-duration-ms";
+static const char *kFreezeEventDetailsDistanceMs =
+        "android.media.mediacodec.freeze.details-distance-ms";
+// Judder event
+static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
+static const char *kJudderEventKeyName = "judder";
+static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
+static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
+static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
+static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
+static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
+static const char *kJudderEventDetailsActualDurationUs =
+        "android.media.mediacodec.judder.details-actual-duration-us";
+static const char *kJudderEventDetailsContentDurationUs =
+        "android.media.mediacodec.judder.details-content-duration-us";
+static const char *kJudderEventDetailsDistanceMs =
+        "android.media.mediacodec.judder.details-distance-ms";
+
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
@@ -210,20 +292,23 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 struct ResourceManagerClient : public BnResourceManagerClient {
-    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
-            mMediaCodec(codec), mPid(pid) {}
+    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
+            mMediaCodec(codec), mPid(pid), mUid(uid) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         sp<MediaCodec> codec = mMediaCodec.promote();
         if (codec == NULL) {
             // Codec is already gone, so remove the resources as well
-            ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+            ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
             std::shared_ptr<IResourceManagerService> service =
                     IResourceManagerService::fromBinder(binder);
             if (service == nullptr) {
                 ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
             }
-            service->removeClient(mPid, getId(this));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(this)};
+            service->removeClient(clientInfo);
             *_aidl_return = true;
             return Status::ok();
         }
@@ -261,6 +346,7 @@
 private:
     wp<MediaCodec> mMediaCodec;
     int32_t mPid;
+    int32_t mUid;
 
     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
 };
@@ -285,11 +371,20 @@
     void removeClient();
     void markClientForPendingRemoval();
     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
+    void notifyClientCreated();
+    void notifyClientStarted(ClientConfigParcel& clientConfig);
+    void notifyClientStopped(ClientConfigParcel& clientConfig);
+    void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
+
+    inline void setCodecName(const char* name) {
+        mCodecName = name;
+    }
 
 private:
     Mutex mLock;
     pid_t mPid;
     uid_t mUid;
+    std::string mCodecName;
     std::shared_ptr<IResourceManagerService> mService;
     std::shared_ptr<IResourceManagerClient> mClient;
     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
@@ -321,7 +416,7 @@
 }
 
 status_t MediaCodec::ResourceManagerServiceProxy::init() {
-    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
     mService = IResourceManagerService::fromBinder(binder);
     if (mService == nullptr) {
         ALOGE("Failed to get ResourceManagerService");
@@ -354,8 +449,11 @@
 }
 
 //static
-Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
-std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
+// These are marked no_destroy to keep them from being destroyed at process
+// exit when some thread calls exit() while other threads are still running.
+// See b/194783918.
+[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
+[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
 
 //static
 void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
@@ -393,7 +491,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->addResource(clientInfo, mClient, resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeResource(
@@ -405,7 +507,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->removeResource(mPid, getId(mClient), resources);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->removeResource(clientInfo, resources);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
@@ -413,7 +519,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->removeClient(mPid, getId(mClient));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->removeClient(clientInfo);
 }
 
 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
@@ -421,7 +531,11 @@
     if (mService == nullptr) {
         return;
     }
-    mService->markClientForPendingRemoval(mPid, getId(mClient));
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->markClientForPendingRemoval(clientInfo);
 }
 
 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
@@ -431,10 +545,49 @@
         return false;
     }
     bool success;
-    Status status = mService->reclaimResource(mPid, resources, &success);
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    Status status = mService->reclaimResource(clientInfo, resources, &success);
     return status.isOk() && success;
 }
 
+void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                .uid = static_cast<int32_t>(mUid),
+                                .id = getId(mClient),
+                                .name = mCodecName};
+    mService->notifyClientCreated(clientInfo);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
+        ClientConfigParcel& clientConfig) {
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    mService->notifyClientStarted(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
+        ClientConfigParcel& clientConfig) {
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    mService->notifyClientStopped(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
+        ClientConfigParcel& clientConfig) {
+    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+    clientConfig.clientInfo.id = getId(mClient);
+    clientConfig.clientInfo.name = mCodecName;
+    mService->notifyClientConfigChanged(clientConfig);
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
@@ -493,6 +646,7 @@
     kWhatReleaseCompleted    = 'rcom',
     kWhatFlushCompleted      = 'fcom',
     kWhatError               = 'erro',
+    kWhatCryptoError         = 'ercp',
     kWhatComponentAllocated  = 'cAll',
     kWhatComponentConfigured = 'cCon',
     kWhatInputSurfaceCreated = 'isfc',
@@ -501,6 +655,41 @@
     kWhatOutputFramesRendered = 'outR',
     kWhatOutputBuffersChanged = 'outC',
     kWhatFirstTunnelFrameReady = 'ftfR',
+    kWhatPollForRenderedBuffers = 'plrb',
+};
+
+class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
+public:
+
+    explicit CryptoAsyncCallback(const sp<AMessage> &notify) : mNotify(notify) {}
+
+    ~CryptoAsyncCallback() {}
+
+    void onDecryptComplete(const sp<AMessage> &result) override {
+        (void)result;
+    }
+
+    void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
+        // This may be a decrypt or a queue error.
+        status_t errorCode;
+        for (auto &emsg : errorMsgs) {
+            sp<AMessage> notify(mNotify->dup());
+            if (emsg->findInt32("err", &errorCode)) {
+                if (isCryptoError(errorCode)) {
+                    notify->setInt32("what", kWhatCryptoError);
+                } else {
+                    notify->setInt32("what", kWhatError);
+                }
+                notify->extend(emsg);
+                notify->post();
+            } else {
+                ALOGW("Buffers with no errorCode are not expected");
+            }
+        }
+    }
+private:
+    const sp<AMessage> mNotify;
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -791,6 +980,23 @@
     return new PersistentSurface(bufferProducer, bufferSource);
 }
 
+// GenerateCodecId generates a 64-bit random ID for each codec that is created.
+// The codec ID is composed of:
+//   - a process-unique random value in the high 32 bits
+//   - an atomically incremented sequence number in the low 32 bits
+//
+static uint64_t GenerateCodecId() {
+    static std::atomic_uint64_t sId = [] {
+        std::random_device rd;
+        std::mt19937 gen(rd());
+        std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
+        uint32_t randomID = distrib(gen);
+        uint64_t id = randomID;
+        return id << 32;
+    }();
+    return sId++;
+}
+
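A quick illustration of the resulting ID structure (ignoring low-word wraparound after 2^32 codecs in one process):

uint64_t first  = GenerateCodecId();
uint64_t second = GenerateCodecId();
// Same process-unique random prefix:  (first >> 32) == (second >> 32)
// Consecutive sequence numbers:       (uint32_t)second == (uint32_t)first + 1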
 MediaCodec::MediaCodec(
         const sp<ALooper> &looper, pid_t pid, uid_t uid,
         std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
@@ -807,9 +1013,6 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mConfigColorTransfer(-1),
-      mHDRStaticInfo(false),
-      mHDR10PlusInfo(false),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -821,8 +1024,10 @@
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
-      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
-      mIsSurfaceToScreen(false),
+      mIsSurfaceToDisplay(false),
+      mVideoRenderQualityTracker(
+              VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+                      server_configurable_flags::GetServerConfigurableFlag)),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -835,18 +1040,21 @@
       mInputBufferCounter(0),
       mGetCodecBase(getCodecBase),
       mGetCodecInfo(getCodecInfo) {
+    mCodecId = GenerateCodecId();
     mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
-            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
+            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
     if (!mGetCodecBase) {
         mGetCodecBase = [](const AString &name, const char *owner) {
             return GetCodecBase(name, owner);
         };
     }
     if (!mGetCodecInfo) {
-        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+        mGetCodecInfo = [&log = mErrorLog](const AString &name,
+                                           sp<MediaCodecInfo> *info) -> status_t {
             *info = nullptr;
             const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
             if (!mcl) {
+                log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
                 return NO_INIT;  // if called from Java should raise IOException
             }
             AString tmp = name;
@@ -861,9 +1069,14 @@
                 *info = mcl->getCodecInfo(codecIdx);
                 return OK;
             }
+            log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
+                                  name.c_str()));
             return NAME_NOT_FOUND;
         };
     }
+
+    // We want an empty metrics record for any early getMetrics() call;
+    // this should be the *only* initMediametrics() call that's not on the looper thread.
     initMediametrics();
 }
 
@@ -872,8 +1085,17 @@
     mResourceManagerProxy->removeClient();
 
     flushMediametrics();
+
+    // clean any saved metrics info we stored as part of configure()
+    if (mConfigureMsg != nullptr) {
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
 }
 
+// Except for the call from the constructor, this runs on the looper thread (and is therefore mutexed).
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -900,14 +1122,96 @@
     }
 
     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    resetMetricsFields();
+}
+
+void MediaCodec::resetMetricsFields() {
+    mHdrInfoFlags = 0;
+
+    mApiUsageMetrics = ApiUsageMetrics();
+    mReliabilityContextMetrics = ReliabilityContextMetrics();
 }
 
 void MediaCodec::updateMediametrics() {
-    ALOGV("MediaCodec::updateMediametrics");
     if (mMetricsHandle == 0) {
+        ALOGW("no metrics handle found");
         return;
     }
 
+    Mutex::Autolock _lock(mMetricsLock);
+
+    mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
+    mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
+            ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
+                    : ApiUsageMetrics::kAsynchronousMode)
+            : ApiUsageMetrics::kSynchronousMode;
+    mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
+    mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
+            mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
+
+    mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.appMax);
+    mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.usedMax);
+    mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
+            mApiUsageMetrics.inputBufferSize.codecMax);
+
+    mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
+    mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
+            mReliabilityContextMetrics.setOutputSurfaceCount);
+    mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
+            mReliabilityContextMetrics.resolutionChangeCount);
+
+    // Video rendering quality metrics
+    {
+        const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
+        if (m.frameReleasedCount > 0) {
+            mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
+            mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
+            mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
+        }
+        if (m.freezeDurationMsHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
+            mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
+            mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
+            mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.freezeDistanceMsHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.judderScoreHistogram.getCount() >= 1) {
+            const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
+            mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
+            mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
+            mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
+            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
+            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
+                                   h.emitBuckets());
+        }
+        if (m.freezeEventCount != 0) {
+            mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
+        }
+        if (m.judderEventCount != 0) {
+            mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
+        }
+    }
+
     if (mLatencyHist.getCount() != 0 ) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -923,7 +1227,7 @@
     if (mLatencyUnknown > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
     }
-    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
+    int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
     if (playbackDurationSec > 0) {
         mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
     }
@@ -955,29 +1259,73 @@
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
 
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
-    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
-void MediaCodec::updateHDRFormatMetric() {
+void MediaCodec::updateHdrMetrics(bool isConfig) {
+    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
+        return;
+    }
+
+    int32_t colorStandard = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
+    }
+    int32_t colorRange = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
+    }
+    int32_t colorTransfer = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
+    }
+    HDRStaticInfo info;
+    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
+            && ColorUtils::isHDRStaticInfoValid(&info)) {
+        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
+            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
+    sp<ABuffer> hdr10PlusInfo;
+    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
+            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
+
+    // hdr format
+    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
+
+    AString mime;
     int32_t profile = -1;
-    AString mediaType;
-    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
-            && mOutputFormat->findString("mime", &mediaType)) {
-        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
-        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+
+    if (codedFormat->findString("mime", &mime)
+            && codedFormat->findInt32(KEY_PROFILE, &profile)
+            && colorTransfer != -1) {
+        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
+        mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
     }
 }
 
-hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
-        const AString &mediaType) {
-    switch (transfer) {
+hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    return (mFlags & kFlagIsEncoder)
+            ? getHdrFormatForEncoder(mime, profile, colorTransfer)
+            : getHdrFormatForDecoder(mime, profile, colorTransfer);
+}
+
+hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
         case COLOR_TRANSFER_ST2084:
-            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+            if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
                 switch (profile) {
                     case VP9Profile2HDR:
                         return HDR_FORMAT_HDR10;
@@ -986,7 +1334,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
                 switch (profile) {
                     case AV1ProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -995,7 +1343,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
                 switch (profile) {
                     case HEVCProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -1008,7 +1356,7 @@
                 return HDR_FORMAT_NONE;
             }
         case COLOR_TRANSFER_HLG:
-            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
                 return HDR_FORMAT_HLG;
             } else {
                 // TODO: DOLBY format
@@ -1019,7 +1367,50 @@
     }
 }
 
+hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
+                return HDR_FORMAT_NONE;
+            }
+            return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
+        case COLOR_TRANSFER_HLG:
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            }
+            // TODO: DOLBY format
+    }
+    return HDR_FORMAT_NONE;
+}
 
+bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
+    if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+        return true;
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+        switch (profile) {
+            case VP9Profile2:
+            case VP9Profile3:
+            case VP9Profile2HDR:
+            case VP9Profile3HDR:
+            case VP9Profile2HDR10Plus:
+            case VP9Profile3HDR10Plus:
+                return true;
+        }
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        switch (profile) {
+            case HEVCProfileMain10:
+            case HEVCProfileMain10HDR10:
+            case HEVCProfileMain10HDR10Plus:
+                return true;
+        }
+    }
+    return false;
+}
+
+
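Combining getHdrFormatForDecoder() and profileSupport10Bits() above, some concrete classifications (the static-info/HDR10+ flags are assumed to have been set by updateHdrMetrics() where noted):

//   mime  profile                  transfer  flags set            -> hdr_format
//   HEVC  HEVCProfileMain10HDR10   ST2084    static info          -> HDR_FORMAT_HDR10
//   HEVC  HEVCProfileMain10HDR10   ST2084    static + hdr10-plus  -> HDR_FORMAT_HDR10PLUS
//   VP9   VP9Profile0              ST2084    static info          -> HDR_FORMAT_NONE (not a 10-bit profile)
//   AV1   (any)                    HLG       (none required)      -> HDR_FORMAT_HLG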
+// Called to update the info passed back via getMetrics(); that is a unique
+// copy for the call, so there are no concurrent-access concerns.
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -1027,16 +1418,15 @@
         return;
     }
 
-    Histogram recentHist;
-
     // build an empty histogram
+    MediaHistogram<int64_t> recentHist;
     recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
 
     // stuff it with the samples in the ring buffer
     {
         Mutex::Autolock al(mRecentLock);
 
-        for (int i=0; i<kRecentLatencyFrames; i++) {
+        for (int i = 0; i < kRecentLatencyFrames; i++) {
             if (mRecentSamples[i] != kRecentSampleInvalid) {
                 recentHist.insert(mRecentSamples[i]);
             }
@@ -1044,7 +1434,7 @@
     }
 
     // spit the data (if any) into the supplied analytics record
-    if (recentHist.getCount()!= 0 ) {
+    if (recentHist.getCount() != 0) {
         mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
         mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
         mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
@@ -1058,15 +1448,75 @@
     }
 }
 
+static std::string emitVector(std::vector<int32_t> vector) {
+    std::ostringstream sstr;
+    for (size_t i = 0; i < vector.size(); ++i) {
+        if (i != 0) {
+            sstr << ',';
+        }
+        sstr << vector[i];
+    }
+    return sstr.str();
+}
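A quick usage example of the helper above:

std::string s = emitVector({34, 62, 30});  // s == "34,62,30"
// This comma-separated form is what the freeze/judder event reporters below
// store under the details-* keys.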
+
+static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
+    if (e.valid) {
+        mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
+        mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
+        mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
+        mediametrics_setInt64(handle, kFreezeEventCount, e.count);
+        mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
+        mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
+        mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
+                               emitVector(e.details.durationMs));
+        mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
+                               emitVector(e.details.distanceMs));
+        mediametrics_selfRecord(handle);
+        mediametrics_delete(handle);
+    }
+}
+
+static void reportToMediaMetricsIfValid(const JudderEvent &e) {
+    if (e.valid) {
+        mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
+        mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
+        mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
+        mediametrics_setInt64(handle, kJudderEventCount, e.count);
+        mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
+        mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
+        mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
+                               emitVector(e.details.actualRenderDurationUs));
+        mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
+                               emitVector(e.details.contentRenderDurationUs));
+        mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
+                               emitVector(e.details.distanceMs));
+        mediametrics_selfRecord(handle);
+        mediametrics_delete(handle);
+    }
+}
+
 void MediaCodec::flushMediametrics() {
+    ALOGD("flushMediametrics");
+
+    // updateMediametrics() does its own mutex locking
     updateMediametrics();
+    resetMetricsFields();
+
+    // hold the mutex while we do our own work
+    Mutex::Autolock _lock(mMetricsLock);
     if (mMetricsHandle != 0) {
-        if (mediametrics_count(mMetricsHandle) > 0) {
+        if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
         }
         mediametrics_delete(mMetricsHandle);
         mMetricsHandle = 0;
     }
+    // we no longer have anything pending upload
+    mMetricsToUpload = false;
+
+    // Freeze and judder events are reported separately
+    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
+    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
 }
 
 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1152,116 +1602,43 @@
     ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
 }
 
-void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
+void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
     int what = 0;
     msg->findInt32("what", &what);
     if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
         static bool logged = false;
         if (!logged) {
             logged = true;
-            ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
+            ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
         }
         return;
     }
-    // Playback duration only counts if the buffers are going to the screen.
-    if (!mIsSurfaceToScreen) {
-        return;
-    }
-    int64_t renderTimeNs;
-    size_t index = 0;
-    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
-        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
-    }
-}
-
-bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
-{
-    if (nbuckets <= 0 || width <= 0) {
-        return false;
-    }
-
-    // get histogram buckets
-    if (nbuckets == mBucketCount && mBuckets != NULL) {
-        // reuse our existing buffer
-        memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
-    } else {
-        // get a new pre-zeroed buffer
-        int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof (*mBuckets));
-        if (newbuckets == NULL) {
-            goto bad;
+    // Rendered frames only matter if they're being sent to the display
+    if (mIsSurfaceToDisplay) {
+        int64_t renderTimeNs;
+        for (size_t index = 0;
+            msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
+            index++) {
+            // Capture metrics for playback duration
+            mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
+            // Capture metrics for quality
+            int64_t mediaTimeUs = 0;
+            if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
+                ALOGE("processRenderedFrames: no media time found");
+                continue;
+            }
+            // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
+            // rendered frame.
+            if (!mTunneled || mediaTimeUs != INT64_MAX) {
+                FreezeEvent freezeEvent;
+                JudderEvent judderEvent;
+                mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
+                                                           &judderEvent);
+                reportToMediaMetricsIfValid(freezeEvent);
+                reportToMediaMetricsIfValid(judderEvent);
+            }
         }
-        if (mBuckets != NULL)
-            free(mBuckets);
-        mBuckets = newbuckets;
     }
-
-    mWidth = width;
-    mFloor = floor;
-    mCeiling = floor + nbuckets * width;
-    mBucketCount = nbuckets;
-
-    mMin = INT64_MAX;
-    mMax = INT64_MIN;
-    mSum = 0;
-    mCount = 0;
-    mBelow = mAbove = 0;
-
-    return true;
-
-  bad:
-    if (mBuckets != NULL) {
-        free(mBuckets);
-        mBuckets = NULL;
-    }
-
-    return false;
-}
-
-void MediaCodec::Histogram::insert(int64_t sample)
-{
-    // histogram is not set up
-    if (mBuckets == NULL) {
-        return;
-    }
-
-    mCount++;
-    mSum += sample;
-    if (mMin > sample) mMin = sample;
-    if (mMax < sample) mMax = sample;
-
-    if (sample < mFloor) {
-        mBelow++;
-    } else if (sample >= mCeiling) {
-        mAbove++;
-    } else {
-        int64_t slot = (sample - mFloor) / mWidth;
-        CHECK(slot < mBucketCount);
-        mBuckets[slot]++;
-    }
-    return;
-}
-
-std::string MediaCodec::Histogram::emit()
-{
-    std::string value;
-    char buffer[64];
-
-    // emits:  width,Below{bucket0,bucket1,...., bucketN}above
-    // unconfigured will emit: 0,0{}0
-    // XXX: is this best representation?
-    snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
-             mFloor, mWidth, mBelow);
-    value = buffer;
-    for (int i = 0; i < mBucketCount; i++) {
-        if (i != 0) {
-            value = value + ",";
-        }
-        snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
-        value = value + buffer;
-    }
-    snprintf(buffer, sizeof(buffer), "}%" PRId64 , mAbove);
-    value = value + buffer;
-    return value;
 }
 
 // when we send a buffer to the codec;
@@ -1404,6 +1781,21 @@
     }
 }
 
+bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
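+    // Output buffers flagged DECODE_ONLY are returned to the codec here and are never
+    // surfaced to the client.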
+    Mutex::Autolock al(mBufferLock);
+    BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
+    sp<MediaCodecBuffer> buffer = info->mData;
+    int32_t flags;
+    CHECK(buffer->meta()->findInt32("flags", &flags));
+    if (flags & BUFFER_FLAG_DECODE_ONLY) {
+        info->mOwnedByClient = false;
+        info->mData.clear();
+        mBufferChannel->discardBuffer(buffer);
+        return true;
+    }
+    return false;
+}
+
 // static
 status_t MediaCodec::PostAndAwaitResponse(
         const sp<AMessage> &msg, sp<AMessage> *response) {
@@ -1457,8 +1849,6 @@
     } else if (name.startsWithIgnoreCase("omx.")) {
         // at this time only ACodec specifies a mime type.
         return new ACodec;
-    } else if (name.startsWithIgnoreCase("android.filter.")) {
-        return new MediaFilter;
     } else {
         return NULL;
     }
@@ -1489,6 +1879,8 @@
 status_t MediaCodec::init(const AString &name) {
     status_t err = mResourceManagerProxy->init();
     if (err != OK) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: failed to initialize ResourceManager (err=%d)", err));
         mCodec = NULL; // remove the codec
         return err;
     }
@@ -1508,11 +1900,14 @@
     if (!name.startsWith("android.filter.")) {
         err = mGetCodecInfo(name, &mCodecInfo);
         if (err != OK) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
             mCodec = NULL;  // remove the codec.
             return err;
         }
         if (mCodecInfo == nullptr) {
-            ALOGE("Getting codec info with name '%s' failed", name.c_str());
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Getting codec info with name '%s' failed", name.c_str()));
             return NAME_NOT_FOUND;
         }
         secureCodec = name.endsWith(".secure");
@@ -1535,7 +1930,8 @@
 
     mCodec = mGetCodecBase(name, owner);
     if (mCodec == NULL) {
-        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
         return NAME_NOT_FOUND;
     }
 
@@ -1547,7 +1943,7 @@
             mCodecLooper->setName("CodecLooper");
             err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
             if (OK != err) {
-                ALOGE("Codec Looper failed to start");
+                mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
                 return err;
             }
         }
@@ -1566,7 +1962,6 @@
     mBufferChannel->setCallback(
             std::unique_ptr<CodecBase::BufferCallback>(
                     new BufferCallback(new AMessage(kWhatCodecNotify, this))));
-
     sp<AMessage> msg = new AMessage(kWhatInit, this);
     if (mCodecInfo) {
         msg->setObject("codecInfo", mCodecInfo);
@@ -1575,6 +1970,8 @@
     }
     msg->setString("name", name);
 
+    // Initial naming setup covers the period before the first call to ::configure();
+    // after that, the codec name and mode metrics are managed through ::configure() and the
+    // setup message.
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
         mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
@@ -1586,6 +1983,11 @@
 
     std::vector<MediaResourceParcel> resources;
     resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
+
+    // If the ComponentName is not set yet, use the name passed by the user.
+    if (mComponentName.empty()) {
+        mResourceManagerProxy->setCodecName(name.c_str());
+    }
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1600,6 +2002,12 @@
             break;
         }
     }
+
+    if (OK == err) {
+        // Notify the ResourceManager that this codec has been created
+        // (initialized) successfully.
+        mResourceManagerProxy->notifyClientCreated();
+    }
     return err;
 }
 
@@ -1646,24 +2054,31 @@
         const sp<ICrypto> &crypto,
         const sp<IDescrambler> &descrambler,
         uint32_t flags) {
+
     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
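+    // Gather configuration metrics into a fresh handle; it is attached to the configure
+    // message below and becomes the active mMetricsHandle when kWhatConfigure is handled.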
+    mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
 
     // TODO: validity check log-session-id: it should be a 32-hex-digit.
     format->findString("log-session-id", &mLogSessionId);
 
-    if (mMetricsHandle != 0) {
+    if (nextMetricsHandle != 0) {
+        mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
         int32_t profile = 0;
         if (format->findInt32("profile", &profile)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
+            mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
         }
         int32_t level = 0;
         if (format->findInt32("level", &level)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
+            mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
         }
-        mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
+        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
                               (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
 
-        mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+
+        // moved here from ::init()
+        mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
@@ -1672,77 +2087,64 @@
         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
             mRotationDegrees = 0;
         }
-
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
-            mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
             }
             int32_t maxHeight = 0;
             if (format->findInt32("max-height", &maxHeight)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
             }
             int32_t colorFormat = -1;
             if (format->findInt32("color-format", &colorFormat)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
+                mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
+            }
+            int32_t appMaxInputSize = -1;
+            if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
+                mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
             }
             if (mDomain == DOMAIN_VIDEO) {
                 float frameRate = -1.0;
                 if (format->findFloat("frame-rate", &frameRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
                 }
                 float captureRate = -1.0;
                 if (format->findFloat("capture-rate", &captureRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
                 }
                 float operatingRate = -1.0;
                 if (format->findFloat("operating-rate", &operatingRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
                 }
                 int32_t priority = -1;
                 if (format->findInt32("priority", &priority)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                    mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                 }
             }
-            int32_t colorStandard = -1;
-            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
-            }
-            int32_t colorRange = -1;
-            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
-            }
-            int32_t colorTransfer = -1;
-            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-                mConfigColorTransfer = colorTransfer;
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
-            }
-            HDRStaticInfo info;
-            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
-                    && ColorUtils::isHDRStaticInfoValid(&info)) {
-                mHDRStaticInfo = true;
-            }
         }
 
         // Prevent possible integer overflow in downstream code.
         if (mWidth < 0 || mHeight < 0 ||
                (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
-            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
+            mediametrics_delete(nextMetricsHandle);
             return BAD_VALUE;
         }
 
     } else {
-        if (mMetricsHandle != 0) {
+        if (nextMetricsHandle != 0) {
             int32_t channelCount;
             if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+                mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
             }
             int32_t sampleRate;
             if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+                mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
             }
         }
     }
@@ -1752,41 +2154,41 @@
                                                  enableMediaFormatShapingDefault);
         if (!enableShaping) {
             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
-            if (mMetricsHandle != 0) {
-                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
+            if (nextMetricsHandle != 0) {
+                mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
             }
         } else {
-            (void) shapeMediaFormat(format, flags);
+            (void) shapeMediaFormat(format, flags, nextMetricsHandle);
             // XXX: do we want to do this regardless of shaping enablement?
             mapFormat(mComponentName, format, nullptr, false);
         }
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
         }
         int32_t qpIMax = -1;
         if (format->findInt32("video-qp-i-max", &qpIMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
         }
         int32_t qpPMin = -1;
         if (format->findInt32("video-qp-p-min", &qpPMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
         }
         int32_t qpPMax = -1;
         if (format->findInt32("video-qp-p-max", &qpPMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
         }
         int32_t qpBMin = -1;
         if (format->findInt32("video-qp-b-min", &qpBMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
         }
         int32_t qpBMax = -1;
         if (format->findInt32("video-qp-b-max", &qpBMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
         }
     }
 
@@ -1802,13 +2204,23 @@
         } else {
             msg->setPointer("descrambler", descrambler.get());
         }
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
         ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
+    if (mConfigureMsg != nullptr) {
+        // If re-configuring, a previous configure message may still hold a metrics handle;
+        // free it before we discard the old mConfigureMsg.
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
+    msg->setInt64("metrics", nextMetricsHandle);
+
     // save msg for reset
     mConfigureMsg = msg;
 
@@ -2135,7 +2547,8 @@
 
 status_t MediaCodec::shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags) {
+            uint32_t flags,
+            mediametrics_handle_t metricsHandle) {
     ALOGV("shapeMediaFormat entry");
 
     if (!(flags & CONFIGURE_FLAG_ENCODE)) {
@@ -2191,39 +2604,39 @@
         sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
         size_t changeCount = deltas->countEntries();
         ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+        if (metricsHandle != 0) {
+            mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
         }
         if (changeCount > 0) {
-            if (mMetricsHandle != 0) {
+            if (metricsHandle != 0) {
                 // save some old properties before we fold in the new ones
                 int32_t bitrate;
                 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
                 }
                 int32_t qpIMin = -1;
                 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
                 }
                 int32_t qpIMax = -1;
                 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
                 }
                 int32_t qpPMin = -1;
                 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
                 }
                 int32_t qpPMax = -1;
                 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
                 }
                  int32_t qpBMin = -1;
                 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
                 }
                 int32_t qpBMax = -1;
                 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
                 }
             }
             // NB: for any field in both format and deltas, the deltas copy wins
@@ -2809,26 +3222,44 @@
     return OK;
 }
 
+// this is the user-callable entry point
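+// It posts kWhatGetMetrics to the looper and blocks until onGetMetrics() replies with a
+// duplicate of the current metrics record.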
 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
 
     reply = 0;
 
-    // shouldn't happen, but be safe
-    if (mMetricsHandle == 0) {
-        return UNKNOWN_ERROR;
+    sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
     }
 
-    // update any in-flight data that's not carried within the record
-    updateMediametrics();
-
-    // send it back to the caller.
-    reply = mediametrics_dup(mMetricsHandle);
-
-    updateEphemeralMediametrics(reply);
+    CHECK(response->findInt64("metrics", &reply));
 
     return OK;
 }
 
+// runs on the looper thread (for mutex purposes)
+void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
+
+    mediametrics_handle_t results = 0;
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+
+    if (mMetricsHandle != 0) {
+        updateMediametrics();
+        results = mediametrics_dup(mMetricsHandle);
+        updateEphemeralMediametrics(results);
+    } else {
+        results = mediametrics_dup(mMetricsHandle);
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setInt64("metrics", results);
+    response->postReply(replyID);
+}
+
 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
     msg->setInt32("portIndex", kPortIndexInput);
@@ -2871,17 +3302,17 @@
         sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
     // use mutex instead of a context switch
     if (mReleasedByResourceManager) {
-        ALOGE("getBufferAndFormat - resource already released");
+        mErrorLog.log(LOG_TAG, "resource already released");
         return DEAD_OBJECT;
     }
 
     if (buffer == NULL) {
-        ALOGE("getBufferAndFormat - null MediaCodecBuffer");
+        mErrorLog.log(LOG_TAG, "null buffer");
         return INVALID_OPERATION;
     }
 
     if (format == NULL) {
-        ALOGE("getBufferAndFormat - null AMessage");
+        mErrorLog.log(LOG_TAG, "null format");
         return INVALID_OPERATION;
     }
 
@@ -2889,7 +3320,9 @@
     format->clear();
 
     if (!isExecuting()) {
-        ALOGE("getBufferAndFormat - not executing");
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in Executing states",
+                apiStateString().c_str()));
         return INVALID_OPERATION;
     }
 
@@ -2901,6 +3334,7 @@
     if (index >= buffers.size()) {
         ALOGE("getBufferAndFormat - trying to get buffer with "
               "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
+        mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
         return INVALID_OPERATION;
     }
 
@@ -2908,6 +3342,7 @@
     if (!info.mOwnedByClient) {
         ALOGE("getBufferAndFormat - invalid operation "
               "(the index %zu is not owned by client)", index);
+        mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
         return INVALID_OPERATION;
     }
 
@@ -3035,6 +3470,7 @@
 
 void MediaCodec::cancelPendingDequeueOperations() {
     if (mFlags & kFlagDequeueInputPending) {
+        mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
         PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
 
         ++mDequeueInputTimeoutGeneration;
@@ -3043,6 +3479,7 @@
     }
 
     if (mFlags & kFlagDequeueOutputPending) {
+        mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
         PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
 
         ++mDequeueOutputTimeoutGeneration;
@@ -3052,8 +3489,16 @@
 }
 
 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
-    if (!isExecuting() || (mFlags & kFlagIsAsync)
-            || (newRequest && (mFlags & kFlagDequeueInputPending))) {
+    if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in executing state",
+                apiStateString().c_str()));
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (mFlags & kFlagIsAsync) {
+        mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
+        mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
         PostReplyWithError(replyID, INVALID_OPERATION);
         return true;
     } else if (mFlags & kFlagStickyError) {
@@ -3075,9 +3520,18 @@
     return true;
 }
 
-bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
-    if (!isExecuting() || (mFlags & kFlagIsAsync)
-            || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
+MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
+        const sp<AReplyToken> &replyID, bool newRequest) {
+    if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Invalid to call %s; only valid in executing state",
+                apiStateString().c_str()));
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (mFlags & kFlagIsAsync) {
+        mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+        PostReplyWithError(replyID, INVALID_OPERATION);
+    } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
+        mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
         PostReplyWithError(replyID, INVALID_OPERATION);
     } else if (mFlags & kFlagStickyError) {
         PostReplyWithError(replyID, getStickyError());
@@ -3088,7 +3542,7 @@
         sp<AMessage> response = new AMessage;
         BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
         if (!info) {
-            return false;
+            return DequeueOutputResult::kNoBuffer;
         }
 
         // In synchronous mode, output format change should be handled
@@ -3099,10 +3553,13 @@
         if (mFlags & kFlagOutputFormatChanged) {
             PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
             mFlags &= ~kFlagOutputFormatChanged;
-            return true;
+            return DequeueOutputResult::kRepliedWithError;
         }
 
         ssize_t index = dequeuePortBuffer(kPortIndexOutput);
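+        // Decode-only output buffers are consumed here and are never returned to the caller.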
+        if (discardDecodeOnlyOutputBuffer(index)) {
+            return DequeueOutputResult::kDiscardedBuffer;
+        }
 
         response->setSize("index", index);
         response->setSize("offset", buffer->offset());
@@ -3121,9 +3578,21 @@
         statsBufferReceived(timeUs, buffer);
 
         response->postReply(replyID);
+        return DequeueOutputResult::kSuccess;
     }
 
-    return true;
+    return DequeueOutputResult::kRepliedWithError;
+}
+
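+// Populate the ClientConfigParcel that is reported to the ResourceManager when the codec is
+// started and stopped (see notifyClientStarted / notifyClientStopped).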
+inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
+    clientConfig.codecType = toMediaResourceSubType(mDomain);
+    clientConfig.isEncoder = mFlags & kFlagIsEncoder;
+    clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
+    clientConfig.width = mWidth;
+    clientConfig.height = mHeight;
+    clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+    clientConfig.id = mCodecId;
 }
 
 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
@@ -3132,9 +3601,10 @@
         {
             int32_t what;
             CHECK(msg->findInt32("what", &what));
-
+            AString codecErrorState;
             switch (what) {
                 case kWhatError:
+                case kWhatCryptoError:
                 {
                     int32_t err, actionCode;
                     CHECK(msg->findInt32("err", &err));
@@ -3147,11 +3617,20 @@
                         mFlags |= kFlagSawMediaServerDie;
                         mFlags &= ~kFlagIsComponentAllocated;
                     }
-
                     bool sendErrorResponse = true;
-                    std::string origin{"kWhatError:"};
+                    std::string origin;
+                    if (what == kWhatCryptoError) {
+                        origin = "kWhatCryptoError:";
+                    } else {
+                        origin = "kWhatError:";
+                        //TODO: add a new error state
+                    }
+                    codecErrorState = kCodecErrorState;
                     origin += stateString(mState);
-
+                    if (mCryptoAsync) {
+                        // TODO: do some bookkeeping on the buffers
+                        mCryptoAsync->stop();
+                    }
                     switch (mState) {
                         case INITIALIZING:
                         {
@@ -3236,8 +3715,7 @@
 
                                 setState(UNINITIALIZED);
                             } else {
-                                setState(
-                                        (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
+                                setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
                             }
                             break;
                         }
@@ -3253,7 +3731,11 @@
                             cancelPendingDequeueOperations();
 
                             if (mFlags & kFlagIsAsync) {
-                                onError(err, actionCode);
+                                if (what == kWhatError) {
+                                    onError(err, actionCode);
+                                } else if (what == kWhatCryptoError) {
+                                    onCryptoError(msg);
+                                }
                             }
                             switch (actionCode) {
                             case ACTION_CODE_TRANSIENT:
@@ -3285,7 +3767,11 @@
                                 actionCode = ACTION_CODE_FATAL;
                             }
                             if (mFlags & kFlagIsAsync) {
-                                onError(err, actionCode);
+                                if (what == kWhatError) {
+                                    onError(err, actionCode);
+                                } else if (what == kWhatCryptoError) {
+                                    onCryptoError(msg);
+                                }
                             }
                             switch (actionCode) {
                             case ACTION_CODE_TRANSIENT:
@@ -3333,6 +3819,8 @@
                     if (mComponentName.c_str()) {
                         mediametrics_setCString(mMetricsHandle, kCodecCodec,
                                                 mComponentName.c_str());
+                        // Update the codec name.
+                        mResourceManagerProxy->setCodecName(mComponentName.c_str());
                     }
 
                     const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
@@ -3352,14 +3840,11 @@
                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
                     }
 
-                    MediaCodecInfo::Attributes attr = mCodecInfo
-                            ? mCodecInfo->getAttributes()
-                            : MediaCodecInfo::Attributes(0);
-                    if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
-                        // software codec is currently ignored.
-                        mResourceManagerProxy->addResource(MediaResource::CodecResource(
+                    mediametrics_setInt32(mMetricsHandle, kCodecHardware,
+                                          MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
+
+                    mResourceManagerProxy->addResource(MediaResource::CodecResource(
                             mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
-                    }
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
                     break;
@@ -3382,8 +3867,6 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
-                    updateHDRFormatMetric();
-
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3426,6 +3909,19 @@
                         if (interestingFormat->findInt32("level", &level)) {
                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
                         }
+                        sp<AMessage> uncompressedFormat =
+                                (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
+                        int32_t componentColorFormat  = -1;
+                        if (uncompressedFormat->findInt32("android._color-format",
+                                &componentColorFormat)) {
+                            mediametrics_setInt32(mMetricsHandle,
+                                    kCodecComponentColorFormat, componentColorFormat);
+                        }
+                        updateHdrMetrics(true /* isConfig */);
+                        int32_t codecMaxInputSize = -1;
+                        if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
+                            mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
+                        }
                         // bitrate and bitrate mode, encoder only
                         if (mFlags & kFlagIsEncoder) {
                             // encoder specific values
@@ -3465,7 +3961,6 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
-                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3490,7 +3985,6 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
-                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -3532,8 +4026,24 @@
                         mResourceManagerProxy->addResource(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
+                    // Notify the RM that the codec is in use (has been started).
+                    ClientConfigParcel clientConfig;
+                    initClientConfigParcel(clientConfig);
+                    mResourceManagerProxy->notifyClientStarted(clientConfig);
+
                     setState(STARTED);
                     postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
+
+                    // Now that the codec has started, default the tunnel-peek behavior to
+                    // "undefined" for backwards compatibility with older releases. Once an app
+                    // explicitly enables or disables peek, this parameter is turned off and the
+                    // legacy undefined behavior is no longer allowed.
+                    // See updateTunnelPeek, called from onSetParameters, for more details.
+                    if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
+                        sp<AMessage> params = new AMessage;
+                        params->setInt32("android._tunnel-peek-set-legacy", 1);
+                        mCodec->signalSetParameters(params);
+                    }
                     break;
                 }
 
@@ -3557,7 +4067,7 @@
                                 asString(previousState),
                                 asString(TunnelPeekState::kBufferRendered));
                     }
-                    updatePlaybackDuration(msg);
+                    processRenderedFrames(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -3710,11 +4220,26 @@
                         handleOutputFormatChangeIfNeeded(buffer);
                         onOutputBufferAvailable();
                     } else if (mFlags & kFlagDequeueOutputPending) {
-                        CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
-
-                        ++mDequeueOutputTimeoutGeneration;
-                        mFlags &= ~kFlagDequeueOutputPending;
-                        mDequeueOutputReplyID = 0;
+                        DequeueOutputResult dequeueResult =
+                            handleDequeueOutputBuffer(mDequeueOutputReplyID);
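+                        // A buffer was just reported available, so kNoBuffer cannot occur here;
+                        // a discarded decode-only buffer leaves the pending dequeue request armed.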
+                        switch (dequeueResult) {
+                            case DequeueOutputResult::kNoBuffer:
+                                TRESPASS();
+                                break;
+                            case DequeueOutputResult::kDiscardedBuffer:
+                                break;
+                            case DequeueOutputResult::kRepliedWithError:
+                                [[fallthrough]];
+                            case DequeueOutputResult::kSuccess:
+                            {
+                                ++mDequeueOutputTimeoutGeneration;
+                                mFlags &= ~kFlagDequeueOutputPending;
+                                mDequeueOutputReplyID = 0;
+                                break;
+                            }
+                            default:
+                                TRESPASS();
+                        }
                     } else {
                         postActivityNotificationIfPossible();
                     }
@@ -3736,6 +4261,16 @@
                               mState, stateString(mState).c_str());
                         break;
                     }
+
+                    if (mIsSurfaceToDisplay) {
+                        mVideoRenderQualityTracker.resetForDiscontinuity();
+                    }
+
+                    // Notify the RM that the codec has been stopped.
+                    ClientConfigParcel clientConfig;
+                    initClientConfigParcel(clientConfig);
+                    mResourceManagerProxy->notifyClientStopped(clientConfig);
+
                     setState(INITIALIZED);
                     if (mReplyID) {
                         postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
@@ -3786,12 +4321,17 @@
                         break;
                     }
 
+                    if (mIsSurfaceToDisplay) {
+                        mVideoRenderQualityTracker.resetForDiscontinuity();
+                    }
+
                     if (mFlags & kFlagIsAsync) {
                         setState(FLUSHED);
                     } else {
                         setState(STARTED);
                         mCodec->signalResume();
                     }
+                    mReliabilityContextMetrics.flushCount++;
 
                     postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
                     break;
@@ -3858,6 +4398,9 @@
                 // callback can't be set after codec is executing,
                 // or before it's initialized (as the callback
                 // will be cleared when it goes to INITIALIZED)
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "Invalid to call %s; only valid at Initialized state",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -3879,9 +4422,19 @@
             break;
         }
 
+        case kWhatGetMetrics:
+        {
+            onGetMetrics(msg);
+            break;
+        }
+
         case kWhatConfigure:
         {
             if (mState != INITIALIZED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "configure() is valid only at Initialized state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -3899,6 +4452,22 @@
             sp<AMessage> format;
             CHECK(msg->findMessage("format", &format));
 
+            // start with a copy of the passed metrics info for use in this run
+            mediametrics_handle_t handle;
+            CHECK(msg->findInt64("metrics", &handle));
+            if (handle != 0) {
+                if (mMetricsHandle != 0) {
+                    flushMediametrics();
+                }
+                mMetricsHandle = mediametrics_dup(handle);
+                // and set some additional metrics values
+                initMediametrics();
+            }
+
+            // from this point forward, in this configure/use/release lifecycle, we want to
+            // upload our data
+            mMetricsToUpload = true;
+
             int32_t push;
             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
@@ -3923,14 +4492,28 @@
                 handleSetSurface(NULL);
             }
 
+            mApiUsageMetrics.isUsingOutputSurface = true;
+
             uint32_t flags;
             CHECK(msg->findInt32("flags", (int32_t *)&flags));
-            if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
+            if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
+                flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
                 if (!(mFlags & kFlagIsAsync)) {
+                    mErrorLog.log(LOG_TAG,
+                            "Block model or CryptoAsync is only valid with callback set "
+                            "(async mode)");
                     PostReplyWithError(replyID, INVALID_OPERATION);
                     break;
                 }
-                mFlags |= kFlagUseBlockModel;
+                if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
+                    mFlags |= kFlagUseBlockModel;
+                }
+                if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
+                    mFlags |= kFlagUseCryptoAsync;
+                    if ((mFlags & kFlagUseBlockModel)) {
+                        ALOGW("CryptoAsync not yet enabled for block model, "
+                                "falling back to normal");
+                    }
+                }
             }
             mReplyID = replyID;
             setState(CONFIGURING);
@@ -3956,6 +4539,27 @@
 
             mDescrambler = static_cast<IDescrambler *>(descrambler);
             mBufferChannel->setDescrambler(mDescrambler);
+            if ((mFlags & kFlagUseCryptoAsync) &&
+                mCrypto  && (mDomain == DOMAIN_VIDEO)) {
+                // kFlagUseCryptoAsync is set, but do not use CryptoAsync for the block model;
+                // this is so the error is propagated via onCryptoError()
+                // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
+                //                     with CONFIGURE_FLAG_USE_BLOCK_MODEL)
+                if (!(mFlags & kFlagUseBlockModel)) {
+                    mCryptoAsync = new CryptoAsync(mBufferChannel);
+                    mCryptoAsync->setCallback(std::make_unique<CryptoAsyncCallback>(
+                            new AMessage(kWhatCodecNotify, this)));
+                    mCryptoLooper = new ALooper();
+                    mCryptoLooper->setName("CryptoAsyncLooper");
+                    mCryptoLooper->registerHandler(mCryptoAsync);
+                    status_t err = mCryptoLooper->start();
+                    if (err != OK) {
+                        ALOGE("Crypto Looper failed to start");
+                        mCryptoAsync = nullptr;
+                        mCryptoLooper = nullptr;
+                    }
+                }
+            }
 
             format->setInt32("flags", flags);
             if (flags & CONFIGURE_FLAG_ENCODE) {
@@ -3972,14 +4576,7 @@
             } else {
                 mTunneled = false;
             }
-
-            // If mTunnelPeekState is still in kLegacyMode at this point,
-            // configure the codec in legacy mode
-            if (mTunneled && (mTunnelPeekState == TunnelPeekState::kLegacyMode)) {
-                sp<AMessage> params = new AMessage;
-                params->setInt32("android._tunnel-peek-set-legacy", 1);
-                onSetParameters(params);
-            }
+            mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
 
             int32_t background = 0;
             if (format->findInt32("android._background-mode", &background) && background) {
@@ -4007,9 +4604,13 @@
                     sp<Surface> surface = static_cast<Surface *>(obj.get());
                     if (mSurface == NULL) {
                         // do not support setting surface if it was not set
+                        mErrorLog.log(LOG_TAG,
+                                      "Cannot set surface if the codec is not configured with "
+                                      "a surface already");
                         err = INVALID_OPERATION;
                     } else if (obj == NULL) {
                         // do not support unsetting surface
+                        mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
                         err = BAD_VALUE;
                     } else {
                         err = connectToSurface(surface);
@@ -4034,12 +4635,16 @@
                                 (void)disconnectFromSurface();
                                 mSurface = surface;
                             }
+                            mReliabilityContextMetrics.setOutputSurfaceCount++;
                         }
                     }
                     break;
                 }
 
                 default:
+                    mErrorLog.log(LOG_TAG, base::StringPrintf(
+                            "setSurface() is valid only at Executing states; currently %s",
+                            apiStateString().c_str()));
                     err = INVALID_OPERATION;
                     break;
             }
@@ -4053,6 +4658,9 @@
         {
             // Must be configured, but can't have been started yet.
             if (mState != CONFIGURED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "setInputSurface() is valid only at Configured state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -4088,6 +4696,9 @@
                 PostReplyWithError(msg, OK);
                 break;
             } else if (mState != CONFIGURED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "start() is valid only at Configured state; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             }
@@ -4133,7 +4744,9 @@
 
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
-
+            if (mCryptoAsync) {
+                mCryptoAsync->stop();
+            }
             sp<AMessage> asyncNotify;
             (void)msg->findMessage("async", &asyncNotify);
             // post asyncNotify if going out of scope.
@@ -4165,6 +4778,7 @@
                     if (mFlags & kFlagIsAsync) {
                         onError(DEAD_OBJECT, ACTION_CODE_FATAL);
                     }
+                    mErrorLog.log(LOG_TAG, "Released by resource manager");
                     mReleasedByResourceManager = true;
                 }
 
@@ -4201,6 +4815,7 @@
                 // the previous stop/release completes and then reply with OK.
                 status_t err = mState == targetState ? OK : INVALID_OPERATION;
                 response->setInt32("err", err);
+                // TODO: mErrorLog
                 if (err == OK && targetState == UNINITIALIZED) {
                     mComponentName.clear();
                 }
@@ -4309,13 +4924,13 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mFlags & kFlagIsAsync) {
-                ALOGE("dequeueInputBuffer can't be used in async mode");
+                mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
 
             if (mHaveInputSurface) {
-                ALOGE("dequeueInputBuffer can't be used with input surface");
+                mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -4370,6 +4985,9 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "queueInputBuffer() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4397,32 +5015,44 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mFlags & kFlagIsAsync) {
-                ALOGE("dequeueOutputBuffer can't be used in async mode");
+                mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
 
-            if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
-                break;
-            }
+            DequeueOutputResult dequeueResult =
+                handleDequeueOutputBuffer(replyID, true /* new request */);
+            switch (dequeueResult) {
+                case DequeueOutputResult::kNoBuffer:
+                    [[fallthrough]];
+                case DequeueOutputResult::kDiscardedBuffer:
+                {
+                    int64_t timeoutUs;
+                    CHECK(msg->findInt64("timeoutUs", &timeoutUs));
 
-            int64_t timeoutUs;
-            CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+                    if (timeoutUs == 0LL) {
+                        PostReplyWithError(replyID, -EAGAIN);
+                        break;
+                    }
 
-            if (timeoutUs == 0LL) {
-                PostReplyWithError(replyID, -EAGAIN);
-                break;
-            }
+                    mFlags |= kFlagDequeueOutputPending;
+                    mDequeueOutputReplyID = replyID;
 
-            mFlags |= kFlagDequeueOutputPending;
-            mDequeueOutputReplyID = replyID;
-
-            if (timeoutUs > 0LL) {
-                sp<AMessage> timeoutMsg =
-                    new AMessage(kWhatDequeueOutputTimedOut, this);
-                timeoutMsg->setInt32(
-                        "generation", ++mDequeueOutputTimeoutGeneration);
-                timeoutMsg->post(timeoutUs);
+                    if (timeoutUs > 0LL) {
+                        sp<AMessage> timeoutMsg =
+                            new AMessage(kWhatDequeueOutputTimedOut, this);
+                        timeoutMsg->setInt32(
+                                "generation", ++mDequeueOutputTimeoutGeneration);
+                        timeoutMsg->post(timeoutUs);
+                    }
+                    break;
+                }
+                case DequeueOutputResult::kRepliedWithError:
+                    [[fallthrough]];
+                case DequeueOutputResult::kSuccess:
+                    break;
+                default:
+                    TRESPASS();
             }
             break;
         }
@@ -4452,6 +5082,9 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "releaseOutputBuffer() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4465,9 +5098,25 @@
             break;
         }
 
+        case kWhatPollForRenderedBuffers:
+        {
+            if (isExecuting()) {
+                mBufferChannel->pollForRenderedBuffers();
+            }
+            break;
+        }
+
         case kWhatSignalEndOfInputStream:
         {
-            if (!isExecuting() || !mHaveInputSurface) {
+            if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "signalEndOfInputStream() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(msg, INVALID_OPERATION);
+                break;
+            } else if (!mHaveInputSurface) {
+                mErrorLog.log(
+                        LOG_TAG, "signalEndOfInputStream() called without an input surface set");
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4491,7 +5140,14 @@
         {
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
-            if (!isExecuting() || (mFlags & kFlagIsAsync)) {
+            if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "getInput/OutputBuffers() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            } else if (mFlags & kFlagIsAsync) {
+                mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4517,6 +5173,8 @@
                 }
             }
 
+            mApiUsageMetrics.isArrayMode = true;
+
             (new AMessage)->postReply(replyID);
             break;
         }
@@ -4524,6 +5182,9 @@
         case kWhatFlush:
         {
             if (!isExecuting()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "flush() is valid only at Executing states; currently %s",
+                        apiStateString().c_str()));
                 PostReplyWithError(msg, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4541,7 +5202,11 @@
             mReplyID = replyID;
             // TODO: skip flushing if already FLUSHED
             setState(FLUSHING);
-
+            if (mCryptoAsync) {
+                std::list<sp<AMessage>> pendingBuffers;
+                mCryptoAsync->stop(&pendingBuffers);
+                //TODO: do something with these buffers
+            }
             mCodec->signalFlush();
             returnBuffersToCodec();
             TunnelPeekState previousState = mTunnelPeekState;
@@ -4563,10 +5228,17 @@
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
 
-            if ((mState != CONFIGURED && mState != STARTING &&
-                 mState != STARTED && mState != FLUSHING &&
-                 mState != FLUSHED)
-                    || format == NULL) {
+            if (mState != CONFIGURED && mState != STARTING &&
+                    mState != STARTED && mState != FLUSHING &&
+                    mState != FLUSHED) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "getInput/OutputFormat() is valid at Executing states "
+                        "and Configured state; currently %s",
+                        apiStateString().c_str()));
+                PostReplyWithError(replyID, INVALID_OPERATION);
+                break;
+            } else if (format == NULL) {
+                mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             } else if (mFlags & kFlagStickyError) {
@@ -4601,6 +5273,7 @@
             CHECK(msg->senderAwaitsResponse(&replyID));
 
             if (mComponentName.empty()) {
+                mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
                 PostReplyWithError(replyID, INVALID_OPERATION);
                 break;
             }
@@ -4681,7 +5354,6 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
-    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4697,7 +5369,7 @@
         // rendering.
         int32_t dataSpace;
         if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
-            ALOGD("[%s] setting dataspace on output surface to #%x",
+            ALOGD("[%s] setting dataspace on output surface to %#x",
                     mComponentName.c_str(), dataSpace);
             int err = native_window_set_buffers_data_space(
                     mSurface.get(), (android_dataspace)dataSpace);
@@ -4707,9 +5379,6 @@
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
-                if (ColorUtils::isHDRStaticInfoValid(&info)) {
-                    mHDRStaticInfo = true;
-                }
             }
         }
 
@@ -4718,7 +5387,6 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
-            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4753,32 +5421,33 @@
         postActivityNotificationIfPossible();
     }
 
-    // Notify mCrypto of video resolution changes
-    if (mCrypto != NULL) {
-        int32_t left, top, right, bottom, width, height;
-        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
-            mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
-        } else if (mOutputFormat->findInt32("width", &width)
-                && mOutputFormat->findInt32("height", &height)) {
-            mCrypto->notifyResolution(width, height);
-        }
+    // Update the width and the height.
+    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+    bool resolutionChanged = false;
+    if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+        mWidth = right - left + 1;
+        mHeight = bottom - top + 1;
+        resolutionChanged = true;
+    } else if (mOutputFormat->findInt32("width", &width) &&
+               mOutputFormat->findInt32("height", &height)) {
+        mWidth = width;
+        mHeight = height;
+        resolutionChanged = true;
     }
 
-    if (mMetricsHandle != 0) {
-        int32_t colorStandard = -1;
-        if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
+    // Notify mCrypto and the RM of video resolution changes
+    if (resolutionChanged) {
+        if (mCrypto != NULL) {
+            mCrypto->notifyResolution(mWidth, mHeight);
         }
-        int32_t colorRange = -1;
-        if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
-        }
-        int32_t colorTransfer = -1;
-        if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
-        }
+        ClientConfigParcel clientConfig;
+        initClientConfigParcel(clientConfig);
+        mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
+        mReliabilityContextMetrics.resolutionChangeCount++;
     }
-}
+
+    updateHdrMetrics(false /* isConfig */);
+}
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
@@ -4786,7 +5455,7 @@
     size_t i = 0;
     for (;;) {
         sp<ABuffer> csd;
-        if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
+        if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
             break;
         }
         if (csd->size() == 0) {
@@ -4821,7 +5490,7 @@
                 }
                 sDealer = new MemoryDealer(
                         newDealerCapacity,
-                        AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
+                        base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
                 mem = sDealer->allocate(csd->size());
             }
             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
@@ -4832,9 +5501,14 @@
                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
             C2WriteView view{block->map().get()};
             if (view.error() != C2_OK) {
+                mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
                 return -EINVAL;
             }
             if (csd->size() > view.capacity()) {
+                mErrorLog.log(LOG_TAG, base::StringPrintf(
+                        "Fatal error: allocated block is too small "
+                        "(csd size %zu; block cap %u)",
+                        csd->size(), view.capacity()));
                 return -EINVAL;
             }
             memcpy(view.base(), csd->data(), csd->size());
@@ -4845,10 +5519,16 @@
         const sp<MediaCodecBuffer> &codecInputData = info.mData;
 
         if (csd->size() > codecInputData->capacity()) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "CSD is too large to fit in input buffer "
+                    "(csd size %zu; buffer cap %zu)",
+                    csd->size(), codecInputData->capacity()));
             return -EINVAL;
         }
         if (codecInputData->data() == NULL) {
             ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
             return -EINVAL;
         }
 
@@ -4901,6 +5581,7 @@
 
         mActivityNotify.clear();
         mCallback.clear();
+        mErrorLog.clear();
     }
 
     if (newState == UNINITIALIZED) {
@@ -4997,7 +5678,7 @@
         CHECK(msg->findSize("offset", &offset));
     }
     const CryptoPlugin::SubSample *subSamples;
-    size_t numSubSamples;
+    size_t numSubSamples = 0;
     const uint8_t *key = NULL;
     const uint8_t *iv = NULL;
     CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
@@ -5021,9 +5702,9 @@
         if (!hasCryptoOrDescrambler()) {
             ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
                     mComponentName.c_str());
+            mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
             return -EINVAL;
         }
-
         CHECK(msg->findPointer("subSamples", (void **)&subSamples));
         CHECK(msg->findSize("numSubSamples", &numSubSamples));
         CHECK(msg->findPointer("key", (void **)&key));
@@ -5044,29 +5725,123 @@
     }
 
     if (index >= mPortBuffers[kPortIndexInput].size()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "index out of range (index=%zu; size=%zu)",
+                index, mPortBuffers[kPortIndexInput].size()));
         return -ERANGE;
     }
 
     BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
     sp<MediaCodecBuffer> buffer = info->mData;
+    if (buffer == nullptr) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: failed to fetch buffer for index %zu", index));
+        return -EACCES;
+    }
+    if (!info->mOwnedByClient) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "client does not own the buffer #%zu", index));
+        return -EACCES;
+    }
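+    // Stamps timestamp and flag metadata (EOS, CSD, decode-only) on the input buffer and,
+    // for tunneled playback, advances the tunnel-peek state.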
+    auto setInputBufferParams = [this, &buffer]
+        (int64_t timeUs, uint32_t flags = 0) -> status_t {
+        status_t err = OK;
+        buffer->meta()->setInt64("timeUs", timeUs);
+        if (flags & BUFFER_FLAG_EOS) {
+            buffer->meta()->setInt32("eos", true);
+        }
 
+        if (flags & BUFFER_FLAG_CODECCONFIG) {
+            buffer->meta()->setInt32("csd", true);
+        }
+        bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
+        if (isBufferDecodeOnly) {
+            buffer->meta()->setInt32("decode-only", true);
+        }
+        if (mTunneled && !isBufferDecodeOnly) {
+            TunnelPeekState previousState = mTunnelPeekState;
+            switch (mTunnelPeekState) {
+                case TunnelPeekState::kEnabledNoBuffer:
+                    buffer->meta()->setInt32("tunnel-first-frame", 1);
+                    mTunnelPeekState = TunnelPeekState::kEnabledQueued;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                            asString(previousState),
+                            asString(mTunnelPeekState));
+                    break;
+                case TunnelPeekState::kDisabledNoBuffer:
+                    buffer->meta()->setInt32("tunnel-first-frame", 1);
+                    mTunnelPeekState = TunnelPeekState::kDisabledQueued;
+                    ALOGV("TunnelPeekState: %s -> %s",
+                            asString(previousState),
+                            asString(mTunnelPeekState));
+                    break;
+                default:
+                    break;
+            }
+        }
+        return err;
+    };
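+    // Packages the buffer and its crypto parameters (key, IV, mode, pattern, subsamples)
+    // into an AMessage for asynchronous decryption.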
+    auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
+        size_t key_len = (key != nullptr) ? 16 : 0;
+        size_t iv_len = (iv != nullptr) ? 16 : 0;
+        sp<ABuffer> shared_key;
+        sp<ABuffer> shared_iv;
+        if (key_len > 0) {
+            shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
+        }
+        if (iv_len > 0) {
+            shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
+        }
+        sp<ABuffer> subSamples_buffer =
+            new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
+        CryptoPlugin::SubSample *samples =
+                (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
+        for (size_t s = 0; s < numSubSamples; s++) {
+            samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
+            samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
+        }
+        // set decrypt Action
+        cryptoInfo->setInt32("action", action);
+        cryptoInfo->setObject("buffer", buffer);
+        cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
+        cryptoInfo->setBuffer("key", shared_key);
+        cryptoInfo->setBuffer("iv", shared_iv);
+        cryptoInfo->setInt32("mode", (int)mode);
+        cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+        cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
+        cryptoInfo->setBuffer("subSamples", subSamples_buffer);
+        cryptoInfo->setSize("numSubSamples", numSubSamples);
+    };
     if (c2Buffer || memory) {
         sp<AMessage> tunings = NULL;
         if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
             onSetParameters(tunings);
         }
-
         status_t err = OK;
         if (c2Buffer) {
             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
         } else if (memory) {
+            AString errorDetailMsg;
             err = mBufferChannel->attachEncryptedBuffer(
                     memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
-                    offset, subSamples, numSubSamples, buffer);
+                    offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+            if (err != OK && hasCryptoOrDescrambler()
+                    && (mFlags & kFlagUseCryptoAsync)) {
+                // build the error detail from the message filled in by attachEncryptedBuffer()
+                sp<AMessage> cryptoErrorInfo = new AMessage();
+                buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
+                cryptoErrorInfo->setInt32("err", err);
+                cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
+                cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
+                onCryptoError(cryptoErrorInfo);
+                // we want cryptoError to be in the callback
+                // but Codec IllegalStateException to be triggered.
+                err = INVALID_OPERATION;
+            }
         } else {
+            mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
             err = UNKNOWN_ERROR;
         }
-
         if (err == OK && !buffer->asC2Buffer()
                 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
             C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
@@ -5082,57 +5857,32 @@
                 flags &= ~BUFFER_FLAG_EOS;
             }
         }
-
         offset = buffer->offset();
         size = buffer->size();
         if (err != OK) {
-            ALOGI("block model buffer attach failed: err = %s (%d)",
+            ALOGE("block model buffer attach failed: err = %s (%d)",
                   StrMediaError(err).c_str(), err);
             return err;
         }
     }
 
-    if (buffer == nullptr || !info->mOwnedByClient) {
-        return -EACCES;
-    }
-
     if (offset + size > buffer->capacity()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "buffer offset and size goes beyond the capacity: "
+                "offset=%zu, size=%zu, cap=%zu",
+                offset, size, buffer->capacity()));
+        return -EINVAL;
+    }
+    buffer->setRange(offset, size);
+    status_t err = OK;
+    err = setInputBufferParams(timeUs, flags);
+    if (err != OK) {
         return -EINVAL;
     }
 
-    buffer->setRange(offset, size);
-    buffer->meta()->setInt64("timeUs", timeUs);
-    if (flags & BUFFER_FLAG_EOS) {
-        buffer->meta()->setInt32("eos", true);
-    }
+    int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
+    mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
 
-    if (flags & BUFFER_FLAG_CODECCONFIG) {
-        buffer->meta()->setInt32("csd", true);
-    }
-
-    if (mTunneled) {
-        TunnelPeekState previousState = mTunnelPeekState;
-        switch(mTunnelPeekState){
-            case TunnelPeekState::kEnabledNoBuffer:
-                buffer->meta()->setInt32("tunnel-first-frame", 1);
-                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
-                ALOGV("TunnelPeekState: %s -> %s",
-                        asString(previousState),
-                        asString(mTunnelPeekState));
-                break;
-            case TunnelPeekState::kDisabledNoBuffer:
-                buffer->meta()->setInt32("tunnel-first-frame", 1);
-                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
-                ALOGV("TunnelPeekState: %s -> %s",
-                        asString(previousState),
-                        asString(mTunnelPeekState));
-                break;
-            default:
-                break;
-        }
-    }
-
-    status_t err = OK;
     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
         AString *errorDetailMsg;
         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
@@ -5148,7 +5898,13 @@
                 }
             }
         }
-        err = mBufferChannel->queueSecureInputBuffer(
+        if (mCryptoAsync) {
+            // prepare a message and enqueue
+            sp<AMessage> cryptoInfo = new AMessage();
+            buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
+            mCryptoAsync->decrypt(cryptoInfo);
+        } else {
+            err = mBufferChannel->queueSecureInputBuffer(
                 buffer,
                 (mFlags & kFlagIsSecure),
                 key,
@@ -5158,6 +5914,7 @@
                 subSamples,
                 numSubSamples,
                 errorDetailMsg);
+        }
         if (err != OK) {
             mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
             ALOGW("Log queueSecureInputBuffer error: %d", err);
@@ -5171,6 +5928,10 @@
     }
 
     if (err == OK) {
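+        // Report queued tunnel-mode frames to the render-quality tracker; decode-only
+        // and end-of-stream frames are excluded.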
+        if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
+            mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
+        }
+
         // synchronization boundary for getBufferAndFormat
         Mutex::Autolock al(mBufferLock);
         info->mOwnedByClient = false;
@@ -5196,14 +5957,13 @@
 size_t MediaCodec::CreateFramesRenderedMessage(
         const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
     size_t index = 0;
-
     for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
             it != done.cend(); ++it) {
         if (it->getRenderTimeNs() < 0) {
             continue; // dropped frame from tracking
         }
-        msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
-        msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
+        msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
+        msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
         ++index;
     }
     return index;
@@ -5219,16 +5979,28 @@
     }
 
     if (!isExecuting()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "releaseOutputBuffer() is valid at Executing states; currently %s",
+                apiStateString().c_str()));
         return -EINVAL;
     }
 
     if (index >= mPortBuffers[kPortIndexOutput].size()) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "index out of range (index=%zu; size=%zu)",
+                index, mPortBuffers[kPortIndexOutput].size()));
         return -ERANGE;
     }
 
     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
 
-    if (info->mData == nullptr || !info->mOwnedByClient) {
+    if (!info->mOwnedByClient) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "client does not own the buffer #%zu", index));
+        return -EACCES;
+    }
+    if (info->mData == nullptr) {
+        mErrorLog.log(LOG_TAG, base::StringPrintf(
+                "Fatal error: null buffer for index %zu", index));
         return -EACCES;
     }
 
@@ -5242,14 +6014,16 @@
     }
 
     if (render && buffer->size() != 0) {
-        int64_t mediaTimeUs = -1;
+        int64_t mediaTimeUs = INT64_MIN;
         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
 
+        bool noRenderTime = false;
         int64_t renderTimeNs = 0;
         if (!msg->findInt64("timestampNs", &renderTimeNs)) {
             // use media timestamp if client did not request a specific render timestamp
             ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
             renderTimeNs = mediaTimeUs * 1000;
+            noRenderTime = true;
         }
 
         if (mSoftRenderer != NULL) {
@@ -5267,16 +6041,50 @@
                 }
             }
         }
+
+        // If rendering to the screen, then schedule a time in the future to poll to see if this
+        // frame was ever rendered to seed onFrameRendered callbacks.
+        if (mIsSurfaceToDisplay) {
+            if (mediaTimeUs != INT64_MIN) {
+                noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
+                             : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
+                                                                          renderTimeNs);
+            }
+            // can't initialize this in the constructor because the Looper parent class needs to be
+            // initialized first
+            if (mMsgPollForRenderedBuffers == nullptr) {
+                mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
+            }
+            // Schedule the poll to occur 100ms after the render time - should be safe for
+            // determining if the frame was ever rendered. If no render time was specified, the
+            // presentation timestamp is used instead, which almost certainly occurs in the past,
+            // since it's almost always a zero-based offset from the start of the stream. In these
+            // scenarios, we expect the frame to be rendered with no delay.
+            int64_t delayUs = noRenderTime ? 0 : renderTimeNs / 1000 - ALooper::GetNowUs();
+            delayUs += 100 * 1000; /* 100ms in microseconds */
+            status_t err =
+                    mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
+                                                           delayUs);
+            if (err != OK) {
+                ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
+            }
+        }
         status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
 
         if (err == NO_INIT) {
-            ALOGE("rendering to non-initilized(obsolete) surface");
+            mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
             return err;
         }
         if (err != OK) {
             ALOGI("rendring output error %d", err);
         }
     } else {
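+        // A non-empty output frame that is discarded rather than rendered counts as a
+        // skipped frame for render-quality tracking.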
+        if (mIsSurfaceToDisplay && buffer->size() != 0) {
+            int64_t mediaTimeUs = INT64_MIN;
+            if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
+                mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
+            }
+        }
         mBufferChannel->discardBuffer(buffer);
     }
 
@@ -5286,7 +6094,7 @@
 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
 
-    List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+    std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
 
     if (availBuffers->empty()) {
         return nullptr;
@@ -5303,7 +6111,7 @@
         return -EAGAIN;
     }
 
-    List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+    std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
     size_t index = *availBuffers->begin();
     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
     availBuffers->erase(availBuffers->begin());
@@ -5343,7 +6151,7 @@
 
         // in case we don't connect, ensure that we don't signal the surface is
         // connected to the screen
-        mIsSurfaceToScreen = false;
+        mIsSurfaceToDisplay = false;
 
         err = nativeWindowConnect(surface.get(), "connectToSurface");
         if (err == OK) {
@@ -5373,7 +6181,7 @@
             // keep track whether or not the buffers of the connected surface go to the screen
             int result = 0;
             surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
-            mIsSurfaceToScreen = result != 0;
+            mIsSurfaceToDisplay = result != 0;
         }
     }
     // do not return ALREADY_EXISTS unless surfaces are the same
@@ -5391,7 +6199,7 @@
         }
         // assume disconnected even on error
         mSurface.clear();
-        mIsSurfaceToScreen = false;
+        mIsSurfaceToDisplay = false;
     }
     return err;
 }
@@ -5423,6 +6231,9 @@
 void MediaCodec::onOutputBufferAvailable() {
     int32_t index;
     while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
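+        // Decode-only output buffers are dropped here and never surfaced to the client.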
+        if (discardDecodeOnlyOutputBuffer(index)) {
+            continue;
+        }
         const sp<MediaCodecBuffer> &buffer =
             mPortBuffers[kPortIndexOutput][index].mData;
         sp<AMessage> msg = mCallback->dup();
@@ -5446,7 +6257,14 @@
         msg->post();
     }
 }
-
+void MediaCodec::onCryptoError(const sp<AMessage> & msg) {
+    if (mCallback != NULL) {
+        sp<AMessage> cb_msg = mCallback->dup();
+        cb_msg->setInt32("callbackID", CB_CRYPTO_ERROR);
+        cb_msg->extend(msg);
+        cb_msg->post();
+    }
+}
 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
     if (mCallback != NULL) {
         sp<AMessage> msg = mCallback->dup();
@@ -5508,6 +6326,9 @@
 }
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
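+    // Parameters cannot be applied before a codec component has been allocated.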
+    if (mState == UNINITIALIZED || mState == INITIALIZING) {
+        return NO_INIT;
+    }
     updateLowLatency(params);
     mapFormat(mComponentName, params, nullptr, false);
     updateTunnelPeek(params);
@@ -5540,12 +6361,14 @@
             memcpy(csd->data() + 4, nalStart, nalSize);
 
             mOutputFormat->setBuffer(
-                    AStringPrintf("csd-%u", csdIndex).c_str(), csd);
+                    base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
 
             ++csdIndex;
         }
 
         if (csdIndex != 2) {
+            mErrorLog.log(LOG_TAG, base::StringPrintf(
+                    "codec config data contains %u NAL units; expected 2.", csdIndex));
             return ERROR_MALFORMED;
         }
     } else {
@@ -5587,6 +6410,32 @@
     mDeferredMessages.clear();
 }
 
+std::string MediaCodec::apiStateString() {
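+    // Returns a human-readable description of the current state for API misuse error logs.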
+    const char *rval = NULL;
+    char rawbuffer[16]; // room for "%d"
+
+    switch (mState) {
+        case UNINITIALIZED:
+            rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
+            break;
+        case INITIALIZING: rval = "while constructing"; break;
+        case INITIALIZED: rval = "at Uninitialized state"; break;
+        case CONFIGURING: rval = "during configure()"; break;
+        case CONFIGURED: rval = "at Configured state"; break;
+        case STARTING: rval = "during start()"; break;
+        case STARTED: rval = "at Running state"; break;
+        case FLUSHING: rval = "during flush()"; break;
+        case FLUSHED: rval = "at Flushed state"; break;
+        case STOPPING: rval = "during stop()"; break;
+        case RELEASING: rval = "during release()"; break;
+        default:
+            snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
+            rval = rawbuffer;
+            break;
+    }
+    return rval;
+}
+
 std::string MediaCodec::stateString(State state) {
     const char *rval = NULL;
     char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index a3040f4..4ad3276 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -31,6 +31,7 @@
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 #include <media/stagefright/CCodec.h>
 #include <media/stagefright/Codec2InfoBuilder.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
 #include <media/stagefright/MediaCodecListOverrides.h>
 #include <media/stagefright/MediaErrors.h>
@@ -354,7 +355,7 @@
 
 //static
 void MediaCodecList::findMatchingCodecs(
-        const char *mime, bool encoder, uint32_t flags, sp<AMessage> format,
+        const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
         Vector<AString> *matches) {
     matches->clear();
 
@@ -398,11 +399,23 @@
             property_get_bool("debug.stagefright.swcodec", false)) {
         matches->sort(compareSoftwareCodecsFirst);
     }
+
+    // If we did not find anything, it may be because of a profile mismatch.
+    // Recurse after trimming the profile from the format to see if that yields
+    // a suitable codec.
+    //
+    int profile = -1;
+    if (matches->empty() && format != nullptr && format->findInt32(KEY_PROFILE, &profile)) {
+        ALOGV("no matching codec found, retrying without profile");
+        sp<AMessage> formatNoProfile = format->dup();
+        formatNoProfile->removeEntryByName(KEY_PROFILE);
+        findMatchingCodecs(mime, encoder, flags, formatNoProfile, matches);
+    }
 }
 
-/*static*/
-bool MediaCodecList::codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info,
-                                        sp<AMessage> format) {
+// static
+bool MediaCodecList::codecHandlesFormat(
+        const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format) {
 
     if (format == nullptr) {
         ALOGD("codecHandlesFormat: no format, so no extra checks");
@@ -510,9 +523,7 @@
         }
 
         int32_t profile = -1;
-        if (format->findInt32("profile", &profile)) {
-            int32_t level = -1;
-            format->findInt32("level", &level);
+        if (format->findInt32(KEY_PROFILE, &profile)) {
             Vector<MediaCodecInfo::ProfileLevel> profileLevels;
             capabilities->getSupportedProfileLevels(&profileLevels);
             auto it = profileLevels.begin();
@@ -520,14 +531,11 @@
                 if (profile != it->mProfile) {
                     continue;
                 }
-                if (level > -1 && level > it->mLevel) {
-                    continue;
-                }
                 break;
             }
 
             if (it == profileLevels.end()) {
-                ALOGV("Codec does not support profile %d with level %d", profile, level);
+                ALOGV("Codec does not support profile %d", profile);
                 return false;
             }
         }
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index a946f71..9768f97 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -46,6 +46,30 @@
            format == MediaMuxer::OUTPUT_FORMAT_HEIF;
 }
 
+MediaMuxer* MediaMuxer::create(int fd, OutputFormat format) {
+    bool isInputValid = true;
+    if (isMp4Format(format)) {
+        isInputValid = MPEG4Writer::isFdOpenModeValid(fd);
+    } else if (format == OUTPUT_FORMAT_WEBM) {
+        isInputValid = WebmWriter::isFdOpenModeValid(fd);
+    } else if (format == OUTPUT_FORMAT_OGG) {
+        isInputValid = OggWriter::isFdOpenModeValid(fd);
+    } else {
+        ALOGE("MediaMuxer does not support output format %d", format);
+        return nullptr;
+    }
+    if (!isInputValid) {
+        ALOGE("File descriptor is not suitable for format %d", format);
+        return nullptr;
+    }
+
+    MediaMuxer *muxer = new (std::nothrow) MediaMuxer(fd, (MediaMuxer::OutputFormat)format);
+    if (muxer == nullptr) {
+        ALOGE("Failed to create writer object");
+    }
+    return muxer;
+}
+
 MediaMuxer::MediaMuxer(int fd, OutputFormat format)
     : mFormat(format),
       mState(UNINITIALIZED) {
@@ -138,7 +162,7 @@
         return INVALID_OPERATION;
     }
     if (!isMp4Format(mFormat)) {
-        ALOGE("setLocation() is only supported for .mp4, .3gp or .heic output.");
+        ALOGE("setLocation() is only supported for .mp4, .3gp, .heic or .avif output.");
         return INVALID_OPERATION;
     }
 
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 6893324..28ca9ff 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -20,7 +20,7 @@
 
 #include <media/stagefright/NuMediaExtractor.h>
 
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
 
 #include <datasource/DataSourceFactory.h>
 #include <datasource/FileSource.h>
@@ -72,6 +72,37 @@
     }
 }
 
+status_t NuMediaExtractor::initMediaExtractor(const sp<DataSource>& dataSource) {
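+    // Shared by the setDataSource() overloads: create the extractor, apply any CAS token,
+    // and cache the implementation name, duration and bitrate.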
+    status_t err = OK;
+
+    mImpl = MediaExtractorFactory::Create(dataSource);
+    if (mImpl == NULL) {
+        ALOGE("%s: failed to create MediaExtractor", __FUNCTION__);
+        return ERROR_UNSUPPORTED;
+    }
+
+    setEntryPointToRemoteMediaExtractor();
+
+    if (!mCasToken.empty()) {
+        err = mImpl->setMediaCas(mCasToken);
+        if (err != OK) {
+            ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
+            return err;
+        }
+    }
+
+    // Get the name of the implementation.
+    mName = mImpl->name();
+
+    // Update the duration and bitrate
+    err = updateDurationAndBitrate();
+    if (err == OK) {
+        mDataSource = dataSource;
+    }
+
+    return OK;
+}
+
 status_t NuMediaExtractor::setDataSource(
         const sp<MediaHTTPService> &httpService,
         const char *path,
@@ -89,28 +120,8 @@
         return -ENOENT;
     }
 
-    mImpl = MediaExtractorFactory::Create(dataSource);
-
-    if (mImpl == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-    setEntryPointToRemoteMediaExtractor();
-
-    status_t err = OK;
-    if (!mCasToken.empty()) {
-        err = mImpl->setMediaCas(mCasToken);
-        if (err != OK) {
-            ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
-            return err;
-        }
-    }
-
-    err = updateDurationAndBitrate();
-    if (err == OK) {
-        mDataSource = dataSource;
-    }
-
-    return OK;
+    // Initialize MediaExtractor using the data source
+    return initMediaExtractor(dataSource);
 }
 
 status_t NuMediaExtractor::setDataSource(int fd, off64_t offset, off64_t size) {
@@ -131,27 +142,8 @@
         return err;
     }
 
-    mImpl = MediaExtractorFactory::Create(fileSource);
-
-    if (mImpl == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-    setEntryPointToRemoteMediaExtractor();
-
-    if (!mCasToken.empty()) {
-        err = mImpl->setMediaCas(mCasToken);
-        if (err != OK) {
-            ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
-            return err;
-        }
-    }
-
-    err = updateDurationAndBitrate();
-    if (err == OK) {
-        mDataSource = fileSource;
-    }
-
-    return OK;
+    // Initialize MediaExtractor using the file source
+    return initMediaExtractor(fileSource);
 }
 
 status_t NuMediaExtractor::setDataSource(const sp<DataSource> &source) {
@@ -166,32 +158,13 @@
         return err;
     }
 
-    mImpl = MediaExtractorFactory::Create(source);
-
-    if (mImpl == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-    setEntryPointToRemoteMediaExtractor();
-
-    if (!mCasToken.empty()) {
-        err = mImpl->setMediaCas(mCasToken);
-        if (err != OK) {
-            ALOGE("%s: failed to setMediaCas (%d)", __FUNCTION__, err);
-            return err;
-        }
-    }
-
-    err = updateDurationAndBitrate();
-    if (err == OK) {
-        mDataSource = source;
-    }
-
-    return err;
+    // Initialize MediaExtractor using the given data source
+    return initMediaExtractor(source);
 }
 
 const char* NuMediaExtractor::getName() const {
     Mutex::Autolock autoLock(mLock);
-    return mImpl == nullptr ? nullptr : mImpl->name().string();
+    return mImpl == nullptr ? nullptr : mName.string();
 }
 
 static String8 arrayToString(const std::vector<uint8_t> &array) {
@@ -325,6 +298,9 @@
     size_t psshsize;
     if (meta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
         sp<ABuffer> buf = new ABuffer(psshsize);
+        if (buf->data() == nullptr) {
+            return -ENOMEM;
+        }
         memcpy(buf->data(), pssh, psshsize);
         (*format)->setBuffer("pssh", buf);
     }
@@ -335,6 +311,9 @@
     if (meta->findData(kKeySlowMotionMarkers, &type, &slomoMarkers, &slomoMarkersSize)
             && slomoMarkersSize > 0) {
         sp<ABuffer> buf = new ABuffer(slomoMarkersSize);
+        if (buf->data() == nullptr) {
+            return -ENOMEM;
+        }
         memcpy(buf->data(), slomoMarkers, slomoMarkersSize);
         (*format)->setBuffer("slow-motion-markers", buf);
     }
@@ -666,9 +645,12 @@
         numPageSamples = -1;
     }
 
+    // The caller has verified there is sufficient space; append numPageSamples and
+    // account for the extra bytes in the buffer range.
     memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
            &numPageSamples,
            sizeof(numPageSamples));
+    buffer->setRange(buffer->offset(), buffer->size() + sizeof(numPageSamples));
 
     uint32_t type;
     const void *data;
@@ -717,6 +699,8 @@
 
     ssize_t minIndex = fetchAllTrackSamples();
 
+    buffer->setRange(0, 0);     // start with an empty buffer
+
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
@@ -732,25 +716,25 @@
         sampleSize += sizeof(int32_t);
     }
 
+    // capacity() is ok since we cleared out the buffer
     if (buffer->capacity() < sampleSize) {
         return -ENOMEM;
     }
 
+    const size_t srclen = it->mBuffer->range_length();
     const uint8_t *src =
         (const uint8_t *)it->mBuffer->data()
             + it->mBuffer->range_offset();
 
-    memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
+    memcpy((uint8_t *)buffer->data(), src, srclen);
+    buffer->setRange(0, srclen);
 
     status_t err = OK;
     if (info->mTrackFlags & kIsVorbis) {
+        // adjusts range when it inserts the extra bits
         err = appendVorbisNumPageSamples(it->mBuffer, buffer);
     }
 
-    if (err == OK) {
-        buffer->setRange(0, sampleSize);
-    }
-
     return err;
 }
 
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index e67496e..f02e168 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -7,3 +7,5 @@
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
+
+per-file Camera*.cpp = file:/camera/OWNERS
diff --git a/media/libstagefright/OggWriter.cpp b/media/libstagefright/OggWriter.cpp
index 0f5e95e..cff37a3 100644
--- a/media/libstagefright/OggWriter.cpp
+++ b/media/libstagefright/OggWriter.cpp
@@ -96,6 +96,7 @@
         return ERROR_UNSUPPORTED;
     }
 
+    // NOLINTNEXTLINE(clang-analyzer-unix.MallocSizeof)
     mOs = (OggStreamState*) malloc(sizeof(ogg_stream_state));
     if (ogg_stream_init((ogg_stream_state*)mOs, rand()) == -1) {
         ALOGE("ogg stream init failed");
@@ -242,13 +243,15 @@
 
     mDone = true;
 
-    void* dummy;
-    pthread_join(mThread, &dummy);
+    status_t status = mSource->stop();
 
-    status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+    void *pthread_res;
+    pthread_join(mThread, &pthread_res);
+
+    status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(pthread_res));
     {
-        status_t status = mSource->stop();
-        if (err == OK && (status != OK && status != ERROR_END_OF_STREAM)) {
+        if (err == OK &&
+            (status != OK && status != ERROR_END_OF_STREAM)) {
             err = status;
         }
     }
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 1f569ef..291b892 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -193,10 +193,38 @@
 
 status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
     status_t err = NO_ERROR;
-    ANativeWindowBuffer* anb = NULL;
+    ANativeWindowBuffer* anb = nullptr;
     int numBufs = 0;
     int minUndequeuedBufs = 0;
 
+    auto handleError = [](ANativeWindow *nativeWindow, ANativeWindowBuffer* anb, status_t err)
+    {
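+        // Shared cleanup helper: cancel any outstanding buffer and restore the original
+        // window connection (replaces the previous goto-based error handling).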
+        if (anb != nullptr) {
+            nativeWindow->cancelBuffer(nativeWindow, anb, -1);
+            anb = nullptr;
+        }
+
+        // Clean up after success or error.
+        status_t err2 = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_CPU);
+        if (err2 != NO_ERROR) {
+            ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
+                    strerror(-err2), -err2);
+            if (err == NO_ERROR) {
+                err = err2;
+            }
+        }
+
+        err2 = nativeWindowConnect(nativeWindow, "pushBlankBuffersToNativeWindow(err2)");
+        if (err2 != NO_ERROR) {
+            ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err);
+            if (err == NO_ERROR) {
+                err = err2;
+            }
+        }
+
+        return err;
+    };
+
     // We need to reconnect to the ANativeWindow as a CPU client to ensure that
     // no frames get dropped by SurfaceFlinger assuming that these are video
     // frames.
@@ -217,24 +245,29 @@
             nativeWindow, 1, 1, HAL_PIXEL_FORMAT_RGBX_8888, 0, GRALLOC_USAGE_SW_WRITE_OFTEN,
             false /* reconnect */);
     if (err != NO_ERROR) {
-        goto error;
+        return handleError(nativeWindow, anb, err);
     }
 
     static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->allowAllocation(true);
 
+    // In non-blocking mode (timeout = 0), native_window_dequeue_buffer_and_wait()
+    // can fail with a timeout. Switching to blocking mode ensures that dequeue
+    // does not time out.
+    static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->setDequeueTimeout(-1);
+
     err = nativeWindow->query(nativeWindow,
             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
     if (err != NO_ERROR) {
         ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query "
                 "failed: %s (%d)", strerror(-err), -err);
-        goto error;
+        return handleError(nativeWindow, anb, err);
     }
 
     numBufs = minUndequeuedBufs + 1;
     err = native_window_set_buffer_count(nativeWindow, numBufs);
     if (err != NO_ERROR) {
         ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", strerror(-err), -err);
-        goto error;
+        return handleError(nativeWindow, anb, err);
     }
 
     // We push numBufs + 1 buffers to ensure that we've drawn into the same
@@ -252,7 +285,7 @@
         sp<GraphicBuffer> buf(GraphicBuffer::from(anb));
 
         // Fill the buffer with a 1x1 checkerboard pattern ;)
-        uint32_t *img = NULL;
+        uint32_t *img = nullptr;
         err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
         if (err != NO_ERROR) {
             ALOGE("error pushing blank frames: lock failed: %s (%d)", strerror(-err), -err);
@@ -273,34 +306,10 @@
             break;
         }
 
-        anb = NULL;
+        anb = nullptr;
     }
 
-error:
-
-    if (anb != NULL) {
-        nativeWindow->cancelBuffer(nativeWindow, anb, -1);
-        anb = NULL;
-    }
-
-    // Clean up after success or error.
-    status_t err2 = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_CPU);
-    if (err2 != NO_ERROR) {
-        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", strerror(-err2), -err2);
-        if (err == NO_ERROR) {
-            err = err2;
-        }
-    }
-
-    err2 = nativeWindowConnect(nativeWindow, "pushBlankBuffersToNativeWindow(err2)");
-    if (err2 != NO_ERROR) {
-        ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err);
-        if (err == NO_ERROR) {
-            err = err2;
-        }
-    }
-
-    return err;
+    return handleError(nativeWindow, anb, err);
 }
 
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index a443ed9..863177d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -24,7 +24,7 @@
 #include <utility>
 #include <vector>
 
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
 #include "include/HevcUtils.h"
 
 #include <cutils/properties.h>
@@ -794,6 +794,12 @@
         { "thumbnail-height", kKeyThumbnailHeight },
         { "track-id", kKeyTrackID },
         { "valid-samples", kKeyValidSamples },
+        { "dvb-component-tag", kKeyDvbComponentTag},
+        { "dvb-audio-description", kKeyDvbAudioDescription},
+        { "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
+        { "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
+        { "profile", kKeyAudioProfile },
+        { "level", kKeyAudioLevel },
     }
 };
 
@@ -1002,6 +1008,26 @@
         msg->setInt32("is-sync-frame", 1);
     }
 
+    int32_t dvbComponentTag = 0;
+    if (meta->findInt32(kKeyDvbComponentTag, &dvbComponentTag)) {
+        msg->setInt32("dvb-component-tag", dvbComponentTag);
+    }
+
+    int32_t dvbAudioDescription = 0;
+    if (meta->findInt32(kKeyDvbAudioDescription, &dvbAudioDescription)) {
+        msg->setInt32("dvb-audio-description", dvbAudioDescription);
+    }
+
+    int32_t dvbTeletextMagazineNumber = 0;
+    if (meta->findInt32(kKeyDvbTeletextMagazineNumber, &dvbTeletextMagazineNumber)) {
+        msg->setInt32("dvb-teletext-magazine-number", dvbTeletextMagazineNumber);
+    }
+
+    int32_t dvbTeletextPageNumber = 0;
+    if (meta->findInt32(kKeyDvbTeletextPageNumber, &dvbTeletextPageNumber)) {
+        msg->setInt32("dvb-teletext-page-number", dvbTeletextPageNumber);
+    }
+
     const char *lang;
     if (meta->findCString(kKeyMediaLanguage, &lang)) {
         msg->setString("language", lang);
@@ -1788,6 +1814,26 @@
         meta->setInt32(kKeyMaxBitRate, maxBitrate);
     }
 
+    int32_t dvbComponentTag = 0;
+    if (msg->findInt32("dvb-component-tag", &dvbComponentTag) && dvbComponentTag > 0) {
+        meta->setInt32(kKeyDvbComponentTag, dvbComponentTag);
+    }
+
+    int32_t dvbAudioDescription = 0;
+    if (msg->findInt32("dvb-audio-description", &dvbAudioDescription)) {
+        meta->setInt32(kKeyDvbAudioDescription, dvbAudioDescription);
+    }
+
+    int32_t dvbTeletextMagazineNumber = 0;
+    if (msg->findInt32("dvb-teletext-magazine-number", &dvbTeletextMagazineNumber)) {
+        meta->setInt32(kKeyDvbTeletextMagazineNumber, dvbTeletextMagazineNumber);
+    }
+
+    int32_t dvbTeletextPageNumber = 0;
+    if (msg->findInt32("dvb-teletext-page-number", &dvbTeletextPageNumber)) {
+        meta->setInt32(kKeyDvbTeletextPageNumber, dvbTeletextPageNumber);
+    }
+
     AString lang;
     if (msg->findString("language", &lang)) {
         meta->setCString(kKeyMediaLanguage, lang.c_str());
@@ -1911,10 +1957,10 @@
         if (msg->findString("ts-schema", &tsSchema)) {
             unsigned int numLayers = 0;
             unsigned int numBLayers = 0;
-            char dummy;
+            char placeholder;
             int tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
-                    &numLayers, &dummy, &numBLayers, &dummy);
-            if ((tags == 1 || (tags == 3 && dummy == '+'))
+                    &numLayers, &placeholder, &numBLayers, &placeholder);
+            if ((tags == 1 || (tags == 3 && placeholder == '+'))
                     && numLayers > 0 && numLayers < UINT32_MAX - numBLayers
                     && numLayers + numBLayers <= INT32_MAX) {
                 meta->setInt32(kKeyTemporalLayerCount, numLayers + numBLayers);
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
new file mode 100644
index 0000000..4f12a37
--- /dev/null
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -0,0 +1,739 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "VideoRenderQualityTracker"
+#include <utils/Log.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+#include <assert.h>
+#include <charconv>
+#include <cmath>
+#include <stdio.h>
+#include <sys/time.h>
+
+#include <android-base/parsebool.h>
+#include <android-base/parseint.h>
+
+namespace android {
+
+using android::base::ParseBoolResult;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+        VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
+        GetServerConfigurableFlagFn;
+
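+// Parses a boolean flag from the server-configurable flag store; *value keeps its
+// caller-supplied default if the stored string cannot be parsed.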
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, bool *value) {
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
+                                                          *value ? "true" : "false");
+    switch (android::base::ParseBool(valueStr)) {
+    case ParseBoolResult::kTrue: *value = true; break;
+    case ParseBoolResult::kFalse: *value = false; break;
+    case ParseBoolResult::kError:
+        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+              valueStr.c_str());
+        break;
+    }
+}
+
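+// Parses an integer flag; the current *value serves as the default and is left
+// unchanged if parsing fails.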
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, int32_t *value) {
+    char defaultStr[12]; // large enough for "-2147483648" plus the terminating NUL
+    snprintf(defaultStr, sizeof(defaultStr), "%d", int(*value));
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
+    if (!android::base::ParseInt(valueStr.c_str(), value) || valueStr.size() == 0) {
+        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+              valueStr.c_str());
+        return;
+    }
+}
+
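+// Parses a comma-separated list flag; the current contents of *value are serialized as
+// the default and are replaced only if every element parses successfully.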
+template<typename T>
+static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
+                                      char const *flagNameSuffix, std::vector<T> *value) {
+    std::stringstream sstr;
+    for (size_t i = 0; i < value->size(); ++i) {
+        if (i != 0) {
+            sstr << ",";
+        }
+        sstr << (*value)[i];
+    }
+    std::string flagName("render_metrics_");
+    flagName.append(flagNameSuffix);
+    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
+    if (valueStr.size() == 0) {
+        return;
+    }
+    // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
+    std::vector<T> newValues;
+    const char *p = valueStr.c_str();
+    const char *last = p + valueStr.size();
+    while (p != last) {
+        if (*p == ',') {
+            p++;
+        }
+        T parsedValue = -1;
+        auto [ptr, error] = std::from_chars(p, last, parsedValue);
+        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+            ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
+                  valueStr.c_str());
+            return;
+        }
+        p = ptr;
+        newValues.push_back(parsedValue);
+    }
+    *value = std::move(newValues);
+}
+
+VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
+    clear();
+}
+
+void VideoRenderQualityMetrics::clear() {
+    firstRenderTimeUs = 0;
+    frameReleasedCount = 0;
+    frameRenderedCount = 0;
+    frameDroppedCount = 0;
+    frameSkippedCount = 0;
+    contentFrameRate = FRAME_RATE_UNDETERMINED;
+    desiredFrameRate = FRAME_RATE_UNDETERMINED;
+    actualFrameRate = FRAME_RATE_UNDETERMINED;
+    freezeEventCount = 0;
+    freezeDurationMsHistogram.clear();
+    freezeDistanceMsHistogram.clear();
+    judderEventCount = 0;
+    judderScoreHistogram.clear();
+}
+
+VideoRenderQualityTracker::Configuration
+        VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
+            GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
+    VideoRenderQualityTracker::Configuration c;
+#define getFlag(FIELDNAME, FLAGNAME) \
+    getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
+    getFlag(enabled, "enabled");
+    getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
+    getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
+    getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
+    getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
+    getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
+    getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
+    getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
+    getFlag(freezeEventMax, "freeze_event_max");
+    getFlag(freezeEventDetailsMax, "freeze_event_details_max");
+    getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
+    getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
+    getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
+    getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
+    getFlag(judderEventMax, "judder_event_max");
+    getFlag(judderEventDetailsMax, "judder_event_details_max");
+    getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
+#undef getFlag
+    return c;
+}
+
+VideoRenderQualityTracker::Configuration::Configuration() {
+    enabled = true;
+
+    // Assume that the app is skipping frames because it's detected that the frame couldn't be
+    // rendered in time.
+    areSkippedFramesDropped = true;
+
+    // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
+    maxExpectedContentFrameDurationUs = 400 * 1000;
+
+    // Allow for 2 milliseconds of deviation when detecting frame rates
+    frameRateDetectionToleranceUs = 2 * 1000;
+
+    // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
+    // because of frame drops for live content, or because the user is seeking.
+    liveContentFrameDropToleranceUs = 200 * 1000;
+
+    // Freeze configuration
+    freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
+    freezeDurationMsHistogramToScore = {1,  1,  1,  1,  1,   1,   1,   1,   1,   1,   1,   1,   1};
+    freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
+                                        60000};
+    freezeEventMax = 0; // enabled only when debugging
+    freezeEventDetailsMax = 20;
+    freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less
+
+    // Judder configuration
+    judderErrorToleranceUs = 2000;
+    judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
+    judderScoreHistogramToScore = {1, 1, 1, 1,  1,  1,  1,  1,  1,  1,  1,  1};
+    judderEventMax = 0; // enabled only when debugging
+    judderEventDetailsMax = 20;
+    judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
+    configureHistograms(mMetrics, mConfiguration);
+    clear();
+}
+
+VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
+        mConfiguration(configuration) {
+    configureHistograms(mMetrics, mConfiguration);
+    clear();
+}
+
+void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    // Since P-frames are queued out of order, hold onto the P-frame until we can track it in
+    // render order. This only works because it depends on today's encoding algorithms that only
+    // allow B-frames to refer to ONE P-frame that comes after it. If the cardinality of P-frames
+    // in a single mini-GOP is increased, this algorithm breaks down.
+    if (mTunnelFrameQueuedContentTimeUs == -1) {
+        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+    } else if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
+        onFrameReleased(contentTimeUs, 0);
+    } else {
+        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
+    }
+}
+
+void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
+    // app might be seeking to a starting point that isn't the first key frame.
+    if (mLastRenderTimeUs == -1) {
+        return;
+    }
+
+    resetIfDiscontinuity(contentTimeUs, -1);
+
+    // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
+    // app could be terminating the playback. The pending count will be added to the metrics if and
+    // when the next frame is rendered.
+    mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
+    onFrameReleased(contentTimeUs, nowUs() * 1000);
+}
+
+void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
+                                                int64_t desiredRenderTimeNs) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
+    resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
+    mMetrics.frameReleasedCount++;
+    mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
+    mLastContentTimeUs = contentTimeUs;
+}
+
+void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+                                                FreezeEvent *freezeEventOut,
+                                                JudderEvent *judderEventOut) {
+    if (!mConfiguration.enabled) {
+        return;
+    }
+
+    int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;
+
+    if (mLastRenderTimeUs != -1) {
+        mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
+    }
+    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
+    // frames since the app is not skipping them to terminate playback.
+    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
+        processMetricsForSkippedFrame(contentTimeUs);
+    }
+    mPendingSkippedFrameContentTimeUsList = {};
+
+    // The pending queued frame can end up being rendered (e.g. when it's the last frame of the
+    // video and nothing is queued after it), so track its release immediately.
+    if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
+        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
+        mTunnelFrameQueuedContentTimeUs = -1;
+    }
+
+    static const FrameInfo noFrame = {-1, -1};
+    FrameInfo nextExpectedFrame = noFrame;
+    while (!mNextExpectedRenderedFrameQueue.empty()) {
+        nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
+        mNextExpectedRenderedFrameQueue.pop();
+        // Happy path - the rendered frame is what we expected it to be
+        if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        // This isn't really supposed to happen - the next rendered frame should be the expected
+        // frame, or, if there are frame drops, a frame later in the content stream
+        if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
+            ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
+                  (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
+            break;
+        }
+        processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
+                                      nextExpectedFrame.desiredRenderTimeUs);
+    }
+    processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
+                                   nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
+                                   freezeEventOut, judderEventOut);
+    mLastRenderTimeUs = actualRenderTimeUs;
+}
+
+VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
+    FreezeEvent event = std::move(mFreezeEvent);
+    mFreezeEvent.valid = false;
+    return event;
+}
+
+VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
+    JudderEvent event = std::move(mJudderEvent);
+    mJudderEvent.valid = false;
+    return event;
+}
+
+const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
+    if (!mConfiguration.enabled) {
+        return mMetrics;
+    }
+
+    mMetrics.freezeScore = 0;
+    if (mConfiguration.freezeDurationMsHistogramToScore.size() ==
+        mMetrics.freezeDurationMsHistogram.size()) {
+        for (int i = 0; i < mMetrics.freezeDurationMsHistogram.size(); ++i) {
+            mMetrics.freezeScore += mMetrics.freezeDurationMsHistogram[i] *
+                    mConfiguration.freezeDurationMsHistogramToScore[i];
+        }
+    }
+    mMetrics.freezeRate = float(double(mMetrics.freezeDurationMsHistogram.getSum()) /
+            mRenderDurationMs);
+
+    mMetrics.judderScore = 0;
+    if (mConfiguration.judderScoreHistogramToScore.size() == mMetrics.judderScoreHistogram.size()) {
+        for (int i = 0; i < mMetrics.judderScoreHistogram.size(); ++i) {
+            mMetrics.judderScore += mMetrics.judderScoreHistogram[i] *
+                    mConfiguration.judderScoreHistogramToScore[i];
+        }
+    }
+    mMetrics.judderRate = float(double(mMetrics.judderScoreHistogram.getCount()) /
+            (mMetrics.frameReleasedCount + mMetrics.frameSkippedCount));
+
+    return mMetrics;
+}
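+
+// Worked example for the scores above (hypothetical histogram contents): with the default
+// per-bucket weights of 1, freezeScore is simply the total number of recorded freezes, e.g. a
+// histogram of {2, 1, 0, ...} scores 2*1 + 1*1 = 3. freezeRate divides the summed freeze duration
+// by the total render duration, and judderRate divides the juddered-frame count by all frames
+// released or skipped.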
+
+void VideoRenderQualityTracker::clear() {
+    mRenderDurationMs = 0;
+    mMetrics.clear();
+    resetForDiscontinuity();
+}
+
+void VideoRenderQualityTracker::resetForDiscontinuity() {
+    mLastContentTimeUs = -1;
+    mLastRenderTimeUs = -1;
+    mLastFreezeEndTimeUs = -1;
+    mLastJudderEndTimeUs = -1;
+    mWasPreviousFrameDropped = false;
+    mFreezeEvent.valid = false;
+    mJudderEvent.valid = false;
+
+    // Don't worry about tracking frame rendering times from now up until playback catches up to the
+    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
+    // to the user is minimal, so better to just keep things simple and not bother.
+    mNextExpectedRenderedFrameQueue = {};
+    mTunnelFrameQueuedContentTimeUs = -1;
+
+    // Ignore any frames that were skipped just prior to the discontinuity.
+    mPendingSkippedFrameContentTimeUsList = {};
+
+    // All frame durations can be now ignored since all bets are off now on what the render
+    // durations should be after the discontinuity.
+    for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
+        mActualFrameDurationUs[i] = -1;
+        mDesiredFrameDurationUs[i] = -1;
+        mContentFrameDurationUs[i] = -1;
+    }
+    mActualFrameDurationUs.priorTimestampUs = -1;
+    mDesiredFrameDurationUs.priorTimestampUs = -1;
+    mContentFrameDurationUs.priorTimestampUs = -1;
+}
+
+bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
+                                                     int64_t desiredRenderTimeUs) {
+    if (mLastContentTimeUs == -1) {
+        resetForDiscontinuity();
+        return true;
+    }
+    if (contentTimeUs < mLastContentTimeUs) {
+        ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
+              int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+              int(contentTimeUs / 1000));
+        resetForDiscontinuity();
+        return true;
+    }
+    if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
+        // The content frame duration could be long due to frame drops for live content. This can be
+        // detected by looking at the app's desired rendering duration. If the app's rendered frame
+        // duration is roughly the same as the content's frame duration, then it is assumed that
+        // the forward discontinuity is due to frame drops for live content. A false positive can
+        // occur if the wall-clock time the user spends seeking equals the amount of content time
+        // skipped. This is very unlikely to occur in practice but CAN occur - the user starts
+        // seeking forward, gets distracted, and then returns to seeking forward.
+        bool skippedForwardDueToLiveContentFrameDrops = false;
+        if (desiredRenderTimeUs != -1) {
+            int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
+            int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
+            skippedForwardDueToLiveContentFrameDrops =
+                    abs(contentFrameDurationUs - desiredFrameDurationUs) <
+                    mConfiguration.liveContentFrameDropToleranceUs;
+        }
+        if (!skippedForwardDueToLiveContentFrameDrops) {
+            ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
+                int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
+                int(contentTimeUs / 1000));
+            resetForDiscontinuity();
+            return true;
+        }
+    }
+    return false;
+}
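+
+// Worked example for the forward-jump check above (hypothetical times): a jump in content time
+// from 10.0s to 12.0s is a 2000ms gap, larger than the 400ms maxExpectedContentFrameDurationUs.
+// If the desired render time also advanced by about 2000ms, the two gaps differ by less than the
+// 200ms liveContentFrameDropToleranceUs, so the jump is attributed to live-content frame drops
+// and nothing is reset; if the desired render time advanced by only ~33ms, the jump is treated as
+// a seek and the tracker state is reset.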
+
+void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
+    mMetrics.frameSkippedCount++;
+    if (mConfiguration.areSkippedFramesDropped) {
+        processMetricsForDroppedFrame(contentTimeUs, -1);
+        return;
+    }
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, -1);
+    updateFrameDurations(mActualFrameDurationUs, -1);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
+                                                              int64_t desiredRenderTimeUs) {
+    mMetrics.frameDroppedCount++;
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+    updateFrameDurations(mActualFrameDurationUs, -1);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+    mWasPreviousFrameDropped = true;
+}
+
+void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
+                                                               int64_t desiredRenderTimeUs,
+                                                               int64_t actualRenderTimeUs,
+                                                               FreezeEvent *freezeEventOut,
+                                                               JudderEvent *judderEventOut) {
+    // Capture the timestamp at which the first frame was rendered
+    if (mMetrics.firstRenderTimeUs == 0) {
+        mMetrics.firstRenderTimeUs = actualRenderTimeUs;
+    }
+
+    mMetrics.frameRenderedCount++;
+
+    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
+    // So, even though a frame was rendered, its impact on the user is insignificant, so don't do
+    // anything other than count it as a rendered frame.
+    if (contentTimeUs == -1) {
+        return;
+    }
+    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
+    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
+    updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
+    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
+    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
+
+    // If the previous frame was dropped, there was a freeze if we've already rendered a frame
+    if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
+        processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
+                      mMetrics, mConfiguration);
+        mLastFreezeEndTimeUs = actualRenderTimeUs;
+    }
+    maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
+                            mConfiguration, freezeEventOut);
+
+    // Judder is computed on the prior video frame, not the current video frame
+    int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
+                                                     mContentFrameDurationUs,
+                                                     mConfiguration);
+    if (judderScore != 0) {
+        int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
+                mActualFrameDurationUs[1];
+        processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
+                      mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
+        mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
+    }
+    maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
+                            mConfiguration, judderEventOut);
+
+    mWasPreviousFrameDropped = false;
+}
+
+void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+                                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                                              VideoRenderQualityMetrics &m,
+                                              const Configuration &c) {
+    int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
+    m.freezeDurationMsHistogram.insert(durationMs);
+    int32_t distanceMs = -1;
+    if (lastFreezeEndTimeUs != -1) {
+        // The distance to the last freeze is measured from the end of the last freeze to the start
+        // of this freeze.
+        distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
+        m.freezeDistanceMsHistogram.insert(distanceMs);
+    }
+    if (c.freezeEventMax > 0) {
+        if (!e.valid) {
+            m.freezeEventCount++;
+            e.valid = true;
+            e.initialTimeUs = lastRenderTimeUs;
+            e.durationMs = 0;
+            e.sumDurationMs = 0;
+            e.sumDistanceMs = 0;
+            e.count = 0;
+            e.details.durationMs.clear();
+            e.details.distanceMs.clear();
+        // The first occurrence in the event should not have the distance recorded as part of the
+        // event, because it belongs in the gap between two events. However, we still want the
+        // distance recorded in the details so the times can be computed from all details in all
+        // events.
+        } else if (distanceMs != -1) {
+            e.durationMs += distanceMs;
+            e.sumDistanceMs += distanceMs;
+        }
+        e.durationMs += durationMs;
+        e.count++;
+        e.sumDurationMs += durationMs;
+        if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
+            e.details.durationMs.push_back(durationMs);
+            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+        }
+    }
+}
+
+void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
+                                                        int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                                                        const VideoRenderQualityMetrics & m,
+                                                        const Configuration &c,
+                                                        FreezeEvent *freezeEventOut) {
+    if (lastFreezeEndTimeUs == -1 || !e.valid) {
+        return;
+    }
+    // Future freeze occurrences are still pulled into the current freeze event if under tolerance
+    int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
+    if (distanceMs < c.freezeEventDistanceToleranceMs) {
+        return;
+    }
+    if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
+        *freezeEventOut = std::move(e);
+    }
+    // start recording a new freeze event after pushing the current one back to the caller
+    e.valid = false;
+}
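+
+// Worked example for the event capture above (hypothetical times): with the default 60s
+// freezeEventDistanceToleranceMs, a freeze that starts 10s after the previous one ended is folded
+// into the same pending event; once a frame renders more than 60s after the last freeze ended,
+// the pending event is handed back to the caller (subject to freezeEventMax) and a new event can
+// begin.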
+
+int64_t VideoRenderQualityTracker::computePreviousJudderScore(
+        const FrameDurationUs &actualFrameDurationUs,
+        const FrameDurationUs &contentFrameDurationUs,
+        const Configuration &c) {
+    // If the frame before or after was dropped, then don't generate a judder score, since any
+    // problems with frame drops are scored as a freeze instead.
+    if (actualFrameDurationUs[0] == -1 || actualFrameDurationUs[1] == -1 ||
+        actualFrameDurationUs[2] == -1) {
+        return 0;
+    }
+
+    // Don't score judder when playback is paused or rebuffering (long frame duration), or if
+    // the player is intentionally playing each frame at a slow rate (e.g. half-rate). If the long
+    // frame duration was unintentional, it is assumed that this will be coupled with a later frame
+    // drop, and be scored as a freeze instead of judder.
+    if (actualFrameDurationUs[1] >= 2 * contentFrameDurationUs[1]) {
+        return 0;
+    }
+
+    // The judder score is based on the error of this frame
+    int64_t errorUs = actualFrameDurationUs[1] - contentFrameDurationUs[1];
+    // Don't score judder if the previous frame has high error, but this frame has low error
+    if (abs(errorUs) < c.judderErrorToleranceUs) {
+        return 0;
+    }
+
+    // Add a penalty if this frame has judder that amplifies the problem introduced by previous
+    // judder, instead of compensating for the previous judder (50, 16, 16, 50) vs (50, 16, 50, 16)
+    int64_t previousErrorUs = actualFrameDurationUs[2] - contentFrameDurationUs[2];
+    // Don't add the penalty for errors from the previous frame if the previous frame has low error
+    if (abs(previousErrorUs) >= c.judderErrorToleranceUs) {
+        errorUs = abs(errorUs) + abs(errorUs + previousErrorUs);
+    }
+
+    // Avoid scoring judder for 3:2 pulldown or other minimally-small frame duration errors
+    if (abs(errorUs) < contentFrameDurationUs[1] / 4) {
+        return 0;
+    }
+
+    return abs(errorUs) / 1000; // error in millis to keep numbers small
+}
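+
+// Worked example for the score above (hypothetical 30fps content, ~33333us per frame, default
+// 2000us tolerance): if the scored frame was shown for 50000us and the frame before it for
+// 16667us, errorUs = 50000 - 33333 = 16667 and previousErrorUs = 16667 - 33333 = -16666, so the
+// penalty term abs(errorUs + previousErrorUs) is ~1 and the score is ~16667 / 1000 = 16 (the
+// earlier short frame already compensated). If both frames lasted 50000us, the penalty term
+// becomes 33334 and the score grows to ~50001 / 1000 = 50.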
+
+void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
+                                              int64_t lastJudderEndTime,
+                                              const FrameDurationUs &actualDurationUs,
+                                              const FrameDurationUs &contentDurationUs,
+                                              JudderEvent &e, VideoRenderQualityMetrics &m,
+                                              const Configuration &c) {
+    int32_t distanceMs = -1;
+    if (lastJudderEndTime != -1) {
+        distanceMs = int32_t((judderTimeUs - lastJudderEndTime) / 1000);
+    }
+    m.judderScoreHistogram.insert(judderScore);
+    if (c.judderEventMax > 0) {
+        if (!e.valid) {
+            m.judderEventCount++;
+            e.valid = true;
+            e.initialTimeUs = judderTimeUs;
+            e.durationMs = 0;
+            e.sumScore = 0;
+            e.sumDistanceMs = 0;
+            e.count = 0;
+            e.details.contentRenderDurationUs.clear();
+            e.details.actualRenderDurationUs.clear();
+            e.details.distanceMs.clear();
+        // The first occurrence in the event should not have the distance recorded as part of the
+        // event, because it belongs in the gap between two events. However, we still want the
+        // distance recorded in the details so the times can be computed from all details in all
+        // events.
+        } else if (distanceMs != -1) {
+            e.durationMs += distanceMs;
+            e.sumDistanceMs += distanceMs;
+        }
+        e.durationMs += actualDurationUs[1] / 1000;
+        e.count++;
+        e.sumScore += judderScore;
+        if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
+            e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
+            e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
+            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
+        }
+    }
+}
+
+void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
+                                                        int64_t lastJudderEndTimeUs, JudderEvent &e,
+                                                        const VideoRenderQualityMetrics &m,
+                                                        const Configuration &c,
+                                                        JudderEvent *judderEventOut) {
+    if (lastJudderEndTimeUs == -1 || !e.valid) {
+        return;
+    }
+    // Future judder occurrences are still pulled into the current judder event if under tolerance
+    int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
+    if (distanceMs < c.judderEventDistanceToleranceMs) {
+        return;
+    }
+    if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
+        *judderEventOut = std::move(e);
+    }
+    // start recording a new judder event after pushing the current one back to the caller
+    e.valid = false;
+}
+
+void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
+                                                    const Configuration &c) {
+    m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
+    m.freezeDistanceMsHistogram.setup(c.freezeDistanceMsHistogramBuckets);
+    m.judderScoreHistogram.setup(c.judderScoreHistogramBuckets);
+}
+
+int64_t VideoRenderQualityTracker::nowUs() {
+    struct timespec t;
+    t.tv_sec = t.tv_nsec = 0;
+    clock_gettime(CLOCK_MONOTONIC, &t);
+    return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
+}
+
+void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
+                                                     int64_t newTimestampUs) {
+    for (int i = FrameDurationUs::SIZE - 1; i > 0; --i) {
+        durationUs[i] = durationUs[i - 1];
+    }
+    if (newTimestampUs == -1) {
+        durationUs[0] = -1;
+    } else {
+        durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
+                newTimestampUs - durationUs.priorTimestampUs;
+        durationUs.priorTimestampUs = newTimestampUs;
+    }
+}
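+
+// Worked example for the sliding window above (hypothetical timestamps): feeding timestamps of
+// 0, 33333 and 66667us yields durationUs[0] values of -1 (no prior timestamp yet), 33333, and
+// then 33334, with older durations shifted toward the end of the array on each update.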
+
+void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+                                                const Configuration &c) {
+    float newFrameRate = detectFrameRate(durationUs, c);
+    if (newFrameRate != FRAME_RATE_UNDETERMINED) {
+        frameRate = newFrameRate;
+    }
+}
+
+float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
+                                                 const Configuration &c) {
+    // At least 3 frames are necessary to detect stable frame rates
+    assert(FrameDurationUs::SIZE >= 3);
+    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
+        return FRAME_RATE_UNDETERMINED;
+    }
+    // Only determine frame rate if the render durations are stable across 3 frames
+    if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
+        return is32pulldown(durationUs, c) ? FRAME_RATE_24_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
+    }
+    return 1000.0 * 1000.0 / durationUs[0];
+}
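+
+// Worked example (hypothetical durations): three consecutive render durations of ~16667us, all
+// within the 2ms detection tolerance of each other, resolve to 1000000 / 16667 = ~60fps.
+// Unstable durations only resolve if they match the 3:2 pulldown pattern checked below.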
+
+bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
+                                             const Configuration &c) {
+    // At least 5 frames are necessary to detect stable 3:2 pulldown
+    assert(FrameDurationUs::SIZE >= 5);
+    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
+        durationUs[4] == -1) {
+        return false;
+    }
+    // 3:2 pulldown expects that every other frame has identical duration...
+    if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
+        abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
+        return false;
+    }
+    // ... for either 2 vsyncs or 3 vsyncs
+    if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
+         abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
+        (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
+         abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
+        return true;
+    }
+    return false;
+}
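+
+// Worked example (hypothetical durations): 24fps content on a 60Hz display alternates frames held
+// for 3 and 2 vsyncs, so the recent render durations look like {50000, 33333, 50000, 33333,
+// 50000} us. Every other entry matches within tolerance and the leading pair matches the
+// 50000/33333 pattern, so the sequence is reported as 3:2 pulldown.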
+
+} // namespace android
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 4b4f65f..f91a8b2 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -33,10 +33,8 @@
 #include <functional>
 #include <sys/time.h>
 
-#define USE_LIBYUV
 #define PERF_PROFILING 0
 
-
 #if defined(__aarch64__) || defined(__ARM_NEON__)
 #define USE_NEON_Y410 1
 #else
@@ -48,6 +46,48 @@
 #endif
 
 namespace android {
+typedef const struct libyuv::YuvConstants LibyuvConstants;
+
+struct LibyuvConstPair {
+    const LibyuvConstants *yuv;
+    const LibyuvConstants *yvu;
+};
+
+// Function to resolve YUV Matrices defined in libyuv
+static LibyuvConstPair getLibYUVMatrix(
+        const ColorConverter::ColorSpace &colorSpace, bool is10Bit) {
+    LibyuvConstPair matrix = {nullptr, nullptr};
+    const bool isFullRange = (colorSpace.mRange == ColorUtils::kColorRangeFull);
+    if (colorSpace.isI601()) {
+        matrix.yuv = &libyuv::kYuvI601Constants;
+        matrix.yvu = &libyuv::kYvuI601Constants;
+    } else if (colorSpace.isJ601()) {
+        matrix.yuv = &libyuv::kYuvJPEGConstants;
+        matrix.yvu = &libyuv::kYvuJPEGConstants;
+    } else if (colorSpace.isH709()) {
+        matrix.yuv = &libyuv::kYuvH709Constants;
+        matrix.yvu = &libyuv::kYvuH709Constants;
+    } else if (colorSpace.isF709()) {
+        matrix.yuv = &libyuv::kYuvF709Constants;
+        matrix.yvu = &libyuv::kYvuF709Constants;
+    } else if (colorSpace.isBt2020()) {
+        matrix.yuv = &libyuv::kYuv2020Constants;
+        matrix.yvu = &libyuv::kYvu2020Constants;
+    } else if (colorSpace.isBtV2020()) {
+        matrix.yuv = &libyuv::kYuvV2020Constants;
+        matrix.yvu = &libyuv::kYvuV2020Constants;
+    } else {
+        // unspecified
+        if (isFullRange) {
+            matrix.yuv = is10Bit ? &libyuv::kYuvV2020Constants : &libyuv::kYuvJPEGConstants;
+            matrix.yvu = is10Bit ? &libyuv::kYvuV2020Constants : &libyuv::kYvuJPEGConstants;
+        } else {
+            matrix.yuv = is10Bit ? &libyuv::kYuv2020Constants : &libyuv::kYuvI601Constants;
+            matrix.yvu = is10Bit ? &libyuv::kYvu2020Constants : &libyuv::kYvuI601Constants;
+        }
+    }
+    return matrix;
+}
 
 static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
     return colorFormat == OMX_COLOR_Format16bitRGB565
@@ -56,28 +96,234 @@
             || colorFormat == COLOR_Format32bitABGR2101010;
 }
 
-bool ColorConverter::ColorSpace::isBt2020() const {
-    return (mStandard == ColorUtils::kColorStandardBT2020);
+// check for limited Range
+bool ColorConverter::ColorSpace::isLimitedRange() const {
+    return mRange == ColorUtils::kColorRangeLimited;
 }
 
-bool ColorConverter::ColorSpace::isH420() const {
+// BT.2020 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isBt2020() const {
+    return (mStandard == ColorUtils::kColorStandardBT2020
+            && mRange == ColorUtils::kColorRangeLimited);
+}
+
+// BT.2020 full range YUV to RGB
+bool ColorConverter::ColorSpace::isBtV2020() const {
+    return (mStandard == ColorUtils::kColorStandardBT2020
+            && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 full range YUV to RGB
+bool ColorConverter::ColorSpace::isF709() const {
+    return (mStandard == ColorUtils::kColorStandardBT709
+            && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isH709() const {
     return (mStandard == ColorUtils::kColorStandardBT709)
             && (mRange == ColorUtils::kColorRangeLimited);
 }
 
+// BT.601 limited range YUV to RGB
 // the matrix coefficients are the same for both 601.625 and 601.525 standards
-bool ColorConverter::ColorSpace::isI420() const {
+bool ColorConverter::ColorSpace::isI601() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeLimited);
 }
 
-bool ColorConverter::ColorSpace::isJ420() const {
+// BT.601 full range YUV to RGB
+bool ColorConverter::ColorSpace::isJ601() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeFull);
 }
 
+// Utility functions for MediaImage2
+static MediaImage2 CreateYUV420PlanarMediaImage2(
+        uint32_t width, uint32_t height, uint32_t stride,
+        uint32_t vstride, uint32_t bitDepth) {
+    const uint32_t componentBytes = (bitDepth + 7) / 8;
+    return MediaImage2 {
+        .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+        .mNumPlanes = 3,
+        .mWidth = width,
+        .mHeight = height,
+        .mBitDepth = bitDepth,
+        .mBitDepthAllocated = componentBytes * 8,
+        .mPlane = {
+            {
+                .mOffset = 0,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 1,
+                .mVertSubsampling = 1,
+            },
+            {
+                .mOffset = stride * vstride,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride / 2),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            },
+            {
+                .mOffset = stride * vstride * 5 / 4,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride / 2),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            }
+        },
+    };
+}
+
+static MediaImage2 CreateYUV420SemiPlanarMediaImage2(
+        uint32_t width, uint32_t height, uint32_t stride,
+        uint32_t vstride, uint32_t bitDepth, bool uv = true /*nv12 or not*/) {
+    const uint32_t componentBytes = (bitDepth + 7) / 8;
+    return MediaImage2 {
+        .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+        .mNumPlanes = 3,
+        .mWidth = width,
+        .mHeight = height,
+        .mBitDepth = bitDepth,
+        .mBitDepthAllocated = componentBytes * 8,
+        .mPlane = {
+            {
+                .mOffset = 0,
+                .mColInc = static_cast<int32_t>(componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 1,
+                .mVertSubsampling = 1,
+            },
+            {
+                .mOffset = stride * vstride + (uv ? 0 : componentBytes),
+                .mColInc = static_cast<int32_t>(2 * componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            },
+            {
+                .mOffset = stride * vstride + (uv ? componentBytes : 0),
+                .mColInc = static_cast<int32_t>(2 * componentBytes),
+                .mRowInc = static_cast<int32_t>(stride),
+                .mHorizSubsampling = 2,
+                .mVertSubsampling = 2,
+            }
+        },
+    };
+}
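+
+// Worked example for the two helpers above (hypothetical 8-bit 1280x720 buffer, stride = 1280,
+// vstride = 720): the planar layout places U at offset 1280 * 720 = 921600 with a row increment
+// of 640, and V at 921600 * 5 / 4 = 1152000; the semi-planar layout places the interleaved chroma
+// at offset 921600 with a column increment of 2, U first for NV12 (uv = true) and V first for
+// NV21 (uv = false).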
+
+ColorConverter::Image::Image(const MediaImage2& img)
+    :mImage(img),
+    mLayout(ImageLayoutUnknown),
+    mSampling(ImageSamplingUnknown) {
+    const MediaImage2::PlaneInfo &yPlane =
+            img.mPlane[MediaImage2::PlaneIndex::Y];
+    const MediaImage2::PlaneInfo &uPlane =
+            img.mPlane[MediaImage2::PlaneIndex::U];
+    const MediaImage2::PlaneInfo &vPlane =
+            img.mPlane[MediaImage2::PlaneIndex::V];
+
+    if (mImage.mNumPlanes != 3) {
+        ALOGE("Conversion error: MediaImage2 mNumPlanes != 3");
+        mLayout = ImageLayoutUnknown;
+        mSampling = ImageSamplingUnknown;
+        mBitDepth = ImageBitDepthInvalid;
+        return;
+    }
+
+    if (mImage.mBitDepth == 8
+            && yPlane.mColInc == 1
+            && uPlane.mColInc == 1
+            && vPlane.mColInc == 1
+            && yPlane.mVertSubsampling == 1
+            && uPlane.mVertSubsampling == 2
+            && vPlane.mVertSubsampling == 2) {
+        mLayout = ImageLayout420Planar;
+        mSampling = ImageSamplingYUV420;
+    } else if (mImage.mBitDepth == 8
+            && yPlane.mColInc == 1
+            && uPlane.mColInc == 2
+            && vPlane.mColInc == 2
+            && yPlane.mVertSubsampling == 1
+            && uPlane.mVertSubsampling == 2
+            && vPlane.mVertSubsampling == 2
+            && ((vPlane.mOffset == uPlane.mOffset + 1) ||
+            (uPlane.mOffset == vPlane.mOffset + 1))) {
+        mLayout = ImageLayout420SemiPlanar;
+        mSampling = ImageSamplingYUV420;
+    }
+
+    mBitDepth = ImageBitDepthInvalid;
+    switch (img.mBitDepth) {
+        case 8:
+            mBitDepth = ImageBitDepth8;
+            break;
+
+        case 10:
+        case 12:
+        case 16:
+        default:
+            // TODO: Implement 10b, 12b and 16b using MediaImage2
+            mBitDepth = ImageBitDepthInvalid;
+    }
+
+}
+
+status_t ColorConverter::Image::getYUVPlaneOffsetAndStride(
+        const BitmapParams &src,
+        uint32_t *y_offset,
+        uint32_t *u_offset,
+        uint32_t *v_offset,
+        size_t *y_stride,
+        size_t *u_stride,
+        size_t *v_stride) const {
+
+    if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+            || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    if (mImage.mNumPlanes != 3) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    const MediaImage2::PlaneInfo &yPlane = mImage.mPlane[MediaImage2::PlaneIndex::Y];
+    *y_offset = yPlane.mOffset
+            + src.mCropTop * yPlane.mRowInc
+            + src.mCropLeft * yPlane.mColInc;
+
+    const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+    *u_offset = uPlane.mOffset
+            + (src.mCropTop / uPlane.mVertSubsampling) * uPlane.mRowInc
+            + (src.mCropLeft / uPlane.mHorizSubsampling) * uPlane.mColInc;
+
+    const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+    *v_offset = vPlane.mOffset
+            + (src.mCropTop / vPlane.mVertSubsampling) * vPlane.mRowInc
+            + (src.mCropLeft / vPlane.mHorizSubsampling) * vPlane.mColInc;
+
+    *y_stride = yPlane.mRowInc;
+    *u_stride = uPlane.mRowInc;
+    *v_stride = vPlane.mRowInc;
+
+    return OK;
+}
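+
+// Worked example for the crop handling above (hypothetical values, using the 1280x720 planar
+// layout sketched earlier): with mCropTop = 2 and mCropLeft = 4, the Y read offset becomes
+// 0 + 2 * 1280 + 4 * 1 = 2564 and the U read offset becomes 921600 + (2 / 2) * 640 + (4 / 2) * 1
+// = 922242, since both chroma planes are subsampled by 2 in each direction.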
+
+bool ColorConverter::Image::isNV21() const {
+    if (getLayout() == ImageLayout420SemiPlanar) {
+        const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+        const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+
+        int componentBytes = (mImage.mBitDepthAllocated) / 8;
+
+        return (((vPlane.mOffset + componentBytes) == uPlane.mOffset));
+    }
+    return false;
+}
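+
+// Worked example (hypothetical offsets): for an 8-bit semi-planar image with vPlane.mOffset ==
+// 921600 and uPlane.mOffset == 921601, the V sample precedes the interleaved U sample, so the
+// layout is reported as NV21; otherwise it is treated as NV12.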
+
 /**
  * This class approximates the standard YUV to RGB conversions by factoring the matrix
  * coefficients to 1/256th-s (as dividing by 256 is easy to do with right shift). The chosen value
@@ -227,8 +473,42 @@
     mClip10Bit = NULL;
 }
 
+// Set MediaImage2 Flexible formats
+void ColorConverter::setSrcMediaImage2(MediaImage2 img) {
+    mSrcImage = Image(img);
+}
+
+bool ColorConverter::isValidForMediaImage2() const {
+
+    if (!mSrcImage
+            || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+        // TODO: support Yonly or RGB etc?
+        return false;
+    }
+    // try to identify the src format
+
+    BitDepth_t srcBitDepth = mSrcImage->getBitDepth();
+
+    //TODO: support 12b and 16b ?
+    if (srcBitDepth == ImageBitDepthInvalid) {
+        return false;
+    }
+
+    return ((srcBitDepth == ImageBitDepth8  &&
+            (mDstFormat == OMX_COLOR_Format16bitRGB565
+            || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+            || mDstFormat == OMX_COLOR_Format32bitBGRA8888))
+
+            || (srcBitDepth == ImageBitDepth10
+            && (mDstFormat == COLOR_Format32bitABGR2101010)));
+}
+
 bool ColorConverter::isValid() const {
     switch ((int32_t)mSrcFormat) {
+        case COLOR_FormatYUV420Flexible:
+            return isValidForMediaImage2();
+            break;
+
         case OMX_COLOR_FormatYUV420Planar16:
             if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
                 return true;
@@ -240,22 +520,23 @@
                     || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
 
         case OMX_COLOR_FormatCbYCrY:
-        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
             return mDstFormat == OMX_COLOR_Format16bitRGB565;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
+        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+            if (mSrcImage) {
+                return isValidForMediaImage2();
+            }
             return mDstFormat == OMX_COLOR_Format16bitRGB565
                     || mDstFormat == OMX_COLOR_Format32BitRGBA8888
                     || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
-#else
-            return mDstFormat == OMX_COLOR_Format16bitRGB565;
-#endif
+
         case COLOR_FormatYUVP010:
             return mDstFormat == COLOR_Format32bitABGR2101010;
 
         default:
+            //TODO: Should this be enabled for MediaImage2?
             return false;
     }
 }
@@ -320,6 +601,13 @@
         mStride = mWidth;
         break;
 
+    case COLOR_FormatYUV420Flexible:
+        // MediaImage2 should be used.
+        mBpp = 1;
+        mStride = mWidth;
+
+        break;
+
     default:
         ALOGE("Unsupported color format %d", mColorFormat);
         mBpp = 1;
@@ -340,6 +628,14 @@
     return mCropBottom - mCropTop + 1;
 }
 
+bool ColorConverter::BitmapParams::isValid() const {
+    if (!((mStride & 1) == 0  // stride must be even
+        && mStride >= mBpp * cropWidth())) {
+            return false;
+    }
+    return true;
+}
+
 status_t ColorConverter::convert(
         const void *srcBits,
         size_t srcWidth, size_t srcHeight, size_t srcStride,
@@ -352,83 +648,83 @@
     BitmapParams src(
             const_cast<void *>(srcBits),
             srcWidth, srcHeight, srcStride,
-            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom,
+            mSrcFormat);
 
     BitmapParams dst(
             dstBits,
             dstWidth, dstHeight, dstStride,
             dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
 
-    if (!((src.mCropLeft & 1) == 0
-        && src.cropWidth() == dst.cropWidth()
-        && src.cropHeight() == dst.cropHeight())) {
+    if (!(src.isValid()
+            && dst.isValid()
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
         return ERROR_UNSUPPORTED;
     }
-
-    status_t err;
-
-    switch ((int32_t)mSrcFormat) {
-        case OMX_COLOR_FormatYUV420Planar:
-#ifdef USE_LIBYUV
-            err = convertYUV420PlanarUseLibYUV(src, dst);
-#else
-            err = convertYUV420Planar(src, dst);
+#if PERF_PROFILING
+    int64_t startTimeUs = ALooper::GetNowUs();
 #endif
+    status_t err;
+    switch ((int32_t)mSrcFormat) {
+        case COLOR_FormatYUV420Flexible:
+            err = convertYUVMediaImage(src, dst);
+            break;
+
+        case OMX_COLOR_FormatYUV420Planar:
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420PlanarMediaImage2(
+                        srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+            }
+            err = convertYUVMediaImage(src, dst);
+
             break;
 
         case OMX_COLOR_FormatYUV420Planar16:
-        {
-#if PERF_PROFILING
-            int64_t startTimeUs = ALooper::GetNowUs();
-#endif
             err = convertYUV420Planar16(src, dst);
-#if PERF_PROFILING
-            int64_t endTimeUs = ALooper::GetNowUs();
-            ALOGD("convertYUV420Planar16 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
             break;
-        }
 
         case COLOR_FormatYUVP010:
-        {
-#if PERF_PROFILING
-            int64_t startTimeUs = ALooper::GetNowUs();
-#endif
             err = convertYUVP010(src, dst);
-#if PERF_PROFILING
-            int64_t endTimeUs = ALooper::GetNowUs();
-            ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
+
             break;
-        }
 
         case OMX_COLOR_FormatCbYCrY:
             err = convertCbYCrY(src, dst);
             break;
 
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-            err = convertQCOMYUV420SemiPlanar(src, dst);
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+                    srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/, false));
+            }
+            err = convertYUVMediaImage(src, dst);
+
             break;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
-            err = convertYUV420SemiPlanarUseLibYUV(src, dst);
-#else
-            err = convertYUV420SemiPlanar(src, dst);
-#endif
-            break;
-
         case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
-            err = convertTIYUV420PackedSemiPlanar(src, dst);
+            if (!mSrcImage) {
+                mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+                    srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+            }
+            err = convertYUVMediaImage(src, dst);
+
             break;
 
         default:
-        {
+
             CHECK(!"Should not be here. Unknown color conversion.");
             break;
-        }
     }
 
+#if PERF_PROFILING
+    int64_t endTimeUs = ALooper::GetNowUs();
+    ALOGD("%s image took %lld us", asString_ColorFormat(mSrcFormat,"Unknown"),
+            (long long) (endTimeUs - startTimeUs));
+#endif
+
     return err;
 }
 
@@ -437,32 +733,40 @@
     const bool is10Bit = (mSrcFormat == COLOR_FormatYUVP010
             || mSrcFormat == OMX_COLOR_FormatYUV420Planar16);
 
-    switch (mSrcColorSpace.mStandard) {
-    case ColorUtils::kColorStandardBT601_525:
-    case ColorUtils::kColorStandardBT601_625:
+    ColorAspects::Primaries primaries;
+    ColorAspects::MatrixCoeffs matrix;
+    if (ColorUtils::unwrapColorAspectsFromColorStandard(
+            mSrcColorSpace.mStandard, &primaries, &matrix) != OK) {
+        matrix = ColorAspects::MatrixUnspecified;
+    }
+
+    switch (matrix) {
+    case ColorAspects::MatrixBT601_6:
+    case ColorAspects::MatrixBT470_6M:   // use 601 matrix as that is the closest for now
+    case ColorAspects::MatrixSMPTE240M:  // use 601 matrix as that is the closest for now
         return (isFullRange ? &BT601_FULL :
                 is10Bit ? &BT601_LTD_10BIT : &BT601_LIMITED);
 
-    case ColorUtils::kColorStandardBT709:
+    case ColorAspects::MatrixBT709_5:
         return (isFullRange ? &BT709_FULL :
                 is10Bit ? &BT709_LTD_10BIT : &BT709_LIMITED);
 
-    case ColorUtils::kColorStandardBT2020:
+    case ColorAspects::MatrixBT2020:
+    case ColorAspects::MatrixBT2020Constant: // use 2020 matrix as that is the closest for now
         return (isFullRange ? &BT2020_FULL :
                 is10Bit ? &BT2020_LTD_10BIT : &BT2020_LIMITED);
 
     default:
-        // for now use the default matrices for unhandled color spaces
-        // TODO: fail?
-        // return nullptr;
-        [[fallthrough]];
-
-    case ColorUtils::kColorStandardUnspecified:
-        return is10Bit ? &BT2020_LTD_10BIT : &BT601_LIMITED;
-
+        // use BT.2020 for 10-bit and 601 for 8-bit by default
+        if (is10Bit) {
+            return isFullRange ? &BT2020_FULL : &BT2020_LTD_10BIT;
+        } else {
+            return isFullRange ? &BT601_FULL : &BT601_LIMITED;
+        }
     }
 }
 
+// Interleaved YUV 422 CbYCrY to RGB565
 status_t ColorConverter::convertCbYCrY(
         const BitmapParams &src, const BitmapParams &dst) {
     // XXX Untested
@@ -485,10 +789,10 @@
         + dst.mCropTop * dst.mWidth + dst.mCropLeft;
 
     const uint8_t *src_ptr = (const uint8_t *)src.mBits
-        + (src.mCropTop * dst.mWidth + src.mCropLeft) * 2;
+        + (src.mCropTop * src.mWidth + src.mCropLeft) * 2;
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
+        for (size_t x = 0; x < src.cropWidth() - 1; x += 2) {
             signed y1 = (signed)src_ptr[2 * x + 1] - _c16;
             signed y2 = (signed)src_ptr[2 * x + 3] - _c16;
             signed u = (signed)src_ptr[2 * x] - 128;
@@ -533,67 +837,103 @@
     return OK;
 }
 
+status_t ColorConverter::getSrcYUVPlaneOffsetAndStride(
+        const BitmapParams &src,
+        uint32_t *y_offset, uint32_t *u_offset, uint32_t *v_offset,
+        size_t *y_stride, size_t *u_stride, size_t *v_stride) const {
+    if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+            || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+        ALOGE("nullptrs given for yuv source offset / stride");
+        return ERROR_MALFORMED;
+    }
+
+    if (mSrcImage) {
+        // if we have MediaImage2; get the info from MediaImage2
+        return mSrcImage->getYUVPlaneOffsetAndStride(src, y_offset, u_offset, v_offset,
+                y_stride, u_stride, v_stride);
+    }
+    return ERROR_UNSUPPORTED;
+}
 /*
     libyuv supports the following color spaces:
 
-    I420: BT.601 limited range
-    J420: BT.601 full range (jpeg)
-    H420: BT.709 limited range
+    I601:  BT.601 limited range
+    J601:  BT.601 full range (jpeg)
+    H709:  BT.709 limited range
+    F709:  BT.709 full range
+    2020:  BT.2020 limited range
+    V2020: BT.2020 full range
 
 */
 
-#define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)(     \
-        const uint8_t*, int, const uint8_t*, int,       \
-        const uint8_t*, int, uint8_t*, int, int, int)   \
-        = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
-        : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
-        : libyuv::I420To##rgb
-
 status_t ColorConverter::convertYUV420PlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
-    // Fall back to our conversion if libyuv does not support the color space.
-    // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
-    if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
-            // && !mSrcColorSpace.isI420() /* same as line below */
-            && getMatrix() != &BT601_LIMITED) {
-        return convertYUV420Planar(src, dst);
+    LibyuvConstPair yuvConstants =
+            getLibYUVMatrix(mSrcColorSpace, false);
+
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+                          &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
     }
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+    const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
 
-    const uint8_t *src_u =
-        (const uint8_t *)src.mBits + src.mStride * src.mHeight
-        + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
+    const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
 
-    const uint8_t *src_v =
-        src_u + (src.mStride / 2) * (src.mHeight / 2);
+    const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
 
     switch (mDstFormat) {
     case OMX_COLOR_Format16bitRGB565:
     {
-        DECLARE_YUV2RGBFUNC(func, RGB565);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
-        break;
-    }
+        libyuv::I420ToRGB565Matrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                src_v,
+                src_stride_v,
+                dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
 
-    case OMX_COLOR_Format32BitRGBA8888:
-    {
-        DECLARE_YUV2RGBFUNC(func, ABGR);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
         break;
     }
 
     case OMX_COLOR_Format32bitBGRA8888:
     {
-        DECLARE_YUV2RGBFUNC(func, ARGB);
-        (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
-                (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+        libyuv::I420ToARGBMatrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                src_v,
+                src_stride_v,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
+        break;
+    }
+
+    case OMX_COLOR_Format32BitRGBA8888:
+    {
+        libyuv::I420ToARGBMatrix(src_y,
+                src_stride_y,
+                src_v,
+                src_stride_v,
+                src_u,
+                src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yvu,
+                src.cropWidth(),
+                src.cropHeight());
         break;
     }
 
@@ -606,38 +946,90 @@
 
 status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
-    // Fall back to our conversion if libyuv does not support the color space.
-    // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
-    if (// !mSrcColorSpace.isI420() && /* same as below */
-        getMatrix() != &BT601_LIMITED) {
-        return convertYUV420SemiPlanar(src, dst);
-    }
+    LibyuvConstPair yuvConstants =
+            getLibYUVMatrix(mSrcColorSpace, false);
 
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+                          &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
+    }
+    (void)v_offset;
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+    const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
 
-    const uint8_t *src_u =
-        (const uint8_t *)src.mBits + src.mStride * src.mHeight
-        + (src.mCropTop / 2) * src.mStride + src.mCropLeft;
+    const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
+
+    const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
+
+    bool isNV21 = (u_offset == (v_offset + 1));
+
+    // libyuv function signature for semi-planar formats:
+    std::function<int(const uint8_t*, int,
+            const uint8_t*, int, uint8_t *, int,
+            LibyuvConstants *, int, int)> libyuvFunc;
 
     switch (mDstFormat) {
     case OMX_COLOR_Format16bitRGB565:
-        libyuv::NV12ToRGB565(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+        // Note: We don't seem to have a similar function for NV21
+        libyuv::NV12ToRGB565Matrix(src_y,
+                src_stride_y,
+                src_u,
+                src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
         break;
-
+    }
     case OMX_COLOR_Format32bitBGRA8888:
-        libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+        if (src_stride_u != src_stride_v) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        libyuvFunc = isNV21 ? libyuv::NV21ToARGBMatrix : libyuv::NV12ToARGBMatrix;
+
+        libyuvFunc(src_y,
+                src_stride_y,
+                isNV21 ? src_v: src_u,
+                // src_stride_v should be equal to src_stride_u,
+                // but the ternary is kept for readability
+                isNV21 ? src_stride_v : src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yuv,
+                src.cropWidth(),
+                src.cropHeight());
         break;
+    }
 
     case OMX_COLOR_Format32BitRGBA8888:
-        libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
-                dst.mStride, src.cropWidth(), src.cropHeight());
+    {
+
+        if (src_stride_u != src_stride_v) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        libyuvFunc = isNV21 ? libyuv::NV12ToARGBMatrix : libyuv::NV21ToARGBMatrix;
+
+        libyuvFunc(src_y,
+                src_stride_y,
+                isNV21 ? src_v : src_u,
+                // src_stride_v should be equal to src_stride_u
+                isNV21 ? src_stride_v : src_stride_u,
+                (uint8_t*)dst_ptr,
+                dst.mStride,
+                yuvConstants.yvu,
+                src.cropWidth(),
+                src.cropHeight());
         break;
+    }
 
     default:
         return ERROR_UNSUPPORTED;
@@ -647,27 +1039,75 @@
 }
 
 std::function<void (void *, void *, void *, size_t,
-                    signed *, signed *, signed *, signed *)>
-getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
-    switch(srcFormat) {
-    case OMX_COLOR_FormatYUV420Planar:
-        return [](void *src_y, void *src_u, void *src_v, size_t x,
-                  signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = ((uint8_t*)src_y)[x];
-            *y2 = ((uint8_t*)src_y)[x + 1];
-            *u = ((uint8_t*)src_u)[x / 2] - 128;
-            *v = ((uint8_t*)src_v)[x / 2] - 128;
-        };
-    case OMX_COLOR_FormatYUV420Planar16:
+        signed *, signed *, signed *, signed *)>
+getReadFromChromaHorizSubsampled2Image8b(std::optional<MediaImage2> image,
+        OMX_COLOR_FORMATTYPE srcFormat) {
+    // This function is for reading the src only, when both chromas are horizontally subsampled
+    // by 2; it returns 2 luma samples for each chroma sample.
+    if (image) {
+        uint32_t uColInc =
+                image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+        uint32_t vColInc =
+                image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+        uint32_t uHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+         uint32_t vHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+        if (!(uHorizSubsampling == 2 && vHorizSubsampling == 2)) {
+            return nullptr;
+        }
+
+        if (image->mBitDepthAllocated == 8) {
+
+            return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+                    (void *src_y, void *src_u, void *src_v, size_t x,
+                    signed *y1, signed *y2, signed *u, signed *v) {
+                *y1 = ((uint8_t *)src_y)[x];
+                *y2 = ((uint8_t *)src_y)[x + 1];
+                *u  = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+                *v  = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+            };
+        }
+    }
+    if (srcFormat == OMX_COLOR_FormatYUV420Planar16) {
+        // OMX_COLOR_FormatYUV420Planar16
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                 signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = (signed)(((uint16_t*)src_y)[x] >> 2);
-            *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2);
-            *u = (signed)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
-            *v = (signed)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
+            *y1 = (uint8_t)(((uint16_t*)src_y)[x] >> 2);
+            *y2 = (uint8_t)(((uint16_t*)src_y)[x + 1] >> 2);
+            *u = (uint8_t)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
+            *v = (uint8_t)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
         };
-    default:
-        TRESPASS();
+    }
+    return nullptr;
+}
+
+std::function<void (void *, void *, void *, size_t,
+        signed *, signed *, signed *)>
+getReadFromImage(std::optional<MediaImage2> image, OMX_COLOR_FORMATTYPE &srcFormat) {
+    (void)srcFormat;
+    if (image) {
+        uint32_t uColInc =
+                image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+        uint32_t vColInc =
+                image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+        uint32_t uHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+        uint32_t vHorizSubsampling =
+                image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+        if (image->mBitDepthAllocated == 8) {
+
+            return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+                    (void *src_y, void *src_u, void *src_v, size_t x,
+                    signed *y1, signed *u, signed *v) {
+                *y1 = ((uint8_t *)src_y)[x];
+                *u  = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+                *v  = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+            };
+        }
     }
     return nullptr;
 }
@@ -766,8 +1206,178 @@
     return nullptr;
 }
 
-status_t ColorConverter::convertYUV420Planar(
+status_t ColorConverter::convertYUVMediaImage(
         const BitmapParams &src, const BitmapParams &dst) {
+    // First see if this can be handled as an 8-bit 420 planar or
+    // semi-planar conversion using libyuv.
+
+    if (!mSrcImage
+            || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV
+            || mSrcImage->getMediaImage2().mNumPlanes != 3) {
+        ALOGE("Cannot convert without a valid 3-plane YUV MediaImage2");
+        return ERROR_UNSUPPORTED;
+    }
+    if (mSrcImage->getBitDepth() == ImageBitDepth8
+            && mSrcImage->getSampling() == ImageSamplingYUV420) {
+        Layout_t layout = mSrcImage->getLayout();
+        switch (layout) {
+            case Layout_t::ImageLayout420Planar:
+            {
+                return convertYUV420PlanarUseLibYUV(src, dst);
+                break;
+            }
+
+            case Layout_t::ImageLayout420SemiPlanar:
+            {
+                // Note: libyuv doesn't support NV21 -> RGB565
+                if (!(mSrcImage->isNV21() && mDstFormat == OMX_COLOR_Format16bitRGB565)) {
+                    status_t ret = convertYUV420SemiPlanarUseLibYUV(src, dst);
+                    // This call may fail when certain preconditions for
+                    // semi-planar formats are not met (e.g. strideU != strideV).
+                    // In that case it bails out before attempting the conversion,
+                    // so no extra memcpy is involved, and we fall through to the
+                    // pixel-based processing below.
+                    if (ret == OK) {
+                        return ret;
+                    }
+                }
+                break;
+            }
+            default:
+                // we will handle this case below.
+                break;
+        }
+    }
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
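+    // The coefficients are pre-scaled by 256: contributions are accumulated in
+    // fixed point, rounded with "+ 128", then divided by 256 in the loops below.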
+
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+            + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+    size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+    if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+            &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+        return ERROR_UNSUPPORTED;
+    }
+    uint32_t uVertSubsampling =
+            mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::U].mVertSubsampling;
+    uint32_t vVertSubsampling =
+            mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::V].mVertSubsampling;
+
+    // TODO: optimize for chroma sampling, reading and writing multiple pixels
+    //       within the same loop
+    signed _c16 = 0;
+    void *kAdjustedClip = nullptr;
+    if (mSrcImage->getBitDepth() != ImageBitDepth8) {
+        ALOGE("BitDepth != 8 for MediaImage2");
+        return ERROR_UNSUPPORTED;
+    }
+    _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+    kAdjustedClip = initClip();
+
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
+    uint8_t *src_y = (uint8_t *)src.mBits + y_offset;
+    uint8_t *src_u = (uint8_t *)src.mBits + u_offset;
+    uint8_t *src_v = (uint8_t *)src.mBits + v_offset;
+
+    switch (mSrcImage->getSampling()) {
+
+        case ImageSamplingYUV420:
+        {
+            // get a read function that handles sources with chroma
+            // horizontally subsampled by 2, using the MediaImage2 layout
+            auto readFromSrcImage = getReadFromChromaHorizSubsampled2Image8b(
+                    mSrcImage->getMediaImage2(), mSrcFormat);
+            if (readFromSrcImage == nullptr) {
+                ALOGE("Cannot get a read function for this MediaImage2");
+                return ERROR_UNSUPPORTED;
+            }
+            for (size_t y = 0; y < src.cropHeight(); ++y) {
+                for (size_t x = 0; x < src.cropWidth(); x += 2) {
+                    signed y1, y2, u, v;
+                    readFromSrcImage(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
+
+                    signed u_b = u * _b_u;
+                    signed u_g = u * _neg_g_u;
+                    signed v_g = v * _neg_g_v;
+                    signed v_r = v * _r_v;
+
+                    y1 = y1 - _c16;
+                    signed tmp1 = y1 * _y + 128;
+                    signed b1 = (tmp1 + u_b) / 256;
+                    signed g1 = (tmp1 + v_g + u_g) / 256;
+                    signed r1 = (tmp1 + v_r) / 256;
+
+                    y2 = y2 - _c16;
+                    signed tmp2 = y2 * _y + 128;
+                    signed b2 = (tmp2 + u_b) / 256;
+                    signed g2 = (tmp2 + v_g + u_g) / 256;
+                    signed r2 = (tmp2 + v_r) / 256;
+
+                    bool uncropped = x + 1 < src.cropWidth();
+                    writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+                }
+                src_y += src_stride_y;
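+                // advance the chroma row pointers only once every
+                // uVertSubsampling / vVertSubsampling luma rows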
+                src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+                src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+                dst_ptr += dst.mStride;
+            }
+            break;
+        }
+
+        default:
+        {
+            // Interleaved or any other layout: process one pixel at a time.
+            auto readFromSrcImage = getReadFromImage(mSrcImage->getMediaImage2(), mSrcFormat);
+            if (readFromSrcImage == nullptr) {
+                ALOGE("Cannot get a read function for this MediaImage2");
+                return ERROR_UNSUPPORTED;
+            }
+            for (size_t y = 0; y < src.cropHeight(); ++y) {
+                for (size_t x = 0; x < src.cropWidth(); x += 1) {
+                    signed y1, u, v;
+                    readFromSrcImage(src_y, src_u, src_v, x, &y1, &u, &v);
+
+                    signed u_b = u * _b_u;
+                    signed u_g = u * _neg_g_u;
+                    signed v_g = v * _neg_g_v;
+                    signed v_r = v * _r_v;
+
+                    y1 = y1 - _c16;
+                    signed tmp1 = y1 * _y + 128;
+                    signed b1 = (tmp1 + u_b) / 256;
+                    signed g1 = (tmp1 + v_g + u_g) / 256;
+                    signed r1 = (tmp1 + v_r) / 256;
+
+                    writeToDst(dst_ptr + x * dst.mBpp, false, r1, g1, b1, 0, 0, 0);
+                }
+                src_y += src_stride_y;
+                src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+                src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+                dst_ptr += dst.mStride;
+            }
+        }
+    }
+    return OK;
+}
+
+status_t ColorConverter::convertYUV420Planar16(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
+        return convertYUV420Planar16ToY410(src, dst);
+    }
+
     const struct Coeffs *matrix = getMatrix();
     if (!matrix) {
         return ERROR_UNSUPPORTED;
@@ -782,7 +1392,7 @@
 
     uint8_t *kAdjustedClip = initClip();
 
-    auto readFromSrc = getReadFromSrc(mSrcFormat);
+    auto readFromSrc = getReadFromChromaHorizSubsampled2Image8b(std::nullopt, mSrcFormat);
     auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
@@ -829,19 +1439,9 @@
 
         dst_ptr += dst.mStride;
     }
-
     return OK;
 }
 
-status_t ColorConverter::convertYUV420Planar16(
-        const BitmapParams &src, const BitmapParams &dst) {
-    if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
-        return convertYUV420Planar16ToY410(src, dst);
-    }
-
-    return convertYUV420Planar(src, dst);
-}
-
 status_t ColorConverter::convertYUVP010(
         const BitmapParams &src, const BitmapParams &dst) {
     if (mDstFormat == COLOR_Format32bitABGR2101010) {
@@ -1120,117 +1720,6 @@
 
 #endif // USE_NEON_Y410
 
-status_t ColorConverter::convertQCOMYUV420SemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
-    const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * src.mHeight
-        + src.mCropTop * src.mWidth + src.mCropLeft;
-
-    /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
-    return convertYUV420SemiPlanarBase(
-            src, dst, src_y, src_u, src.mWidth /* row_inc */, true /* isNV21 */);
-}
-
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
-    const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
-
-    return convertYUV420SemiPlanarBase(
-            src, dst, src_y, src_u, src.mWidth /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
-
-    const uint8_t *src_u =
-        (const uint8_t *)src.mBits + src.mHeight * src.mStride +
-        (src.mCropTop / 2) * src.mStride + src.mCropLeft;
-
-    return convertYUV420SemiPlanarBase(
-            src, dst, src_y, src_u, src.mStride /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanarBase(
-        const BitmapParams &src, const BitmapParams &dst,
-        const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21) {
-    const struct Coeffs *matrix = getMatrix();
-    if (!matrix) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    signed _b_u = matrix->_b_u;
-    signed _neg_g_u = -matrix->_g_u;
-    signed _neg_g_v = -matrix->_g_v;
-    signed _r_v = matrix->_r_v;
-    signed _y = matrix->_y;
-    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
-
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
-            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - _c16;
-            signed y2 = (signed)src_y[x + 1] - _c16;
-
-            signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
-            signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
-
-            signed u_b = u * _b_u;
-            signed u_g = u * _neg_g_u;
-            signed v_g = v * _neg_g_v;
-            signed v_r = v * _r_v;
-
-            signed tmp1 = y1 * _y + 128;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * _y + 128;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[r2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[b2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += row_inc;
-
-        if (y & 1) {
-            src_u += row_inc;
-        }
-
-        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
-    }
-
-    return OK;
-}
-
 uint8_t *ColorConverter::initClip() {
     if (mClip == NULL) {
         mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 4711315..2409315 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -441,7 +441,7 @@
                     ((dataSpace & ~HAL_DATASPACE_RANGE_MASK) | HAL_DATASPACE_RANGE_FULL);
         }
 
-        ALOGD("setting dataspace on output surface to #%x", dataSpace);
+        ALOGD("setting dataspace on output surface to %#x", dataSpace);
         if ((err = native_window_set_buffers_data_space(mNativeWindow.get(), dataSpace))) {
             ALOGW("failed to set dataspace on surface (%d)", err);
         }
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
new file mode 100644
index 0000000..76b054a
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_colorconversion_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_colorconversion_license",
+    ],
+}
+
+cc_defaults {
+    name: "libcolorconversion_fuzzer_defaults",
+    static_libs: [
+        "libyuv_static",
+        "libstagefright_color_conversion",
+        "libstagefright",
+        "liblog",
+    ],
+    header_libs: [
+        "libstagefright_headers",
+        "libgui_headers",
+    ],
+    shared_libs: [
+        "libui",
+        "libnativewindow",
+        "libstagefright_codecbase",
+        "libstagefright_foundation",
+        "libutils",
+        "libgui",
+        "libbinder",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "color_conversion_fuzzer",
+    srcs: [
+        "color_conversion_fuzzer.cpp",
+    ],
+    defaults: [
+         "libcolorconversion_fuzzer_defaults",
+    ],
+}
diff --git a/media/libstagefright/colorconversion/fuzzer/README.md b/media/libstagefright/colorconversion/fuzzer/README.md
new file mode 100644
index 0000000..220f749
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/README.md
@@ -0,0 +1,28 @@
+# Fuzzers for libstagefright_color_conversion
+
+## Table of contents
++ [color_conversion_fuzzer](#ColorConversion)
+
+
+# <a name="ColorConversion"></a> Fuzzer for ColorConversion
+
+ColorConversion supports the following parameters:
+1. SrcColorFormatType (parameter name: "kSrcFormatType")
+2. DstColorFormatType (parameter name: "kDstFormatType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kSrcFormatType`| 0. `OMX_COLOR_FormatYUV420Planar`<br/>1. `OMX_COLOR_FormatYUV420Planar16`<br/>2. `OMX_COLOR_FormatYUV420SemiPlanar`<br/>3. `OMX_TI_COLOR_FormatYUV420PackedSemiPlanar`<br/>4. `OMX_COLOR_FormatCbYCrY`<br/>5. `OMX_QCOM_COLOR_FormatYVU420SemiPlanar`<br/>6. `COLOR_FormatYUVP010`|Value obtained from FuzzedDataProvider|
+|`kDstFormatType`| 0. `OMX_COLOR_Format16bitRGB565`<br/>1. `OMX_COLOR_Format32BitRGBA8888`<br/>2. `OMX_COLOR_Format32bitBGRA8888`<br/>3. `OMX_COLOR_FormatYUV444Y410`<br/>4. `COLOR_Format32bitABGR2101010`|Value obtained from FuzzedDataProvider|
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) color_conversion_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/color_conversion_fuzzer/color_conversion_fuzzer
+```
diff --git a/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
new file mode 100644
index 0000000..b91f7dc
--- /dev/null
+++ b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <iostream>
+#include <vector>
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace android;
+using ::android::sp;
+
+static constexpr int32_t kMinFrameSize = 2;
+static constexpr int32_t kMaxFrameSize = 8192;
+
+static constexpr int32_t kSrcFormatType[] = {OMX_COLOR_FormatYUV420Planar,
+                                             OMX_COLOR_FormatYUV420Planar16,
+                                             OMX_COLOR_FormatYUV420SemiPlanar,
+                                             OMX_TI_COLOR_FormatYUV420PackedSemiPlanar,
+                                             OMX_COLOR_FormatCbYCrY,
+                                             OMX_QCOM_COLOR_FormatYVU420SemiPlanar,
+                                             COLOR_FormatYUVP010};
+
+static constexpr int32_t kDstFormatType[] = {
+        OMX_COLOR_Format16bitRGB565, OMX_COLOR_Format32BitRGBA8888, OMX_COLOR_Format32bitBGRA8888,
+        OMX_COLOR_FormatYUV444Y410, COLOR_Format32bitABGR2101010};
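+// The lists above are the source and destination formats this fuzzer exercises;
+// unsupported combinations are filtered out via ColorConverter::isValid() in
+// process() below.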
+
+class ColorConversionFuzzer {
+  public:
+    ColorConversionFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    int32_t getFrameSize(OMX_COLOR_FORMATTYPE colorFormat, int32_t stride, int32_t height);
+    bool isValidFormat(OMX_COLOR_FORMATTYPE srcFormat, OMX_COLOR_FORMATTYPE dstFormat);
+};
+
+int32_t ColorConversionFuzzer::getFrameSize(OMX_COLOR_FORMATTYPE colorFormat, int32_t stride,
+                                          int32_t height) {
+    int32_t frameSize;
+    switch ((int32_t)colorFormat) {
+        case OMX_COLOR_FormatCbYCrY:  // Interleaved YUV422
+        case OMX_COLOR_Format16bitRGB565: {
+            frameSize = 2 * stride * height;
+            break;
+        }
+        case OMX_COLOR_FormatYUV420Planar16:
+        case COLOR_FormatYUVP010:
+        case OMX_COLOR_FormatYUV444Y410: {
+            frameSize = 3 * stride * height;
+            break;
+        }
+        case OMX_COLOR_Format32bitBGRA8888:
+        case OMX_COLOR_Format32BitRGBA8888:
+        case COLOR_Format32bitABGR2101010: {
+            frameSize = 4 * stride * height;
+            break;
+        }
+        case OMX_COLOR_FormatYUV420Planar:
+        case OMX_COLOR_FormatYUV420SemiPlanar:
+        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+        default: {
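+            // 4:2:0 family: one full-resolution luma plane plus two chroma
+            // planes subsampled by 2 both horizontally and vertically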
+            frameSize = stride * height + 2 * (((stride + 1) / 2) * ((height + 1) / 2));
+            break;
+        }
+    }
+    return frameSize;
+}
+
+void ColorConversionFuzzer::process() {
+    OMX_COLOR_FORMATTYPE srcColorFormat =
+            static_cast<OMX_COLOR_FORMATTYPE>(mFdp.PickValueInArray(kSrcFormatType));
+    OMX_COLOR_FORMATTYPE dstColorFormat =
+            static_cast<OMX_COLOR_FORMATTYPE>(mFdp.PickValueInArray(kDstFormatType));
+    std::unique_ptr<ColorConverter> converter(new ColorConverter(srcColorFormat, dstColorFormat));
+    if (converter->isValid()) {
+        int32_t srcLeft, srcTop, srcRight, srcBottom, width, height, stride;
+        width = mFdp.ConsumeIntegralInRange<int32_t>(kMinFrameSize, kMaxFrameSize);
+        height = mFdp.ConsumeIntegralInRange<int32_t>(kMinFrameSize, kMaxFrameSize);
+        stride = mFdp.ConsumeIntegralInRange<int32_t>(width, 2 * kMaxFrameSize);
+
+        srcLeft = mFdp.ConsumeIntegralInRange<int32_t>(0, width - 1);
+        srcTop = mFdp.ConsumeIntegralInRange<int32_t>(0, height - 1);
+        srcRight = mFdp.ConsumeIntegralInRange<int32_t>(srcLeft, width - 1);
+        srcBottom = mFdp.ConsumeIntegralInRange<int32_t>(srcTop, height - 1);
+
+        int32_t dstLeft, dstTop, dstRight, dstBottom;
+        dstLeft = mFdp.ConsumeIntegralInRange<int32_t>(0, width - 1);
+        dstTop = mFdp.ConsumeIntegralInRange<int32_t>(0, height - 1);
+        dstRight = mFdp.ConsumeIntegralInRange<int32_t>(dstLeft, width - 1);
+        dstBottom = mFdp.ConsumeIntegralInRange<int32_t>(dstTop, height - 1);
+
+        int32_t srcFrameSize = getFrameSize(srcColorFormat, stride, height);
+        int32_t dstFrameSize = getFrameSize(dstColorFormat, stride, height);
+        std::vector<uint8_t> srcFrame(srcFrameSize), dstFrame(dstFrameSize);
+        mFdp.ConsumeData(srcFrame.data(), srcFrameSize);
+        converter->convert(srcFrame.data(), width, height, stride, srcLeft, srcTop, srcRight,
+                           srcBottom, dstFrame.data(), width, height, stride, dstLeft, dstTop,
+                           dstRight, dstBottom);
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ColorConversionFuzzer colorConversionFuzzer(data, size);
+    colorConversionFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 3509ef8..137b282 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -60,7 +60,7 @@
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8">
             <Alias name="OMX.google.vp8.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-1000000" />
@@ -70,21 +70,30 @@
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9">
             <Alias name="OMX.google.vp9.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-500000" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01">
-            <Limit name="size" min="96x96" max="1920x1080" />
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" min="24" max="2073600" />
-            <Limit name="bitrate" range="1-120000000" />
-            <Limit name="frame-rate" range="1-60" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
     </Decoders>
 
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index d7e2d18..d3fd790 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -27,66 +27,77 @@
             <Limit name="channel-count" max="2" />
             <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
             <Limit name="bitrate" range="8000-320000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.amrnb.decoder" type="audio/3gpp">
             <Alias name="OMX.google.amrnb.decoder" />
             <Limit name="channel-count" max="1" />
             <Limit name="sample-rate" ranges="8000" />
             <Limit name="bitrate" range="4750-12200" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.amrwb.decoder" type="audio/amr-wb">
             <Alias name="OMX.google.amrwb.decoder" />
             <Limit name="channel-count" max="1" />
             <Limit name="sample-rate" ranges="16000" />
             <Limit name="bitrate" range="6600-23850" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.aac.decoder" type="audio/mp4a-latm">
             <Alias name="OMX.google.aac.decoder" />
             <Limit name="channel-count" max="8" />
             <Limit name="sample-rate" ranges="7350,8000,11025,12000,16000,22050,24000,32000,44100,48000" />
             <Limit name="bitrate" range="8000-960000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.g711.alaw.decoder" type="audio/g711-alaw">
             <Alias name="OMX.google.g711.alaw.decoder" />
             <Limit name="channel-count" max="6" />
             <Limit name="sample-rate" ranges="8000-48000" />
             <Limit name="bitrate" range="64000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.g711.mlaw.decoder" type="audio/g711-mlaw">
             <Alias name="OMX.google.g711.mlaw.decoder" />
             <Limit name="channel-count" max="6" />
             <Limit name="sample-rate" ranges="8000-48000" />
             <Limit name="bitrate" range="64000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vorbis.decoder" type="audio/vorbis">
             <Alias name="OMX.google.vorbis.decoder" />
             <Limit name="channel-count" max="8" />
             <Limit name="sample-rate" ranges="8000-96000" />
             <Limit name="bitrate" range="32000-500000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.opus.decoder" type="audio/opus">
             <Alias name="OMX.google.opus.decoder" />
             <Limit name="channel-count" max="8" />
             <Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
             <Limit name="bitrate" range="6000-510000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.raw.decoder" type="audio/raw">
             <Alias name="OMX.google.raw.decoder" />
             <Limit name="channel-count" max="8" />
-            <Limit name="sample-rate" ranges="8000-96000" />
+            <Limit name="sample-rate" ranges="8000-192000" />
             <Limit name="bitrate" range="1-10000000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.flac.decoder" type="audio/flac">
             <Alias name="OMX.google.flac.decoder" />
             <Limit name="channel-count" max="8" />
             <Limit name="sample-rate" ranges="1-655350" />
             <Limit name="bitrate" range="1-21000000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.gsm.decoder" type="audio/gsm" domain="telephony">
             <Alias name="OMX.google.gsm.decoder" />
             <Limit name="channel-count" max="1" />
             <Limit name="sample-rate" ranges="8000" />
             <Limit name="bitrate" range="13000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.mpeg4.decoder" type="video/mp4v-es">
             <Alias name="OMX.google.mpeg4.decoder" />
@@ -97,6 +108,7 @@
             <Limit name="blocks-per-second" range="1-432000" />
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.h263.decoder" type="video/3gpp">
             <Alias name="OMX.google.h263.decoder" />
@@ -106,6 +118,7 @@
             <Limit name="alignment" value="2x2" />
             <Limit name="bitrate" range="1-384000" />
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.avc.decoder" type="video/avc" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.h264.decoder" />
@@ -126,6 +139,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.hevc.decoder" />
@@ -146,11 +160,12 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.decoder" />
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="block-count" range="1-16384" />
@@ -163,10 +178,11 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp9.decoder" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Variant name="!slow-cpu">
                 <Limit name="size" min="2x2" max="2048x2048" />
@@ -181,15 +197,26 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="!slow-cpu">
-            <Limit name="size" min="2x2" max="2048x2048" />
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+            <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
-            <Limit name="block-count" range="1-16384" />
-            <Limit name="blocks-per-second" range="1-2073600" />
-            <Limit name="bitrate" range="1-120000000" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
             <Alias name="OMX.google.mpeg2.decoder" />
@@ -200,6 +227,7 @@
             <Limit name="blocks-per-second" range="1-244800" />
             <Limit name="bitrate" range="1-20000000" />
             <Feature name="adaptive-playback" />
+            <Attribute name="software-codec" />
         </MediaCodec>
     </Decoders>
     <Encoders>
@@ -209,6 +237,7 @@
             <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
             <!-- also may support 64000, 88200  and 96000 Hz -->
             <Limit name="bitrate" range="8000-960000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.amrnb.encoder" type="audio/3gpp">
             <Alias name="OMX.google.amrnb.encoder" />
@@ -216,6 +245,7 @@
             <Limit name="sample-rate" ranges="8000" />
             <Limit name="bitrate" range="4750-12200" />
             <Feature name="bitrate-modes" value="CBR" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.amrwb.encoder" type="audio/amr-wb">
             <Alias name="OMX.google.amrwb.encoder" />
@@ -223,6 +253,7 @@
             <Limit name="sample-rate" ranges="16000" />
             <Limit name="bitrate" range="6600-23850" />
             <Feature name="bitrate-modes" value="CBR" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.flac.encoder" type="audio/flac">
             <Alias name="OMX.google.flac.encoder" />
@@ -231,6 +262,7 @@
             <Limit name="bitrate" range="1-21000000" />
             <Limit name="complexity" range="0-8"  default="5" />
             <Feature name="bitrate-modes" value="CQ" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.opus.encoder" type="audio/opus">
             <Limit name="channel-count" max="2" />
@@ -238,6 +270,7 @@
             <Limit name="bitrate" range="500-512000" />
             <Limit name="complexity" range="0-10"  default="5" />
             <Feature name="bitrate-modes" value="CBR,VBR" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
             <Alias name="OMX.google.h263.encoder" />
@@ -245,6 +278,7 @@
             <Limit name="size" min="176x144" max="176x144" />
             <Limit name="alignment" value="16x16" />
             <Limit name="bitrate" range="1-128000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.mpeg4.encoder" type="video/mp4v-es">
             <Alias name="OMX.google.mpeg4.encoder" />
@@ -254,6 +288,7 @@
             <Limit name="block-size" value="16x16" />
             <Limit name="blocks-per-second" range="12-1485" />
             <Limit name="bitrate" range="1-64000" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.avc.encoder" type="video/avc" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.h264.encoder" />
@@ -277,6 +312,7 @@
             <!-- Video Quality control -->
                     <!-- supports QP bounding with standard keys -->
             <Feature name="qp-bounds" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
             <Alias name="OMX.google.vp8.encoder" />
@@ -296,6 +332,7 @@
                 <Limit name="bitrate" range="1-20000000" />
             </Variant>
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
             <!-- profiles and levels:  ProfileMain : MainTierLevel51 -->
@@ -309,6 +346,7 @@
             <Limit name="complexity" range="0-10"  default="0" />
             <Limit name="quality" range="0-100"  default="80" />
             <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
             <Alias name="OMX.google.vp9.encoder" />
@@ -320,6 +358,25 @@
             <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
             <Limit name="bitrate" range="1-40000000" />
             <Feature name="bitrate-modes" value="VBR,CBR" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
+            <Limit name="alignment" value="2x2" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="1920x1920" />
+                <Limit name="block-count" range="1-8100" /> <!-- max 1920x1080 -->
+                <Limit name="bitrate" range="1-20000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="720x720" />
+                <Limit name="block-count" range="1-1350" /> <!-- max 720x480 -->
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Limit name="quality" range="0-100"  default="80" />
+            <Limit name="complexity" range="0-5"  default="0" />
+            <Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+            <Attribute name="software-codec" />
         </MediaCodec>
     </Encoders>
 </MediaCodecs>
diff --git a/media/libstagefright/exports.lds b/media/libstagefright/exports.lds
index f5ddf1e..7fe6d6c 100644
--- a/media/libstagefright/exports.lds
+++ b/media/libstagefright/exports.lds
@@ -8,19 +8,19 @@
         _ZNK7android14ColorConverter*;
         _ZN7android16SoftwareRenderer*;
         ABGRToARGB;
-        ABGRToI420;
+        ABGRToI420*;
         ABGRToUVRow_Any_NEON*;
         ABGRToUVRow_C;
         ABGRToUVRow_NEON*;
         ABGRToYRow_Any_NEON*;
         ABGRToYRow_C;
         ABGRToYRow_NEON*;
-        Android420ToI420;
+        Android420ToI420*;
         ARGB1555ToARGB;
         ARGB1555ToARGBRow_Any_NEON*;
         ARGB1555ToARGBRow_C;
         ARGB1555ToARGBRow_NEON*;
-        ARGB1555ToI420;
+        ARGB1555ToI420*;
         ARGB1555ToUVRow_Any_NEON*;
         ARGB1555ToUVRow_C;
         ARGB1555ToUVRow_NEON*;
@@ -31,7 +31,7 @@
         ARGB4444ToARGBRow_Any_NEON*;
         ARGB4444ToARGBRow_C;
         ARGB4444ToARGBRow_NEON*;
-        ARGB4444ToI420;
+        ARGB4444ToI420*;
         ARGB4444ToUVRow_Any_NEON*;
         ARGB4444ToUVRow_C;
         ARGB4444ToUVRow_NEON*;
@@ -115,7 +115,7 @@
         ARGBToARGB4444Row_C;
         ARGBToARGB4444Row_NEON*;
         ARGBToBGRA;
-        ARGBToI420;
+        ARGBToI420*;
         ARGBToRAWRow_Any_NEON*;
         ARGBToRAWRow_C;
         ARGBToRAWRow_NEON*;
@@ -147,7 +147,7 @@
         ARGBUnattenuateRow_C;
         ArmCpuCaps*;
         BGRAToARGB;
-        BGRAToI420;
+        BGRAToI420*;
         BGRAToUVRow_Any_NEON*;
         BGRAToUVRow_C;
         BGRAToUVRow_NEON*;
@@ -156,9 +156,9 @@
         BGRAToYRow_NEON*;
         BlendPlane;
         BlendPlaneRow_C;
-        CanonicalFourCC;
+        CanonicalFourCC*;
         ComputeCumulativeSumRow_C;
-        ConvertFromI420;
+        ConvertFromI420*;
         CopyPlane;
         CopyPlane_16;
         CopyRow_16_C;
@@ -182,40 +182,40 @@
         HalfFloatRow_Any_NEON*;
         HalfFloatRow_C;
         HalfFloatRow_NEON*;
-        I400Copy;
+        I400Copy*;
         I400Mirror;
         I400ToARGB;
         I400ToARGBRow_Any_NEON*;
         I400ToARGBRow_C;
         I400ToARGBRow_NEON*;
         I400ToI400;
-        I400ToI420;
+        I400ToI420*;
         I420AlphaToABGR;
         I420AlphaToARGB;
         I420Blend;
-        I420Copy;
+        I420Copy*;
         I420Interpolate;
         I420Mirror;
         I420Rect;
-        I420Scale;
-        I420Scale_16;
+        I420Scale*;
+        I420Scale_16*;
         I420ToABGR;
         I420ToARGB;
         I420ToARGB1555;
         I420ToARGB4444;
         I420ToBGRA;
         I420ToI400;
-        I420ToI422;
-        I420ToI444;
-        I420ToNV12;
-        I420ToNV21;
+        I420ToI422*;
+        I420ToI444*;
+        I420ToNV12*;
+        I420ToNV21*;
         I420ToRAW;
         I420ToRGB24;
         I420ToRGB565;
         I420ToRGB565Dither;
         I420ToRGBA;
-        I420ToUYVY;
-        I420ToYUY2;
+        I420ToUYVY*;
+        I420ToYUY2*;
         I422AlphaToARGBRow_Any_NEON*;
         I422AlphaToARGBRow_C;
         I422AlphaToARGBRow_NEON*;
@@ -232,7 +232,7 @@
         I422ToARGBRow_C;
         I422ToARGBRow_NEON*;
         I422ToBGRA;
-        I422ToI420;
+        I422ToI420*;
         I422ToRGB24Row_Any_NEON*;
         I422ToRGB24Row_C;
         I422ToRGB24Row_NEON*;
@@ -244,11 +244,11 @@
         I422ToRGBARow_Any_NEON*;
         I422ToRGBARow_C;
         I422ToRGBARow_NEON*;
-        I422ToUYVY;
+        I422ToUYVY*;
         I422ToUYVYRow_Any_NEON*;
         I422ToUYVYRow_C;
         I422ToUYVYRow_NEON*;
-        I422ToYUY2;
+        I422ToYUY2*;
         I422ToYUY2Row_Any_NEON*;
         I422ToYUY2Row_C;
         I422ToYUY2Row_NEON*;
@@ -258,7 +258,7 @@
         I444ToARGBRow_Any_NEON*;
         I444ToARGBRow_C;
         I444ToARGBRow_NEON*;
-        I444ToI420;
+        I444ToI420*;
         InitCpuFlags*;
         InterpolatePlane;
         InterpolateRow_16_C;
@@ -280,8 +280,8 @@
         kYvuH709Constants;
         kYvuI601Constants;
         kYvuJPEGConstants;
-        M420ToARGB;
-        M420ToI420;
+        M420ToARGB*;
+        M420ToI420*;
         MaskCpuFlags*;
         MergeUVPlane;
         MergeUVRow_Any_NEON*;
@@ -297,7 +297,7 @@
         NV12ToARGBRow_Any_NEON*;
         NV12ToARGBRow_C;
         NV12ToARGBRow_NEON*;
-        NV12ToI420;
+        NV12ToI420*;
         NV12ToRGB565;
         NV12ToRGB565Row_Any_NEON*;
         NV12ToRGB565Row_C;
@@ -306,12 +306,12 @@
         NV21ToARGBRow_Any_NEON*;
         NV21ToARGBRow_C;
         NV21ToARGBRow_NEON*;
-        NV21ToI420;
+        NV21ToI420*;
         RAWToARGB;
         RAWToARGBRow_Any_NEON*;
         RAWToARGBRow_C;
         RAWToARGBRow_NEON*;
-        RAWToI420;
+        RAWToI420*;
         RAWToRGB24;
         RAWToRGB24Row_Any_NEON*;
         RAWToRGB24Row_C;
@@ -326,7 +326,7 @@
         RGB24ToARGBRow_Any_NEON*;
         RGB24ToARGBRow_C;
         RGB24ToARGBRow_NEON*;
-        RGB24ToI420;
+        RGB24ToI420*;
         RGB24ToUVRow_Any_NEON*;
         RGB24ToUVRow_C;
         RGB24ToUVRow_NEON*;
@@ -337,7 +337,7 @@
         RGB565ToARGBRow_Any_NEON*;
         RGB565ToARGBRow_C;
         RGB565ToARGBRow_NEON*;
-        RGB565ToI420;
+        RGB565ToI420*;
         RGB565ToUVRow_Any_NEON*;
         RGB565ToUVRow_C;
         RGB565ToUVRow_NEON*;
@@ -345,7 +345,7 @@
         RGB565ToYRow_C;
         RGB565ToYRow_NEON*;
         RGBAToARGB;
-        RGBAToI420;
+        RGBAToI420*;
         RGBAToUVRow_Any_NEON*;
         RGBAToUVRow_C;
         RGBAToUVRow_NEON*;
@@ -355,7 +355,7 @@
         RGBColorMatrix;
         RGBColorTable;
         RGBColorTableRow_C;
-        Scale;
+        Scale*;
         ScaleAddRow_16_C;
         ScaleAddRow_C;
         ScaleAddRows_NEON*;
@@ -395,12 +395,12 @@
         ScaleFilterCols_NEON*;
         ScaleFilterReduce;
         ScaleFilterRows_NEON*;
-        ScalePlane;
-        ScalePlane_16;
-        ScalePlaneBilinearDown;
-        ScalePlaneBilinearDown_16;
-        ScalePlaneBilinearUp;
-        ScalePlaneBilinearUp_16;
+        ScalePlane*;
+        ScalePlane_16*;
+        ScalePlaneBilinearDown*;
+        ScalePlaneBilinearDown_16*;
+        ScalePlaneBilinearUp*;
+        ScalePlaneBilinearUp_16*;
         ScalePlaneVertical;
         ScalePlaneVertical_16;
         ScaleRowDown2_16_C;
@@ -475,7 +475,7 @@
         UYVYToARGBRow_Any_NEON*;
         UYVYToARGBRow_C;
         UYVYToARGBRow_NEON*;
-        UYVYToI420;
+        UYVYToI420*;
         UYVYToI422;
         UYVYToNV12;
         UYVYToUV422Row_Any_NEON*;
@@ -491,7 +491,7 @@
         YUY2ToARGBRow_Any_NEON*;
         YUY2ToARGBRow_C;
         YUY2ToARGBRow_NEON*;
-        YUY2ToI420;
+        YUY2ToI420*;
         YUY2ToI422;
         YUY2ToNV12;
         YUY2ToUV422Row_Any_NEON*;
diff --git a/media/libstagefright/filters/Android.bp b/media/libstagefright/filters/Android.bp
deleted file mode 100644
index e6d59ad..0000000
--- a/media/libstagefright/filters/Android.bp
+++ /dev/null
@@ -1,52 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_static {
-    name: "libstagefright_mediafilter",
-
-    srcs: [
-        "ColorConvert.cpp",
-        "GraphicBufferListener.cpp",
-        "IntrinsicBlurFilter.cpp",
-        "MediaFilter.cpp",
-        "RSFilter.cpp",
-        "SaturationFilter.cpp",
-        "saturationARGB.rscript",
-        "SimpleFilter.cpp",
-        "ZeroFilter.cpp",
-    ],
-
-    export_include_dirs: [
-        "include",
-    ],
-
-    local_include_dirs: [
-        "include/filters",
-    ],
-
-    cflags: [
-        "-Wno-multichar",
-        "-Werror",
-        "-Wall",
-    ],
-
-    header_libs: [
-        "libmediadrm_headers",
-    ],
-
-    shared_libs: [
-        "libgui",
-        "libmedia",
-        "libhidlmemory",
-    ],
-
-    sanitize: {
-        cfi: true,
-    },
-}
diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp
deleted file mode 100644
index a8d5dd2..0000000
--- a/media/libstagefright/filters/ColorConvert.cpp
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ColorConvert.h"
-
-#ifndef max
-#define max(a,b) ((a) > (b) ? (a) : (b))
-#endif
-#ifndef min
-#define min(a,b) ((a) < (b) ? (a) : (b))
-#endif
-
-namespace android {
-
-void YUVToRGB(
-        int32_t y, int32_t u, int32_t v,
-        int32_t* r, int32_t* g, int32_t* b) {
-    y -= 16;
-    u -= 128;
-    v -= 128;
-
-    *b = 1192 * y + 2066 * u;
-    *g = 1192 * y - 833 * v - 400 * u;
-    *r = 1192 * y + 1634 * v;
-
-    *r = min(262143, max(0, *r));
-    *g = min(262143, max(0, *g));
-    *b = min(262143, max(0, *b));
-
-    *r >>= 10;
-    *g >>= 10;
-    *b >>= 10;
-}
-
-void convertYUV420spToARGB(
-        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
-        uint8_t *dest) {
-    const int32_t bytes_per_pixel = 2;
-
-    for (int32_t i = 0; i < height; i++) {
-        for (int32_t j = 0; j < width; j++) {
-            int32_t y = *(pY + i * width + j);
-            int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
-            int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
-
-            int32_t r, g, b;
-            YUVToRGB(y, u, v, &r, &g, &b);
-
-            *dest++ = 0xFF;
-            *dest++ = r;
-            *dest++ = g;
-            *dest++ = b;
-        }
-    }
-}
-
-void convertYUV420spToRGB888(
-        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
-        uint8_t *dest) {
-    const int32_t bytes_per_pixel = 2;
-
-    for (int32_t i = 0; i < height; i++) {
-        for (int32_t j = 0; j < width; j++) {
-            int32_t y = *(pY + i * width + j);
-            int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
-            int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
-
-            int32_t r, g, b;
-            YUVToRGB(y, u, v, &r, &g, &b);
-
-            *dest++ = r;
-            *dest++ = g;
-            *dest++ = b;
-        }
-    }
-}
-
-// HACK - not even slightly optimized
-// TODO: remove when RGBA support is added to SoftwareRenderer
-void convertRGBAToARGB(
-        uint8_t *src, int32_t width, int32_t height, uint32_t stride,
-        uint8_t *dest) {
-    for (int32_t i = 0; i < height; ++i) {
-        for (int32_t j = 0; j < width; ++j) {
-            uint8_t r = *src++;
-            uint8_t g = *src++;
-            uint8_t b = *src++;
-            uint8_t a = *src++;
-            *dest++ = a;
-            *dest++ = r;
-            *dest++ = g;
-            *dest++ = b;
-        }
-        src += (stride - width) * 4;
-    }
-}
-
-}   // namespace android
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
deleted file mode 100644
index db061c1..0000000
--- a/media/libstagefright/filters/GraphicBufferListener.cpp
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "GraphicBufferListener"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include <gui/BufferItem.h>
-#include <utils/String8.h>
-
-#include "GraphicBufferListener.h"
-
-namespace android {
-
-status_t GraphicBufferListener::init(
-        const sp<AMessage> &notify,
-        size_t bufferWidth, size_t bufferHeight, size_t bufferCount) {
-    mNotify = notify;
-
-    String8 name("GraphicBufferListener");
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-    mConsumer->setConsumerName(name);
-    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
-    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN);
-
-    status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount);
-    if (err != NO_ERROR) {
-        ALOGE("Unable to set BQ max acquired buffer count to %zu: %d",
-                bufferCount, err);
-        return err;
-    }
-
-    wp<BufferQueue::ConsumerListener> listener =
-        static_cast<BufferQueue::ConsumerListener*>(this);
-    sp<BufferQueue::ProxyConsumerListener> proxy =
-        new BufferQueue::ProxyConsumerListener(listener);
-
-    err = mConsumer->consumerConnect(proxy, false);
-    if (err != NO_ERROR) {
-        ALOGE("Error connecting to BufferQueue: %s (%d)",
-                strerror(-err), err);
-        return err;
-    }
-
-    ALOGV("init() successful.");
-
-    return OK;
-}
-
-void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) {
-    ALOGV("onFrameAvailable() called");
-
-    {
-        Mutex::Autolock autoLock(mMutex);
-        mNumFramesAvailable++;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    mNotify->setWhat(kWhatFrameAvailable);
-    mNotify->post();
-}
-
-void GraphicBufferListener::onBuffersReleased() {
-    ALOGV("onBuffersReleased() called");
-    // nothing to do
-}
-
-void GraphicBufferListener::onSidebandStreamChanged() {
-    ALOGW("GraphicBufferListener cannot consume sideband streams.");
-    // nothing to do
-}
-
-BufferItem GraphicBufferListener::getBufferItem() {
-    BufferItem item;
-
-    {
-        Mutex::Autolock autoLock(mMutex);
-        if (mNumFramesAvailable <= 0) {
-            ALOGE("getBuffer() called with no frames available");
-            return item;
-        }
-        mNumFramesAvailable--;
-    }
-
-    status_t err = mConsumer->acquireBuffer(&item, 0);
-    if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
-        // shouldn't happen, since we track num frames available
-        ALOGE("frame was not available");
-        item.mSlot = -1;
-        return item;
-    } else if (err != OK) {
-        ALOGE("acquireBuffer returned err=%d", err);
-        item.mSlot = -1;
-        return item;
-    }
-
-    // Wait for it to become available.
-    err = item.mFence->waitForever("GraphicBufferListener::getBufferItem");
-    if (err != OK) {
-        ALOGW("failed to wait for buffer fence: %d", err);
-        // keep going
-    }
-
-    // If this is the first time we're seeing this buffer, add it to our
-    // slot table.
-    if (item.mGraphicBuffer != NULL) {
-        ALOGV("setting mBufferSlot %d", item.mSlot);
-        mBufferSlot[item.mSlot] = item.mGraphicBuffer;
-    }
-
-    return item;
-}
-
-sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) {
-    sp<GraphicBuffer> buf;
-    if (item.mSlot < 0 || item.mSlot >= BufferQueue::NUM_BUFFER_SLOTS) {
-        ALOGE("getBuffer() received invalid BufferItem: mSlot==%d", item.mSlot);
-        return buf;
-    }
-
-    buf = mBufferSlot[item.mSlot];
-    CHECK(buf.get() != NULL);
-
-    return buf;
-}
-
-status_t GraphicBufferListener::releaseBuffer(BufferItem item) {
-    if (item.mSlot < 0 || item.mSlot >= BufferQueue::NUM_BUFFER_SLOTS) {
-        ALOGE("releaseBuffer() received invalid BufferItem: mSlot==%d", item.mSlot);
-        return ERROR_OUT_OF_RANGE;
-    }
-
-    mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
-            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
-
-    return OK;
-}
-
-}   // namespace android
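
The listener deleted above is a thin wrapper around the stock BufferQueue consumer flow. For reference, the per-frame acquire/wait/release cycle reduces to roughly the sketch below, restricted to the calls already used in the file; the helper name drainOneFrame and the free-standing form are illustrative, not part of the removed class.

// Sketch only: the per-frame consumer cycle used by the deleted
// GraphicBufferListener (assumes a consumer connected as shown above).
static void drainOneFrame(const sp<IGraphicBufferConsumer> &consumer,
                          sp<GraphicBuffer> bufferSlot[BufferQueue::NUM_BUFFER_SLOTS]) {
    BufferItem item;
    status_t err = consumer->acquireBuffer(&item, 0 /* presentWhen */);
    if (err != OK) {
        ALOGE("acquireBuffer returned err=%d", err);   // includes NO_BUFFER_AVAILABLE
        return;
    }
    // Wait for the producer's fence before touching the pixels.
    if (item.mFence->waitForever("drainOneFrame") != OK) {
        ALOGW("fence wait failed; continuing");
    }
    // The first acquire of a slot carries the GraphicBuffer; later acquires of
    // the same slot may leave mGraphicBuffer NULL, so cache it per slot.
    if (item.mGraphicBuffer != NULL) {
        bufferSlot[item.mSlot] = item.mGraphicBuffer;
    }
    // ... read bufferSlot[item.mSlot] here ...
    consumer->releaseBuffer(item.mSlot, item.mFrameNumber,
            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
}
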
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
deleted file mode 100644
index e00afd9..0000000
--- a/media/libstagefright/filters/IntrinsicBlurFilter.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IntrinsicBlurFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "IntrinsicBlurFilter.h"
-
-namespace android {
-
-status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) {
-    status_t err = SimpleFilter::configure(msg);
-    if (err != OK) {
-        return err;
-    }
-
-    if (!msg->findString("cacheDir", &mCacheDir)) {
-        ALOGE("Failed to find cache directory in config message.");
-        return NAME_NOT_FOUND;
-    }
-
-    return OK;
-}
-
-status_t IntrinsicBlurFilter::start() {
-    // TODO: use a single RS context object for entire application
-    mRS = new RSC::RS();
-
-    if (!mRS->init(mCacheDir.c_str())) {
-        ALOGE("Failed to initialize RenderScript context.");
-        return NO_INIT;
-    }
-
-    // 32-bit elements for ARGB8888
-    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
-    RSC::Type::Builder tb(mRS, e);
-    tb.setX(mWidth);
-    tb.setY(mHeight);
-    RSC::sp<const RSC::Type> t = tb.create();
-
-    mAllocIn = RSC::Allocation::createTyped(mRS, t);
-    mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
-    mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e);
-    mBlur->setRadius(mBlurRadius);
-    mBlur->setInput(mAllocIn);
-
-    return OK;
-}
-
-void IntrinsicBlurFilter::reset() {
-    mBlur.clear();
-    mAllocOut.clear();
-    mAllocIn.clear();
-    mRS.clear();
-}
-
-status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) {
-    sp<AMessage> params;
-    CHECK(msg->findMessage("params", &params));
-
-    float blurRadius;
-    if (params->findFloat("blur-radius", &blurRadius)) {
-        mBlurRadius = blurRadius;
-    }
-
-    return OK;
-}
-
-status_t IntrinsicBlurFilter::processBuffers(
-        const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
-    mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
-    mBlur->forEach(mAllocOut);
-    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
-    return OK;
-}
-
-}   // namespace android
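
For context, the RenderScript setup in the deleted filter boils down to the following one-shot sketch. It uses only the RSC calls that appear above; the function name, the raw byte-pointer parameters, and the error handling are assumptions for illustration.

// Sketch: one-shot ARGB8888 blur through the RenderScript C++ API,
// mirroring IntrinsicBlurFilter::start()/processBuffers() above.
static status_t blurARGB8888(const char *cacheDir, int32_t width, int32_t height,
                             float radius, const uint8_t *src, uint8_t *dst) {
    RSC::sp<RSC::RS> rs = new RSC::RS();
    if (!rs->init(cacheDir)) {
        return NO_INIT;                                     // context needs a writable cache dir
    }
    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(rs); // 4 x 8 bits per ARGB pixel
    RSC::Type::Builder tb(rs, e);
    tb.setX(width);
    tb.setY(height);
    RSC::sp<const RSC::Type> t = tb.create();
    RSC::sp<RSC::Allocation> in = RSC::Allocation::createTyped(rs, t);
    RSC::sp<RSC::Allocation> out = RSC::Allocation::createTyped(rs, t);
    RSC::sp<RSC::ScriptIntrinsicBlur> blur = RSC::ScriptIntrinsicBlur::create(rs, e);
    blur->setRadius(radius);                                // same knob as "blur-radius"
    blur->setInput(in);
    in->copy1DRangeFrom(0, width * height, src);            // upload one frame
    blur->forEach(out);                                     // run the intrinsic
    out->copy1DRangeTo(0, width * height, dst);             // read the result back
    return OK;
}
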
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
deleted file mode 100644
index c7baa73..0000000
--- a/media/libstagefright/filters/MediaFilter.cpp
+++ /dev/null
@@ -1,840 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaFilter"
-
-#include <inttypes.h>
-#include <utils/Trace.h>
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <media/stagefright/BufferProducerWrapper.h>
-#include <media/stagefright/MediaCodecConstants.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaFilter.h>
-
-#include <media/MediaCodecBuffer.h>
-
-#include <gui/BufferItem.h>
-
-#include "ColorConvert.h"
-#include "GraphicBufferListener.h"
-#include "IntrinsicBlurFilter.h"
-#include "RSFilter.h"
-#include "SaturationFilter.h"
-#include "ZeroFilter.h"
-
-namespace android {
-
-class MediaFilter::BufferChannel : public BufferChannelBase {
-public:
-    BufferChannel(const sp<AMessage> &in, const sp<AMessage> &out)
-        : mInputBufferFilled(in), mOutputBufferDrained(out) {
-    }
-
-    ~BufferChannel() override = default;
-
-    // BufferChannelBase
-
-    status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override {
-        sp<AMessage> msg = mInputBufferFilled->dup();
-        msg->setObject("buffer", buffer);
-        msg->post();
-        return OK;
-    }
-
-    status_t queueSecureInputBuffer(
-            const sp<MediaCodecBuffer> &,
-            bool,
-            const uint8_t *,
-            const uint8_t *,
-            CryptoPlugin::Mode,
-            CryptoPlugin::Pattern,
-            const CryptoPlugin::SubSample *,
-            size_t,
-            AString *) override {
-        return INVALID_OPERATION;
-    }
-
-    status_t renderOutputBuffer(
-            const sp<MediaCodecBuffer> &buffer, int64_t /* timestampNs */) override {
-        sp<AMessage> msg = mOutputBufferDrained->dup();
-        msg->setObject("buffer", buffer);
-        msg->post();
-        return OK;
-    }
-
-    status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override {
-        if (FindBufferIndex(&mInputBuffers, buffer) >= 0) {
-            sp<AMessage> msg = mInputBufferFilled->dup();
-            msg->setObject("buffer", buffer);
-            msg->post();
-            return OK;
-        }
-        sp<AMessage> msg = mOutputBufferDrained->dup();
-        msg->setObject("buffer", buffer);
-        msg->post();
-        return OK;
-    }
-
-    void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-        if (!array) {
-            return;
-        }
-        array->clear();
-        array->appendVector(mInputBuffers);
-    }
-
-    void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-        if (!array) {
-            return;
-        }
-        array->clear();
-        array->appendVector(mOutputBuffers);
-    }
-
-    // For MediaFilter
-
-    void fillThisBuffer(const sp<MediaCodecBuffer> &buffer) {
-        ssize_t index = FindBufferIndex(&mInputBuffers, buffer);
-        mCallback->onInputBufferAvailable(index, buffer);
-    }
-
-    void drainThisBuffer(const sp<MediaCodecBuffer> &buffer, int flags) {
-        ssize_t index = FindBufferIndex(&mOutputBuffers, buffer);
-        buffer->meta()->setInt32("flags", flags);
-        mCallback->onOutputBufferAvailable(index, buffer);
-    }
-
-    template <class T>
-    void setInputBuffers(T begin, T end) {
-        mInputBuffers.clear();
-        for (T it = begin; it != end; ++it) {
-            mInputBuffers.push_back(it->mData);
-        }
-    }
-
-    template <class T>
-    void setOutputBuffers(T begin, T end) {
-        mOutputBuffers.clear();
-        for (T it = begin; it != end; ++it) {
-            mOutputBuffers.push_back(it->mData);
-        }
-    }
-
-private:
-    sp<AMessage> mInputBufferFilled;
-    sp<AMessage> mOutputBufferDrained;
-    Vector<sp<MediaCodecBuffer>> mInputBuffers;
-    Vector<sp<MediaCodecBuffer>> mOutputBuffers;
-
-    static ssize_t FindBufferIndex(
-            Vector<sp<MediaCodecBuffer>> *array, const sp<MediaCodecBuffer> &buffer) {
-        for (size_t i = 0; i < array->size(); ++i) {
-            if (array->itemAt(i) == buffer) {
-                return i;
-            }
-        }
-        return -1;
-    }
-};
-
-// parameter: number of input and output buffers
-static const size_t kBufferCountActual = 4;
-
-MediaFilter::MediaFilter()
-    : mState(UNINITIALIZED),
-      mGeneration(0),
-      mGraphicBufferListener(NULL) {
-}
-
-MediaFilter::~MediaFilter() {
-}
-
-//////////////////// PUBLIC FUNCTIONS //////////////////////////////////////////
-
-std::shared_ptr<BufferChannelBase> MediaFilter::getBufferChannel() {
-    if (!mBufferChannel) {
-        mBufferChannel = std::make_shared<BufferChannel>(
-                new AMessage(kWhatInputBufferFilled, this),
-                new AMessage(kWhatOutputBufferDrained, this));
-    }
-    return mBufferChannel;
-}
-
-void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) {
-    msg->setWhat(kWhatAllocateComponent);
-    msg->setTarget(this);
-    msg->post();
-}
-
-void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) {
-    msg->setWhat(kWhatConfigureComponent);
-    msg->setTarget(this);
-    msg->post();
-}
-
-void MediaFilter::initiateCreateInputSurface() {
-    (new AMessage(kWhatCreateInputSurface, this))->post();
-}
-
-void MediaFilter::initiateSetInputSurface(
-        const sp<PersistentSurface> & /* surface */) {
-    ALOGW("initiateSetInputSurface() unsupported");
-}
-
-void MediaFilter::initiateStart() {
-    (new AMessage(kWhatStart, this))->post();
-}
-
-void MediaFilter::initiateShutdown(bool keepComponentAllocated) {
-    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
-    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
-    msg->post();
-}
-
-void MediaFilter::signalFlush() {
-    (new AMessage(kWhatFlush, this))->post();
-}
-
-void MediaFilter::signalResume() {
-    (new AMessage(kWhatResume, this))->post();
-}
-
-// nothing to do
-void MediaFilter::signalRequestIDRFrame() {
-    return;
-}
-
-void MediaFilter::signalSetParameters(const sp<AMessage> &params) {
-    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
-    msg->setMessage("params", params);
-    msg->post();
-}
-
-void MediaFilter::signalEndOfInputStream() {
-    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
-}
-
-void MediaFilter::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatAllocateComponent:
-        {
-            onAllocateComponent(msg);
-            break;
-        }
-        case kWhatConfigureComponent:
-        {
-            onConfigureComponent(msg);
-            break;
-        }
-        case kWhatStart:
-        {
-            onStart();
-            break;
-        }
-        case kWhatProcessBuffers:
-        {
-            processBuffers();
-            break;
-        }
-        case kWhatInputBufferFilled:
-        {
-            onInputBufferFilled(msg);
-            break;
-        }
-        case kWhatOutputBufferDrained:
-        {
-            onOutputBufferDrained(msg);
-            break;
-        }
-        case kWhatShutdown:
-        {
-            onShutdown(msg);
-            break;
-        }
-        case kWhatFlush:
-        {
-            onFlush();
-            break;
-        }
-        case kWhatResume:
-        {
-            // nothing to do
-            break;
-        }
-        case kWhatSetParameters:
-        {
-            onSetParameters(msg);
-            break;
-        }
-        case kWhatCreateInputSurface:
-        {
-            onCreateInputSurface();
-            break;
-        }
-        case GraphicBufferListener::kWhatFrameAvailable:
-        {
-            onInputFrameAvailable();
-            break;
-        }
-        case kWhatSignalEndOfInputStream:
-        {
-            onSignalEndOfInputStream();
-            break;
-        }
-        default:
-        {
-            ALOGE("Message not handled:\n%s", msg->debugString().c_str());
-            break;
-        }
-    }
-}
-
-//////////////////// HELPER FUNCTIONS //////////////////////////////////////////
-
-void MediaFilter::signalProcessBuffers() {
-    (new AMessage(kWhatProcessBuffers, this))->post();
-}
-
-void MediaFilter::signalError(status_t error) {
-    mCallback->onError(error, ACTION_CODE_FATAL);
-}
-
-status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) {
-    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
-    const bool isInput = portIndex == kPortIndexInput;
-    const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize;
-
-    CHECK(mBuffers[portIndex].isEmpty());
-
-    ALOGV("Allocating %zu buffers of size %zu on %s port",
-            kBufferCountActual, bufferSize,
-            isInput ? "input" : "output");
-
-    // trigger output format change
-    sp<AMessage> outputFormat = mOutputFormat->dup();
-    for (size_t i = 0; i < kBufferCountActual; ++i) {
-        BufferInfo info;
-        info.mStatus = BufferInfo::OWNED_BY_US;
-        info.mBufferID = i;
-        info.mGeneration = mGeneration;
-        info.mOutputFlags = 0;
-        info.mData = new MediaCodecBuffer(
-                isInput ? mInputFormat : outputFormat,
-                new ABuffer(bufferSize));
-        info.mData->meta()->setInt64("timeUs", 0);
-
-        mBuffers[portIndex].push_back(info);
-
-        if (!isInput) {
-            mAvailableOutputBuffers.push(
-                    &mBuffers[portIndex].editItemAt(i));
-        }
-    }
-    if (isInput) {
-        mBufferChannel->setInputBuffers(
-                mBuffers[portIndex].begin(), mBuffers[portIndex].end());
-    } else {
-        mBufferChannel->setOutputBuffers(
-                mBuffers[portIndex].begin(), mBuffers[portIndex].end());
-    }
-
-    return OK;
-}
-
-MediaFilter::BufferInfo* MediaFilter::findBuffer(
-        uint32_t portIndex, const sp<MediaCodecBuffer> &buffer,
-        ssize_t *index) {
-    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
-        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
-
-        if (info->mData == buffer) {
-            if (index != NULL) {
-                *index = i;
-            }
-            return info;
-        }
-    }
-
-    TRESPASS();
-
-    return NULL;
-}
-
-void MediaFilter::postFillThisBuffer(BufferInfo *info) {
-    ALOGV("postFillThisBuffer on buffer %d", info->mBufferID);
-    if (mPortEOS[kPortIndexInput]) {
-        return;
-    }
-
-    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
-
-    info->mGeneration = mGeneration;
-
-    info->mData->meta()->clear();
-
-    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this);
-    reply->setInt32("buffer-id", info->mBufferID);
-
-    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
-
-    mBufferChannel->fillThisBuffer(info->mData);
-}
-
-void MediaFilter::postDrainThisBuffer(BufferInfo *info) {
-    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
-
-    info->mGeneration = mGeneration;
-
-    sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this);
-    reply->setInt32("buffer-id", info->mBufferID);
-
-    mBufferChannel->drainThisBuffer(info->mData, info->mOutputFlags);
-
-    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
-}
-
-void MediaFilter::postEOS() {
-    mCallback->onEos(ERROR_END_OF_STREAM);
-
-    ALOGV("Sent kWhatEOS.");
-}
-
-void MediaFilter::requestFillEmptyInput() {
-    if (mPortEOS[kPortIndexInput]) {
-        return;
-    }
-
-    for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
-        BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
-
-        if (info->mStatus == BufferInfo::OWNED_BY_US) {
-            postFillThisBuffer(info);
-        }
-    }
-}
-
-void MediaFilter::processBuffers() {
-    if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) {
-        ALOGV("Skipping process (buffers unavailable)");
-        return;
-    }
-
-    if (mPortEOS[kPortIndexOutput]) {
-        // TODO notify caller of queueInput error when it is supported
-        // in MediaCodec
-        ALOGW("Tried to process a buffer after EOS.");
-        return;
-    }
-
-    BufferInfo *inputInfo = mAvailableInputBuffers[0];
-    mAvailableInputBuffers.removeAt(0);
-    BufferInfo *outputInfo = mAvailableOutputBuffers[0];
-    mAvailableOutputBuffers.removeAt(0);
-
-    status_t err;
-    err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData);
-    if (err != (status_t)OK) {
-        outputInfo->mData->meta()->setInt32("err", err);
-    }
-
-    int64_t timeUs;
-    CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs));
-    outputInfo->mData->meta()->setInt64("timeUs", timeUs);
-    outputInfo->mOutputFlags = 0;
-    int32_t eos = 0;
-    if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) {
-        outputInfo->mOutputFlags |= BUFFER_FLAG_END_OF_STREAM;
-        mPortEOS[kPortIndexOutput] = true;
-        outputInfo->mData->meta()->setInt32("eos", eos);
-        postEOS();
-        ALOGV("Output stream saw EOS.");
-    }
-
-    ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]",
-                inputInfo->mBufferID, inputInfo->mData->size(),
-                outputInfo->mBufferID, outputInfo->mData->size());
-
-    if (mGraphicBufferListener != NULL) {
-        delete inputInfo;
-    } else {
-        postFillThisBuffer(inputInfo);
-    }
-    postDrainThisBuffer(outputInfo);
-
-    // prevent any corner case where buffers could get stuck in queue
-    signalProcessBuffers();
-}
-
-void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) {
-    CHECK_EQ(mState, UNINITIALIZED);
-
-    CHECK(msg->findString("componentName", &mComponentName));
-    const char* name = mComponentName.c_str();
-    if (!strcasecmp(name, "android.filter.zerofilter")) {
-        mFilter = new ZeroFilter;
-    } else if (!strcasecmp(name, "android.filter.saturation")) {
-        mFilter = new SaturationFilter;
-    } else if (!strcasecmp(name, "android.filter.intrinsicblur")) {
-        mFilter = new IntrinsicBlurFilter;
-    } else if (!strcasecmp(name, "android.filter.RenderScript")) {
-        mFilter = new RSFilter;
-    } else {
-        ALOGE("Unrecognized filter name: %s", name);
-        signalError(NAME_NOT_FOUND);
-        return;
-    }
-
-    mCallback->onComponentAllocated(mComponentName.c_str());
-    mState = INITIALIZED;
-    ALOGV("Handled kWhatAllocateComponent.");
-}
-
-void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) {
-    // TODO: generalize to allow audio filters as well as video
-
-    CHECK_EQ(mState, INITIALIZED);
-
-    // get params - at least mime, width & height
-    AString mime;
-    CHECK(msg->findString("mime", &mime));
-    if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) {
-        ALOGE("Bad mime: %s", mime.c_str());
-        signalError(BAD_VALUE);
-        return;
-    }
-
-    CHECK(msg->findInt32("width", &mWidth));
-    CHECK(msg->findInt32("height", &mHeight));
-    if (!msg->findInt32("stride", &mStride)) {
-        mStride = mWidth;
-    }
-    if (!msg->findInt32("slice-height", &mSliceHeight)) {
-        mSliceHeight = mHeight;
-    }
-
-    mMaxInputSize = mWidth * mHeight * 4;   // room for ARGB8888
-    int32_t maxInputSize;
-    if (msg->findInt32("max-input-size", &maxInputSize)
-            && (size_t)maxInputSize > mMaxInputSize) {
-        mMaxInputSize = maxInputSize;
-    }
-
-    if (!msg->findInt32("color-format", &mColorFormatIn)) {
-        // default to OMX_COLOR_Format32bitARGB8888
-        mColorFormatIn = OMX_COLOR_Format32bitARGB8888;
-        msg->setInt32("color-format", mColorFormatIn);
-    }
-    mColorFormatOut = mColorFormatIn;
-
-    mMaxOutputSize = mWidth * mHeight * 4;  // room for ARGB8888
-
-    AString cacheDir;
-    if (!msg->findString("cacheDir", &cacheDir)) {
-        ALOGE("Failed to find cache directory in config message.");
-        signalError(NAME_NOT_FOUND);
-        return;
-    }
-
-    status_t err;
-    err = mFilter->configure(msg);
-    if (err != (status_t)OK) {
-        ALOGE("Failed to configure filter component, err %d", err);
-        signalError(err);
-        return;
-    }
-
-    mInputFormat = new AMessage();
-    mInputFormat->setString("mime", mime.c_str());
-    mInputFormat->setInt32("stride", mStride);
-    mInputFormat->setInt32("slice-height", mSliceHeight);
-    mInputFormat->setInt32("color-format", mColorFormatIn);
-    mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
-    mInputFormat->setInt32("width", mWidth);
-    mInputFormat->setInt32("height", mHeight);
-
-    mOutputFormat = new AMessage();
-    mOutputFormat->setString("mime", mime.c_str());
-    mOutputFormat->setInt32("stride", mStride);
-    mOutputFormat->setInt32("slice-height", mSliceHeight);
-    mOutputFormat->setInt32("color-format", mColorFormatOut);
-    mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
-    mOutputFormat->setInt32("width", mWidth);
-    mOutputFormat->setInt32("height", mHeight);
-    mOutputFormat->setInt32("using-sw-renderer", 1);
-
-    mCallback->onComponentConfigured(mInputFormat, mOutputFormat);
-    mState = CONFIGURED;
-    ALOGV("Handled kWhatConfigureComponent.");
-}
-
-void MediaFilter::onStart() {
-    CHECK_EQ(mState, CONFIGURED);
-
-    allocateBuffersOnPort(kPortIndexInput);
-
-    allocateBuffersOnPort(kPortIndexOutput);
-
-    mCallback->onStartCompleted();
-
-    status_t err = mFilter->start();
-    if (err != (status_t)OK) {
-        ALOGE("Failed to start filter component, err %d", err);
-        signalError(err);
-        return;
-    }
-
-    mPortEOS[kPortIndexInput] = false;
-    mPortEOS[kPortIndexOutput] = false;
-    mInputEOSResult = OK;
-    mState = STARTED;
-
-    requestFillEmptyInput();
-    ALOGV("Handled kWhatStart.");
-}
-
-void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) {
-    sp<RefBase> obj;
-    CHECK(msg->findObject("buffer", &obj));
-    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-    ssize_t index = -1;
-    BufferInfo *info = findBuffer(kPortIndexInput, buffer, &index);
-
-    if (mState != STARTED) {
-        // we're not running, so we'll just keep that buffer...
-        info->mStatus = BufferInfo::OWNED_BY_US;
-        return;
-    }
-
-    if (info->mGeneration != mGeneration) {
-        ALOGV("Caught a stale input buffer [index %zd]", index);
-        // buffer is stale (taken before a flush/shutdown) - repost it
-        CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
-        postFillThisBuffer(info);
-        return;
-    }
-
-    CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
-    info->mStatus = BufferInfo::OWNED_BY_US;
-
-    int32_t err = OK;
-    bool eos = false;
-
-    int32_t isCSD;
-    if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD)
-            && isCSD != 0) {
-        // ignore codec-specific data buffers
-        ALOGW("MediaFilter received a codec-specific data buffer");
-        postFillThisBuffer(info);
-        return;
-    }
-
-    int32_t tmp;
-    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
-        eos = true;
-        err = ERROR_END_OF_STREAM;
-    }
-
-    mAvailableInputBuffers.push_back(info);
-    processBuffers();
-
-    if (eos) {
-        mPortEOS[kPortIndexInput] = true;
-        mInputEOSResult = err;
-    }
-
-    ALOGV("Handled kWhatInputBufferFilled. [index %zd]", index);
-}
-
-void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) {
-    sp<RefBase> obj;
-    CHECK(msg->findObject("buffer", &obj));
-    sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
-    ssize_t index = -1;
-    BufferInfo *info = findBuffer(kPortIndexOutput, buffer, &index);
-
-    if (mState != STARTED) {
-        // we're not running, so we'll just keep that buffer...
-        info->mStatus = BufferInfo::OWNED_BY_US;
-        return;
-    }
-
-    if (info->mGeneration != mGeneration) {
-        ALOGV("Caught a stale output buffer [index %zd]", index);
-        // buffer is stale (taken before a flush/shutdown) - keep it
-        CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
-        return;
-    }
-
-    CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
-    info->mStatus = BufferInfo::OWNED_BY_US;
-
-    mAvailableOutputBuffers.push_back(info);
-
-    processBuffers();
-
-    ALOGV("Handled kWhatOutputBufferDrained. [index %zd]", index);
-}
-
-void MediaFilter::onShutdown(const sp<AMessage> &msg) {
-    mGeneration++;
-
-    if (mState != UNINITIALIZED) {
-        mFilter->reset();
-    }
-
-    int32_t keepComponentAllocated;
-    CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated));
-    if (!keepComponentAllocated || mState == UNINITIALIZED) {
-        mState = UNINITIALIZED;
-    } else {
-        mState = INITIALIZED;
-    }
-
-    if (keepComponentAllocated) {
-        mCallback->onStopCompleted();
-    } else {
-        mCallback->onReleaseCompleted();
-    }
-}
-
-void MediaFilter::onFlush() {
-    mGeneration++;
-
-    mAvailableInputBuffers.clear();
-    for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
-        BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
-        info->mStatus = BufferInfo::OWNED_BY_US;
-    }
-    mAvailableOutputBuffers.clear();
-    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
-        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
-        info->mStatus = BufferInfo::OWNED_BY_US;
-        mAvailableOutputBuffers.push_back(info);
-    }
-
-    mPortEOS[kPortIndexInput] = false;
-    mPortEOS[kPortIndexOutput] = false;
-    mInputEOSResult = OK;
-
-    mCallback->onFlushCompleted();
-    ALOGV("Posted kWhatFlushCompleted");
-
-    // MediaCodec returns all input buffers after flush, so in
-    // onInputBufferFilled we call postFillThisBuffer on them
-}
-
-void MediaFilter::onSetParameters(const sp<AMessage> &msg) {
-    CHECK(mState != STARTED);
-
-    status_t err = mFilter->setParameters(msg);
-    if (err != (status_t)OK) {
-        ALOGE("setParameters returned err %d", err);
-    }
-}
-
-void MediaFilter::onCreateInputSurface() {
-    CHECK(mState == CONFIGURED);
-
-    mGraphicBufferListener = new GraphicBufferListener;
-
-    sp<AMessage> notify = new AMessage();
-    notify->setTarget(this);
-    status_t err = mGraphicBufferListener->init(
-            notify, mStride, mSliceHeight, kBufferCountActual);
-
-    if (err != OK) {
-        ALOGE("Failed to init mGraphicBufferListener: %d", err);
-        signalError(err);
-        return;
-    }
-
-    mCallback->onInputSurfaceCreated(
-            nullptr, nullptr,
-            new BufferProducerWrapper(
-                    mGraphicBufferListener->getIGraphicBufferProducer()));
-}
-
-void MediaFilter::onInputFrameAvailable() {
-    BufferItem item = mGraphicBufferListener->getBufferItem();
-    sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item);
-
-    // get pointer to graphic buffer
-    void* bufPtr;
-    buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr);
-
-    // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format
-    // conversion is hardcoded until we add this.
-    // TODO: check input format and convert only if necessary
-    // copy RGBA graphic buffer into temporary ARGB input buffer
-    BufferInfo *inputInfo = new BufferInfo;
-    inputInfo->mData = new MediaCodecBuffer(
-            mInputFormat, new ABuffer(buf->getWidth() * buf->getHeight() * 4));
-    ALOGV("Copying surface data into temp buffer.");
-    convertRGBAToARGB(
-            (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(),
-            buf->getStride(), inputInfo->mData->data());
-    inputInfo->mBufferID = item.mSlot;
-    inputInfo->mGeneration = mGeneration;
-    inputInfo->mOutputFlags = 0;
-    inputInfo->mStatus = BufferInfo::OWNED_BY_US;
-    inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000);
-
-    mAvailableInputBuffers.push_back(inputInfo);
-
-    mGraphicBufferListener->releaseBuffer(item);
-
-    signalProcessBuffers();
-}
-
-void MediaFilter::onSignalEndOfInputStream() {
-    // if using input surface, need to send an EOS output buffer
-    if (mGraphicBufferListener != NULL) {
-        Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput];
-        BufferInfo* eosBuf;
-        bool foundBuf = false;
-        for (size_t i = 0; i < kBufferCountActual; i++) {
-            eosBuf = &outputBufs->editItemAt(i);
-            if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) {
-                foundBuf = true;
-                break;
-            }
-        }
-
-        if (!foundBuf) {
-            ALOGE("onSignalEndOfInputStream failed to find an output buffer");
-            return;
-        }
-
-        eosBuf->mOutputFlags = BUFFER_FLAG_END_OF_STREAM;
-        eosBuf->mGeneration = mGeneration;
-        eosBuf->mData->setRange(0, 0);
-        postDrainThisBuffer(eosBuf);
-        ALOGV("Posted EOS on output buffer %u", eosBuf->mBufferID);
-    }
-
-    mPortEOS[kPortIndexOutput] = true;
-    mCallback->onSignaledInputEOS(OK);
-
-    ALOGV("Output stream saw EOS.");
-}
-
-}   // namespace android
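
The heart of the removed component is the pairing logic in processBuffers(): it only makes progress when one input and one output buffer are both free, runs the configured SimpleFilter over them, copies the timestamp (and any error) across, and then returns each buffer to its side of MediaCodec. A stripped-down sketch of that step, with std::deque standing in for the BufferInfo bookkeeping and the EOS/surface-input paths omitted:

#include <deque>

// Simplified sketch of MediaFilter::processBuffers(): pair one input with one
// output, run the filter, propagate timestamp/error, hand both buffers back.
static void processOnePair(std::deque<sp<MediaCodecBuffer>> &inputs,
                           std::deque<sp<MediaCodecBuffer>> &outputs,
                           const sp<SimpleFilter> &filter) {
    if (inputs.empty() || outputs.empty()) {
        return;                                        // need one of each to do any work
    }
    sp<MediaCodecBuffer> in = inputs.front();
    inputs.pop_front();
    sp<MediaCodecBuffer> out = outputs.front();
    outputs.pop_front();

    status_t err = filter->processBuffers(in, out);
    if (err != OK) {
        out->meta()->setInt32("err", err);             // surface the failure on the output
    }
    int64_t timeUs = 0;
    if (in->meta()->findInt64("timeUs", &timeUs)) {
        out->meta()->setInt64("timeUs", timeUs);       // output inherits the input timestamp
    }
    // ... post "fill this buffer" for `in` and "drain this buffer" for `out`,
    //     then re-check the queues (the removed code re-posted kWhatProcessBuffers) ...
}

The generation counter bumped in onFlush()/onShutdown() is what keeps this loop from consuming buffers that were handed out before a flush: any callback carrying a stale generation is simply re-queued or dropped.
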
diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp
deleted file mode 100644
index 225a375..0000000
--- a/media/libstagefright/filters/RSFilter.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RSFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "RSFilter.h"
-
-namespace android {
-
-RSFilter::RSFilter() {
-
-}
-
-RSFilter::~RSFilter() {
-
-}
-
-status_t RSFilter::configure(const sp<AMessage> &msg) {
-    status_t err = SimpleFilter::configure(msg);
-    if (err != OK) {
-        return err;
-    }
-
-    if (!msg->findString("cacheDir", &mCacheDir)) {
-        ALOGE("Failed to find cache directory in config message.");
-        return NAME_NOT_FOUND;
-    }
-
-    sp<RenderScriptWrapper> wrapper;
-    if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) {
-        ALOGE("Failed to find RenderScriptWrapper in config message.");
-        return NAME_NOT_FOUND;
-    }
-
-    mRS = wrapper->mContext;
-    mCallback = wrapper->mCallback;
-
-    return OK;
-}
-
-status_t RSFilter::start() {
-    // 32-bit elements for ARGB8888
-    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
-    RSC::Type::Builder tb(mRS, e);
-    tb.setX(mWidth);
-    tb.setY(mHeight);
-    RSC::sp<const RSC::Type> t = tb.create();
-
-    mAllocIn = RSC::Allocation::createTyped(mRS, t);
-    mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
-    return OK;
-}
-
-void RSFilter::reset() {
-    mCallback.clear();
-    mAllocOut.clear();
-    mAllocIn.clear();
-    mRS.clear();
-}
-
-status_t RSFilter::setParameters(const sp<AMessage> &msg) {
-    return mCallback->handleSetParameters(msg);
-}
-
-status_t RSFilter::processBuffers(
-        const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
-    mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
-    mCallback->processBuffers(mAllocIn.get(), mAllocOut.get());
-    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
-    return OK;
-}
-
-}   // namespace android
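
Unlike the other filters, the RSFilter removed above did not create its own RenderScript context; the caller handed one in through the configure message under the key "rs-wrapper". A hedged sketch of the caller-side wiring, using only the fields read above; the helper name and parameter list are assumptions for illustration.

// Sketch: building the configure message that RSFilter::configure() expected.
static sp<AMessage> makeRSFilterConfig(
        const RSC::sp<RSC::RS> &rsContext,
        const sp<RenderScriptWrapper::RSFilterCallback> &callback,
        const char *cacheDir, int32_t width, int32_t height) {
    sp<RenderScriptWrapper> wrapper = new RenderScriptWrapper;
    wrapper->mContext = rsContext;        // context stays owned by the application
    wrapper->mCallback = callback;        // invoked per frame from processBuffers()

    sp<AMessage> config = new AMessage;
    config->setString("cacheDir", cacheDir);
    config->setObject("rs-wrapper", wrapper);
    config->setInt32("width", width);
    config->setInt32("height", height);
    config->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
    return config;
}
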
diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp
deleted file mode 100644
index 0a1df05..0000000
--- a/media/libstagefright/filters/SaturationFilter.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SaturationFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "SaturationFilter.h"
-
-namespace android {
-
-status_t SaturationFilter::configure(const sp<AMessage> &msg) {
-    status_t err = SimpleFilter::configure(msg);
-    if (err != OK) {
-        return err;
-    }
-
-    if (!msg->findString("cacheDir", &mCacheDir)) {
-        ALOGE("Failed to find cache directory in config message.");
-        return NAME_NOT_FOUND;
-    }
-
-    return OK;
-}
-
-status_t SaturationFilter::start() {
-    // TODO: use a single RS context object for entire application
-    mRS = new RSC::RS();
-
-    if (!mRS->init(mCacheDir.c_str())) {
-        ALOGE("Failed to initialize RenderScript context.");
-        return NO_INIT;
-    }
-
-    // 32-bit elements for ARGB8888
-    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
-    RSC::Type::Builder tb(mRS, e);
-    tb.setX(mWidth);
-    tb.setY(mHeight);
-    RSC::sp<const RSC::Type> t = tb.create();
-
-    mAllocIn = RSC::Allocation::createTyped(mRS, t);
-    mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
-    mScript = new ScriptC_saturationARGB(mRS);
-
-    mScript->set_gSaturation(mSaturation);
-
-    return OK;
-}
-
-void SaturationFilter::reset() {
-    mScript.clear();
-    mAllocOut.clear();
-    mAllocIn.clear();
-    mRS.clear();
-}
-
-status_t SaturationFilter::setParameters(const sp<AMessage> &msg) {
-    sp<AMessage> params;
-    CHECK(msg->findMessage("params", &params));
-
-    float saturation;
-    if (params->findFloat("saturation", &saturation)) {
-        mSaturation = saturation;
-    }
-
-    return OK;
-}
-
-status_t SaturationFilter::processBuffers(
-        const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
-    mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
-    mScript->forEach_root(mAllocIn, mAllocOut);
-    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
-    return OK;
-}
-
-}   // namespace android
diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp
deleted file mode 100644
index 6c1ca2c..0000000
--- a/media/libstagefright/filters/SimpleFilter.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-status_t SimpleFilter::configure(const sp<AMessage> &msg) {
-    CHECK(msg->findInt32("width", &mWidth));
-    CHECK(msg->findInt32("height", &mHeight));
-    if (!msg->findInt32("stride", &mStride)) {
-        mStride = mWidth;
-    }
-    if (!msg->findInt32("slice-height", &mSliceHeight)) {
-        mSliceHeight = mHeight;
-    }
-    CHECK(msg->findInt32("color-format", &mColorFormatIn));
-    mColorFormatOut = mColorFormatIn;
-
-    return OK;
-}
-
-}   // namespace android
diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp
deleted file mode 100644
index 74b94b7..0000000
--- a/media/libstagefright/filters/ZeroFilter.cpp
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ZeroFilter"
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "ZeroFilter.h"
-
-namespace android {
-
-status_t ZeroFilter::setParameters(const sp<AMessage> &msg) {
-    sp<AMessage> params;
-    CHECK(msg->findMessage("params", &params));
-
-    int32_t invert;
-    if (params->findInt32("invert", &invert)) {
-        mInvertData = (invert != 0);
-    }
-
-    return OK;
-}
-
-status_t ZeroFilter::processBuffers(
-        const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
-    // assuming identical input & output buffers, since we're a copy filter
-    if (mInvertData) {
-        uint32_t* src = (uint32_t*)srcBuffer->data();
-        uint32_t* dest = (uint32_t*)outBuffer->data();
-        for (size_t i = 0; i < srcBuffer->size() / 4; ++i) {
-            *(dest++) = *(src++) ^ 0xFFFFFFFF;
-        }
-    } else {
-        memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size());
-    }
-    outBuffer->setRange(0, srcBuffer->size());
-
-    return OK;
-}
-
-}   // namespace android
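
All of these filters read runtime tweaks out of a nested "params" message, so toggling the invert above (or the blur radius and saturation earlier) from the caller side looks roughly like the sketch below; the helper name is an assumption, but the keys and the nested-message shape match what the deleted code looked up.

// Sketch: runtime parameter update as the deleted filters expected it --
// the actual values live in a nested AMessage under the key "params".
static sp<AMessage> makeInvertToggle(bool enable) {
    sp<AMessage> params = new AMessage;
    params->setInt32("invert", enable ? 1 : 0);   // ZeroFilter: XOR-invert the payload
    // params->setFloat("blur-radius", 8.0f);     // IntrinsicBlurFilter knob
    // params->setFloat("saturation", 0.0f);      // SaturationFilter knob

    sp<AMessage> msg = new AMessage;
    msg->setMessage("params", params);            // filters read values from "params"
    return msg;                                   // deliver via signalSetParameters()
}
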
diff --git a/media/libstagefright/filters/include/filters/ColorConvert.h b/media/libstagefright/filters/include/filters/ColorConvert.h
deleted file mode 100644
index 13faa02..0000000
--- a/media/libstagefright/filters/include/filters/ColorConvert.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef COLOR_CONVERT_H_
-#define COLOR_CONVERT_H_
-
-#include <inttypes.h>
-
-namespace android {
-
-void YUVToRGB(
-        int32_t y, int32_t u, int32_t v,
-        int32_t* r, int32_t* g, int32_t* b);
-
-void convertYUV420spToARGB(
-        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
-        uint8_t *dest);
-
-void convertYUV420spToRGB888(
-        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
-        uint8_t *dest);
-
-// TODO: remove when RGBA support is added to SoftwareRenderer
-void convertRGBAToARGB(
-        uint8_t *src, int32_t width, int32_t height, uint32_t stride,
-        uint8_t *dest);
-
-}   // namespace android
-
-#endif  // COLOR_CONVERT_H_
diff --git a/media/libstagefright/filters/include/filters/GraphicBufferListener.h b/media/libstagefright/filters/include/filters/GraphicBufferListener.h
deleted file mode 100644
index 586bf65..0000000
--- a/media/libstagefright/filters/include/filters/GraphicBufferListener.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef GRAPHIC_BUFFER_LISTENER_H_
-#define GRAPHIC_BUFFER_LISTENER_H_
-
-#include <gui/BufferQueue.h>
-
-namespace android {
-
-struct AMessage;
-
-struct GraphicBufferListener : public BufferQueue::ConsumerListener {
-public:
-    GraphicBufferListener() {};
-
-    status_t init(
-            const sp<AMessage> &notify,
-            size_t bufferWidth, size_t bufferHeight, size_t bufferCount);
-
-    virtual void onFrameAvailable(const BufferItem& item);
-    virtual void onBuffersReleased();
-    virtual void onSidebandStreamChanged();
-
-    // Returns the handle to the producer side of the BufferQueue.  Buffers
-    // queued on this will be received by GraphicBufferListener.
-    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
-        return mProducer;
-    }
-
-    BufferItem getBufferItem();
-    sp<GraphicBuffer> getBuffer(BufferItem item);
-    status_t releaseBuffer(BufferItem item);
-
-    enum {
-        kWhatFrameAvailable = 'frav',
-    };
-
-private:
-    sp<AMessage> mNotify;
-    size_t mNumFramesAvailable;
-
-    mutable Mutex mMutex;
-
-    // Our BufferQueue interfaces. mProducer is passed to the producer through
-    // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
-    // the buffers queued by the producer.
-    sp<IGraphicBufferProducer> mProducer;
-    sp<IGraphicBufferConsumer> mConsumer;
-
-    // Cache of GraphicBuffers from the buffer queue.
-    sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS];
-};
-
-}   // namespace android
-
-#endif  // GRAPHIC_BUFFER_LISTENER_H_

diff --git a/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
deleted file mode 100644
index a2aabfa..0000000
--- a/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef INTRINSIC_BLUR_FILTER_H_
-#define INTRINSIC_BLUR_FILTER_H_
-
-#include "RenderScript.h"
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct IntrinsicBlurFilter : public SimpleFilter {
-public:
-    IntrinsicBlurFilter() : mBlurRadius(1.f) {};
-
-    virtual status_t configure(const sp<AMessage> &msg);
-    virtual status_t start();
-    virtual void reset();
-    virtual status_t setParameters(const sp<AMessage> &msg);
-    virtual status_t processBuffers(
-            const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
-    virtual ~IntrinsicBlurFilter() {};
-
-private:
-    AString mCacheDir;
-    RSC::sp<RSC::RS> mRS;
-    RSC::sp<RSC::Allocation> mAllocIn;
-    RSC::sp<RSC::Allocation> mAllocOut;
-    RSC::sp<RSC::ScriptIntrinsicBlur> mBlur;
-    float mBlurRadius;
-};
-
-}   // namespace android
-
-#endif  // INTRINSIC_BLUR_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/RSFilter.h b/media/libstagefright/filters/include/filters/RSFilter.h
deleted file mode 100644
index 3326284..0000000
--- a/media/libstagefright/filters/include/filters/RSFilter.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RS_FILTER_H_
-#define RS_FILTER_H_
-
-#include <media/stagefright/RenderScriptWrapper.h>
-#include <RenderScript.h>
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct AString;
-
-struct RSFilter : public SimpleFilter {
-public:
-    RSFilter();
-
-    virtual status_t configure(const sp<AMessage> &msg);
-    virtual status_t start();
-    virtual void reset();
-    virtual status_t setParameters(const sp<AMessage> &msg);
-    virtual status_t processBuffers(
-            const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
-    virtual ~RSFilter();
-
-private:
-    AString mCacheDir;
-    sp<RenderScriptWrapper::RSFilterCallback> mCallback;
-    RSC::sp<RSC::RS> mRS;
-    RSC::sp<RSC::Allocation> mAllocIn;
-    RSC::sp<RSC::Allocation> mAllocOut;
-};
-
-}   // namespace android
-
-#endif  // RS_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/SaturationFilter.h b/media/libstagefright/filters/include/filters/SaturationFilter.h
deleted file mode 100644
index 317e469..0000000
--- a/media/libstagefright/filters/include/filters/SaturationFilter.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SATURATION_FILTER_H_
-#define SATURATION_FILTER_H_
-
-#include <RenderScript.h>
-
-#include "ScriptC_saturationARGB.h"
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct SaturationFilter : public SimpleFilter {
-public:
-    SaturationFilter() : mSaturation(1.f) {};
-
-    virtual status_t configure(const sp<AMessage> &msg);
-    virtual status_t start();
-    virtual void reset();
-    virtual status_t setParameters(const sp<AMessage> &msg);
-    virtual status_t processBuffers(
-            const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
-    virtual ~SaturationFilter() {};
-
-private:
-    AString mCacheDir;
-    RSC::sp<RSC::RS> mRS;
-    RSC::sp<RSC::Allocation> mAllocIn;
-    RSC::sp<RSC::Allocation> mAllocOut;
-    RSC::sp<ScriptC_saturationARGB> mScript;
-    float mSaturation;
-};
-
-}   // namespace android
-
-#endif  // SATURATION_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/SimpleFilter.h b/media/libstagefright/filters/include/filters/SimpleFilter.h
deleted file mode 100644
index a3c2d76..0000000
--- a/media/libstagefright/filters/include/filters/SimpleFilter.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SIMPLE_FILTER_H_
-#define SIMPLE_FILTER_H_
-
-#include <stdint.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-namespace android {
-
-struct AMessage;
-class MediaCodecBuffer;
-
-struct SimpleFilter : public RefBase {
-public:
-    SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0),
-            mColorFormatIn(0), mColorFormatOut(0) {};
-
-    virtual status_t configure(const sp<AMessage> &msg);
-
-    virtual status_t start() = 0;
-    virtual void reset() = 0;
-    virtual status_t setParameters(const sp<AMessage> &msg) = 0;
-    virtual status_t processBuffers(
-            const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) = 0;
-
-protected:
-    int32_t mWidth, mHeight;
-    int32_t mStride, mSliceHeight;
-    int32_t mColorFormatIn, mColorFormatOut;
-
-    virtual ~SimpleFilter() {};
-};
-
-}   // namespace android
-
-#endif  // SIMPLE_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/ZeroFilter.h b/media/libstagefright/filters/include/filters/ZeroFilter.h
deleted file mode 100644
index f941cc8..0000000
--- a/media/libstagefright/filters/include/filters/ZeroFilter.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ZERO_FILTER_H_
-#define ZERO_FILTER_H_
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct ZeroFilter : public SimpleFilter {
-public:
-    ZeroFilter() : mInvertData(false) {};
-
-    virtual status_t start() { return OK; };
-    virtual void reset() {};
-    virtual status_t setParameters(const sp<AMessage> &msg);
-    virtual status_t processBuffers(
-            const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
-    virtual ~ZeroFilter() {};
-
-private:
-    bool mInvertData;
-};
-
-}   // namespace android
-
-#endif  // ZERO_FILTER_H_
diff --git a/media/libstagefright/filters/saturation.rscript b/media/libstagefright/filters/saturation.rscript
deleted file mode 100644
index 2c867ac..0000000
--- a/media/libstagefright/filters/saturation.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-// Sample script for RGB888 support (compare to saturationARGB.rs)
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar3 *v_in, uchar3 *v_out) {
-    // scale 0-255 uchar to 0-1.0 float
-    float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f,
-            v_in->b * 0.003921569f};
-
-    // apply saturation filter
-    float3 result = dot(in, gMonoMult);
-    result = mix(result, in, gSaturation);
-
-    // convert to uchar, copied from rsPackColorTo8888
-    v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
-    v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
-    v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/media/libstagefright/filters/saturationARGB.rscript b/media/libstagefright/filters/saturationARGB.rscript
deleted file mode 100644
index 1de9dd8..0000000
--- a/media/libstagefright/filters/saturationARGB.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
-    v_out->x = v_in->x; // don't modify A
-
-    // get RGB, scale 0-255 uchar to 0-1.0 float
-    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
-            v_in->w * 0.003921569f};
-
-    // apply saturation filter
-    float3 result = dot(rgb, gMonoMult);
-    result = mix(result, rgb, gSaturation);
-
-    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
-    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
-    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
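
Both deleted kernels implement the same per-pixel operation: compute a luma value with the Rec. 601 weights in gMonoMult, then linearly interpolate between that gray and the original color by gSaturation (0 gives grayscale, 1 leaves the pixel unchanged, values above 1 over-saturate). A plain C++ restatement of the ARGB variant for readers without the RenderScript toolchain; the function name and byte-array interface are assumptions.

#include <cstdint>

// Sketch: what root() in saturationARGB.rscript does to one ARGB8888 pixel.
static inline void saturateARGBPixel(const uint8_t in[4], uint8_t out[4], float s) {
    out[0] = in[0];                                       // alpha passes through untouched
    float r = in[1] / 255.f, g = in[2] / 255.f, b = in[3] / 255.f;
    float gray = 0.299f * r + 0.587f * g + 0.114f * b;    // dot(rgb, gMonoMult)
    float rr = gray + (r - gray) * s;                     // mix(gray, rgb, s), per channel
    float gg = gray + (g - gray) * s;
    float bb = gray + (b - gray) * s;
    auto toByte = [](float v) -> uint8_t {                // clamp and round, like the kernel
        v = v * 255.f + 0.5f;
        return (uint8_t)(v < 0.f ? 0.f : (v > 255.f ? 255.f : v));
    };
    out[1] = toByte(rr);
    out[2] = toByte(gg);
    out[3] = toByte(bb);
}
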
diff --git a/media/libstagefright/foundation/tests/AMessage_test.cpp b/media/libstagefright/foundation/tests/AMessage_test.cpp
deleted file mode 100644
index 2b11326..0000000
--- a/media/libstagefright/foundation/tests/AMessage_test.cpp
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "AData_test"
-
-#include <gtest/gtest.h>
-#include <utils/RefBase.h>
-
-#include <media/stagefright/foundation/AMessage.h>
-
-using namespace android;
-
-class AMessageTest : public ::testing::Test {
-};
-
-
-TEST(AMessage_tests, item_manipulation) {
-  sp<AMessage> m1 = new AMessage();
-
-  m1->setInt32("value", 2);
-  m1->setInt32("bar", 3);
-
-  int32_t i32;
-  EXPECT_TRUE(m1->findInt32("value", &i32));
-  EXPECT_EQ(2, i32);
-
-  EXPECT_TRUE(m1->findInt32("bar", &i32));
-  EXPECT_EQ(3, i32);
-
-
-  m1->setInt64("big", INT64_MAX);
-  m1->setInt64("smaller", INT64_MAX - 2);
-  m1->setInt64("smallest", 257);
-
-  int64_t i64;
-  EXPECT_TRUE(m1->findInt64("big", &i64));
-  EXPECT_EQ(INT64_MAX, i64);
-
-  EXPECT_TRUE(m1->findInt64("smaller", &i64));
-  EXPECT_EQ(INT64_MAX - 2, i64);
-
-  m1->setSize("size1", 257);
-  m1->setSize("size2", 1023);
-
-  size_t sizing;
-  EXPECT_TRUE(m1->findSize("size2", &sizing));
-  EXPECT_EQ(1023, sizing);
-  EXPECT_TRUE(m1->findSize("size1", &sizing));
-  EXPECT_EQ(257, sizing);
-
-  m1->setDouble("precise", 10.5);
-  m1->setDouble("small", 0.125);
-
-  double d;
-  EXPECT_TRUE(m1->findDouble("precise", &d));
-  EXPECT_EQ(10.5, d);
-
-  EXPECT_TRUE(m1->findDouble("small", &d));
-  EXPECT_EQ(0.125, d);
-
-  // should be unchanged from the top of the test
-  EXPECT_TRUE(m1->findInt32("bar", &i32));
-  EXPECT_EQ(3, i32);
-
-  EXPECT_FALSE(m1->findInt32("nonesuch", &i32));
-  EXPECT_FALSE(m1->findInt64("nonesuch2", &i64));
-  // types disagree, not found
-  EXPECT_FALSE(m1->findInt32("big", &i32));
-  EXPECT_FALSE(m1->findInt32("precise", &i32));
-
-  // integral types should come back true
-  EXPECT_TRUE(m1->findAsInt64("big", &i64));
-  EXPECT_EQ(INT64_MAX, i64);
-  EXPECT_TRUE(m1->findAsInt64("bar", &i64));
-  EXPECT_EQ(3, i64);
-  EXPECT_FALSE(m1->findAsInt64("precise", &i64));
-
-  // recovers ints, size, and floating point values
-  float value;
-  EXPECT_TRUE(m1->findAsFloat("value", &value));
-  EXPECT_EQ(2, value);
-  EXPECT_TRUE(m1->findAsFloat("smallest", &value));
-  EXPECT_EQ(257, value);
-  EXPECT_TRUE(m1->findAsFloat("size2", &value));
-  EXPECT_EQ(1023, value);
-  EXPECT_TRUE(m1->findAsFloat("precise", &value));
-  EXPECT_EQ(10.5, value);
-  EXPECT_TRUE(m1->findAsFloat("small", &value));
-  EXPECT_EQ(0.125, value);
-
-
-  // need to handle still:
-  // strings
-  // Object
-  // Buffer
-  // Message (nested)
-  //
-
-  // removal
-  m1->setInt32("shortlived", 2);
-  m1->setInt32("alittlelonger", 2);
-  EXPECT_EQ(OK, m1->removeEntryByName("shortlived"));
-  EXPECT_EQ(BAD_VALUE, m1->removeEntryByName(nullptr));
-  EXPECT_EQ(BAD_INDEX, m1->removeEntryByName("themythicalnonesuch"));
-  EXPECT_FALSE(m1->findInt32("shortlived", &i32));
-  EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
-
-  EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
-
-}
-
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 85fd8b7..dd49714 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -62,5 +62,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_httplive",
+        vector: "remote",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index da962d1..903280f 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -94,9 +94,11 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) override;
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) override;
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+    virtual void pollForRenderedBuffers() override;
     virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 5a21755..08c7917 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -17,7 +17,10 @@
 #ifndef A_CODEC_H_
 #define A_CODEC_H_
 
+#include <set>
 #include <stdint.h>
+#include <list>
+#include <vector>
 #include <android/native_window.h>
 #include <media/hardware/MetadataBufferType.h>
 #include <media/MediaCodecInfo.h>
@@ -265,11 +268,12 @@
     sp<AMessage> mBaseOutputFormat;
 
     FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
-    Vector<BufferInfo> mBuffers[2];
+    std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
+    std::set<int64_t> mDecodeOnlyTimesUs;
 
-    List<sp<AMessage> > mDeferredQueue;
+    std::list<sp<AMessage>> mDeferredQueue;
 
     sp<AMessage> mLastOutputFormat;
     bool mIsVideo;
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 5e84977..65d5246 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -21,7 +21,6 @@
 #include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
 #include <media/stagefright/MediaSource.h>
-#include <media/MicrophoneInfo.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <utils/List.h>
 
@@ -83,7 +82,7 @@
     status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
     status_t removeAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
 
-    status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+    status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
     status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
     status_t setPreferredMicrophoneFieldDimension(float zoom);
 
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 48721ec..916d41e 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -385,7 +385,8 @@
             size_t offset,
             const CryptoPlugin::SubSample *subSamples,
             size_t numSubSamples,
-            const sp<MediaCodecBuffer> &buffer) {
+            const sp<MediaCodecBuffer> &buffer,
+            AString* errorDetailMsg) {
         (void)memory;
         (void)secure;
         (void)key;
@@ -396,6 +397,7 @@
         (void)subSamples;
         (void)numSubSamples;
         (void)buffer;
+        (void)errorDetailMsg;
         return -ENOSYS;
     }
     /**
@@ -407,6 +409,14 @@
      */
     virtual status_t renderOutputBuffer(
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) = 0;
+
+    /**
+     * Poll for updates about rendered buffers.
+     *
+     * Triggers callbacks to CodecCallback::onOutputFramesRendered.
+     */
+    virtual void pollForRenderedBuffers() = 0;
+
     /**
      * Discard a buffer to the underlying CodecBase object.
      *
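
The new pollForRenderedBuffers() hook above is a pure virtual on BufferChannelBase that clients poll to learn about rendered frames. As a hedged sketch, a caller could drive it periodically like the loop below; the sleep-based loop, interval, and shared_ptr wiring are illustrative assumptions only (MediaCodec itself drives this via a rescheduleable AMessage, per the MediaCodec.h change later in this patch).

// Sketch only: periodically drive pollForRenderedBuffers(); the polling
// interval and the shared_ptr wiring are illustrative assumptions.
#include <atomic>
#include <chrono>
#include <memory>
#include <thread>

#include <media/stagefright/CodecBase.h>

void pollRenderedLoop(const std::shared_ptr<android::BufferChannelBase> &channel,
                      const std::atomic<bool> &running) {
    using namespace std::chrono_literals;
    while (running.load()) {
        // Each poll may trigger CodecCallback::onOutputFramesRendered for
        // frames that reached the display since the previous poll.
        channel->pollForRenderedBuffers();
        std::this_thread::sleep_for(100ms);
    }
}
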
diff --git a/media/libstagefright/include/media/stagefright/CodecErrorLog.h b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
new file mode 100644
index 0000000..673117a
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_ERROR_LOG_H_
+
+#define CODEC_ERROR_LOG_H_
+
+#include <sstream>
+#include <string>
+
+#include <android-base/thread_annotations.h>
+
+#include <media/stagefright/foundation/AString.h>
+
+namespace android {
+
+/**
+ * CodecErrorLog gathers what happened during codec failures, and makes it
+ * available to clients for debugging purposes.
+ */
+class CodecErrorLog {
+public:
+    CodecErrorLog() = default;
+
+    /**
+     * Log a line of message.
+     *
+     * \note the message should be readable to developers who may not be
+     *       familiar with MediaCodec internals
+     */
+    void log(const char *tag, const char *message);
+    void log(const char *tag, const std::string &message);
+
+    /**
+     * Extract the accumulated log as string. This operation clears the log.
+     */
+    std::string extract();
+
+    /**
+     * Clears the previous log.
+     */
+    void clear();
+
+private:
+    mutable std::mutex mLock;
+    std::stringstream mStream GUARDED_BY(mLock);
+};
+
+}  // namespace android
+
+#endif  // CODEC_ERROR_LOG_H_
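
A minimal usage sketch of the new CodecErrorLog, assuming a caller that already holds a reference to the log; the reportFailure() helper and the "Sample" tag below are illustrative, not part of this change.

// Illustrative sketch of the log/extract flow; names here are hypothetical.
#include <string>

#include <media/stagefright/CodecErrorLog.h>

void reportFailure(android::CodecErrorLog &errorLog) {
    // Accumulate human-readable context as the failure unfolds.
    errorLog.log("Sample", "failed to configure component: no suitable surface");
    errorLog.log("Sample", std::string("component state: RELEASING"));

    // extract() returns the accumulated text and clears the log, so the
    // next failure starts from a clean slate.
    std::string details = errorLog.extract();
    (void)details;  // attach to the error surfaced to the client
}
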
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 7a05f00..e8b89c7 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -23,7 +23,10 @@
 #include <stdint.h>
 #include <utils/Errors.h>
 
+#include <optional>
+
 #include <OMX_Video.h>
+#include <media/hardware/VideoAPI.h>
 
 namespace android {
 
@@ -35,6 +38,8 @@
 
     bool isDstRGB() const;
 
+    void setSrcMediaImage2(MediaImage2 img);
+
     void setSrcColorSpace(uint32_t standard, uint32_t range, uint32_t transfer);
 
     status_t convert(
@@ -49,18 +54,91 @@
 
     struct Coeffs; // matrix coefficients
 
-private:
     struct ColorSpace {
         uint32_t mStandard;
         uint32_t mRange;
         uint32_t mTransfer;
 
-        bool isBt2020() const;
-
+        bool isLimitedRange() const;
         // libyuv helper methods
-        bool isH420() const;
-        bool isI420() const;
-        bool isJ420() const;
+        // BT.2020 limited range
+        bool isBt2020() const;
+        // BT.2020 full range
+        bool isBtV2020() const;
+        // 709 limited range
+        bool isH709() const;
+        // 709 full range
+        bool isF709() const;
+        // 601 limited range
+        bool isI601() const;
+        // 601 full range
+        // also called "JPEG" in libyuv
+        bool isJ601() const;
+    };
+
+private:
+
+    typedef enum : uint8_t {
+        ImageLayoutUnknown = 0x0,
+        ImageLayout420SemiPlanar = 0x1,
+        ImageLayout420Planar = 0x2
+    } Layout_t;
+
+    typedef enum : uint8_t {
+        ImageSamplingUnknown = 0x0,
+        ImageSamplingYUV420 = 0x1,
+    } Sampling_t;
+
+    // this is the actual usable bit depth
+    typedef enum : uint8_t {
+        ImageBitDepthInvalid = 0x0,
+        ImageBitDepth8 = 0x1,
+        ImageBitDepth10 = 0x2,
+        ImageBitDepth12 = 0x3,
+        ImageBitDepth16 = 0x4
+    } BitDepth_t;
+
+    struct BitmapParams;
+
+
+    class Image {
+    public:
+        Image(const MediaImage2& img);
+        virtual ~Image() {}
+
+        const MediaImage2 getMediaImage2() const {
+            return mImage;
+        }
+
+        Layout_t getLayout() const {
+            return mLayout;
+        }
+        Sampling_t getSampling() const {
+            return mSampling;
+        }
+        BitDepth_t getBitDepth() const {
+            return mBitDepth;
+        }
+
+        // Returns the plane offsets and strides for this image
+        // after accounting for the src crop offsets
+        status_t getYUVPlaneOffsetAndStride(
+                const BitmapParams &src,
+                uint32_t *y_offset,
+                uint32_t *u_offset,
+                uint32_t *v_offset,
+                size_t *y_stride,
+                size_t *u_stride,
+                size_t *v_stride
+                ) const;
+
+        bool isNV21() const;
+
+    private:
+        MediaImage2 mImage;
+        Layout_t mLayout;
+        Sampling_t mSampling;
+        BitDepth_t mBitDepth;
     };
 
     struct BitmapParams {
@@ -74,6 +152,8 @@
         size_t cropWidth() const;
         size_t cropHeight() const;
 
+        bool isValid() const;
+
         void *mBits;
         OMX_COLOR_FORMATTYPE mColorFormat;
         size_t mWidth, mHeight;
@@ -82,6 +162,7 @@
     };
 
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
+    std::optional<Image> mSrcImage;
     ColorSpace mSrcColorSpace;
     uint8_t *mClip;
     uint16_t *mClip10Bit;
@@ -89,14 +170,30 @@
     uint8_t *initClip();
     uint16_t *initClip10Bit();
 
+    // resolve YUVFormat from YUV420Flexible
+    bool isValidForMediaImage2() const;
+
+    // get plane offsets from Formats
+    status_t getSrcYUVPlaneOffsetAndStride(
+            const BitmapParams &src,
+            uint32_t *y_offset,
+            uint32_t *u_offset,
+            uint32_t *v_offset,
+            size_t *y_stride,
+            size_t *u_stride,
+            size_t *v_stride) const;
+
+    status_t convertYUVMediaImage(
+        const BitmapParams &src, const BitmapParams &dst);
+
     // returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
     const struct Coeffs *getMatrix() const;
 
     status_t convertCbYCrY(
             const BitmapParams &src, const BitmapParams &dst);
 
-    status_t convertYUV420Planar(
-            const BitmapParams &src, const BitmapParams &dst);
+    // status_t convertYUV420Planar(
+    //        const BitmapParams &src, const BitmapParams &dst);
 
     status_t convertYUV420PlanarUseLibYUV(
             const BitmapParams &src, const BitmapParams &dst);
@@ -113,19 +210,6 @@
     status_t convertYUV420Planar16ToRGB(
             const BitmapParams &src, const BitmapParams &dst);
 
-    status_t convertQCOMYUV420SemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
-    status_t convertYUV420SemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
-    status_t convertYUV420SemiPlanarBase(
-            const BitmapParams &src, const BitmapParams &dst,
-            const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21 = false);
-
-    status_t convertTIYUV420PackedSemiPlanar(
-            const BitmapParams &src, const BitmapParams &dst);
-
     status_t convertYUVP010(
                 const BitmapParams &src, const BitmapParams &dst);
 
@@ -133,6 +217,7 @@
                 const BitmapParams &src, const BitmapParams &dst);
 
     ColorConverter(const ColorConverter &);
+
     ColorConverter &operator=(const ColorConverter &);
 };
 
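
For context on what getYUVPlaneOffsetAndStride() above is expected to compute, here is a hedged sketch of the offset/stride arithmetic for a contiguous I420 (YUV 4:2:0 planar) buffer with a crop origin; the PlaneLayout struct and i420Layout() helper are illustrative only and not the real Image API.

// Sketch only: plane offsets/strides for contiguous I420 with a crop origin.
#include <cstddef>

struct PlaneLayout {
    size_t yOffset, uOffset, vOffset;
    size_t yStride, uStride, vStride;
};

PlaneLayout i420Layout(size_t width, size_t height,
                       size_t cropLeft, size_t cropTop) {
    PlaneLayout out;
    out.yStride = width;                    // full-width luma rows
    out.uStride = out.vStride = width / 2;  // chroma subsampled by 2 per axis

    // Plane start positions within the buffer, shifted by the crop origin.
    out.yOffset = cropTop * out.yStride + cropLeft;
    const size_t uPlaneStart = width * height;
    const size_t vPlaneStart = uPlaneStart + (width / 2) * (height / 2);
    out.uOffset = uPlaneStart + (cropTop / 2) * out.uStride + cropLeft / 2;
    out.vOffset = vPlaneStart + (cropTop / 2) * out.vStride + cropLeft / 2;
    return out;
}
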
diff --git a/media/libstagefright/include/media/stagefright/CryptoAsync.h b/media/libstagefright/include/media/stagefright/CryptoAsync.h
new file mode 100644
index 0000000..b675518
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CryptoAsync.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CRYPTO_ASYNC_H_
+#define CRYPTO_ASYNC_H_
+
+#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/foundation/Mutexed.h>
+namespace android {
+
+class CryptoAsync: public AHandler {
+public:
+
+    class CryptoAsyncCallback {
+    public:
+
+        virtual ~CryptoAsyncCallback() = default;
+
+        /*
+         * Callback with the result of queuing the decrypted buffer to the
+         * underlying codec. This callback must not block.
+         */
+        virtual void onDecryptComplete(const sp<AMessage>& result) = 0;
+
+        /*
+         * Callback with error information encountered during decryption.
+         * This call must not block. The messages should contain the error
+         * information and the buffer that caused the error.
+         */
+        virtual void onDecryptError(const std::list<sp<AMessage>>& errorMsg) = 0;
+    };
+
+    // Ideally we should return the output of the decryption in the
+    // onDecryptComplete() callback and let the next module take over the
+    // rest of the processing. In the current state, the next step is
+    // to queue the output to the codec, which is done using BufferChannel.
+
+    // In order to avoid a thread hop just to do that, we have created
+    // a dependency on BufferChannel here to queue the buffer to the codec
+    // immediately after decryption.
+    CryptoAsync(std::weak_ptr<BufferChannelBase> bufferChannel)
+        :mState(kCryptoAsyncActive) {
+        mBufferChannel = std::move(bufferChannel);
+    }
+
+    // Destructor
+    virtual ~CryptoAsync();
+
+    inline void setCallback(std::unique_ptr<CryptoAsyncCallback>&& callback) {
+        mCallback = std::move(callback);
+    }
+
+    // Call this function to decrypt the buffer in the message.
+    status_t decrypt(sp<AMessage>& msg);
+
+    // This function stops further processing in the thread and returns
+    // any unprocessed buffers from the queue.
+    // We can use this method to flush, or to clear the queue
+    // upon error. When processing hits an error, the self-processing in
+    // this looper stops; stop() must then be called to clear the queue
+    // before it becomes operational again. It also acts like a reset.
+    void stop(std::list<sp<AMessage>> * const buffers = nullptr);
+
+    // Describes the two actions for decrypt():
+    // kActionDecrypt - decrypts the buffer and queues it to the codec
+    // kActionAttachEncryptedBuffer - decrypts and attaches the buffer
+    //                               and queues it to the codec.
+    // TODO: kActionAttachEncryptedBuffer is meant to work with
+    // BLOCK_MODEL which is not yet implemented.
+    enum : uint32_t {
+        // decryption types
+        kActionDecrypt                 = (1 <<  0),
+        kActionAttachEncryptedBuffer   = (1 <<  1)
+    };
+protected:
+
+    // Message types for the looper
+    enum : uint32_t {
+        // used with decrypt()
+        // The exact decryption type, as described by the enum above,
+        // decides what "action" to take. The "action" should be
+        // part of this message.
+        kWhatDecrypt         = 1,
+        // used with stop()
+        kWhatStop            = 2,
+        // placeholder
+        kWhatDoNothing       = 10
+    };
+
+    // Defines the state of this thread.
+    typedef enum : uint32_t {
+        // kCryptoAsyncActive as long as we have not encountered
+        // any errors during processing. Any error will
+        // put the state to error and the thread then refuses to
+        // do further processing until the error state is cleared
+        // with a call to stop().
+
+        kCryptoAsyncActive  = (0 <<  0),
+        // state of the looper when encountered with error during
+        // processing
+        kCryptoAsyncError   = (1 <<  8)
+    } CryptoAsyncState;
+
+    // Implements kActionDecrypt
+    status_t decryptAndQueue(sp<AMessage>& msg);
+
+    // Implements kActionAttachEncryptedBuffer
+    status_t attachEncryptedBufferAndQueue(sp<AMessage>& msg);
+
+    // Implements the Looper
+    void onMessageReceived(const sp<AMessage>& msg) override;
+
+    std::unique_ptr<CryptoAsyncCallback> mCallback;
+private:
+
+    CryptoAsyncState mState;
+
+    // Queue holding any pending buffers
+    Mutexed<std::list<sp<AMessage>>> mPendingBuffers;
+
+    std::weak_ptr<BufferChannelBase> mBufferChannel;
+};
+
+}  // namespace android
+
+#endif  // CRYPTO_ASYNC_H_
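
A hedged sketch of wiring up the new CryptoAsync handler: the MyCryptoCallback class, the looper name, and the way the BufferChannelBase and input AMessage are obtained are all assumptions for illustration, not part of this change.

// Sketch only: create a CryptoAsync on its own looper, install a callback,
// and submit one input message for decryption.
#include <list>
#include <memory>

#include <media/stagefright/CryptoAsync.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

struct MyCryptoCallback : public CryptoAsync::CryptoAsyncCallback {
    void onDecryptComplete(const sp<AMessage> &result) override {
        // Must not block: note success and return quickly.
        (void)result;
    }
    void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
        // Must not block: record the failed buffers for later handling.
        (void)errorMsgs;
    }
};

void startCryptoAsync(const std::shared_ptr<BufferChannelBase> &bufferChannel,
                      sp<AMessage> &inputMsg) {
    sp<ALooper> looper = new ALooper();
    looper->setName("CryptoAsyncLooper");
    looper->start();

    sp<CryptoAsync> crypto = new CryptoAsync(bufferChannel);
    crypto->setCallback(std::make_unique<MyCryptoCallback>());
    looper->registerHandler(crypto);

    // Queues the buffer described by inputMsg for decryption; the decrypted
    // buffer is forwarded to the codec through bufferChannel.
    crypto->decrypt(inputMsg);
}
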
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 7c3eca6..cf76606 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -196,7 +196,9 @@
     typedef key_value_pair_t< const char *, Vector<uint16_t> > ItemRefs;
     typedef struct _ItemInfo {
         bool isGrid() const { return !strcmp("grid", itemType); }
-        bool isImage() const { return !strcmp("hvc1", itemType) || isGrid(); }
+        bool isImage() const {
+            return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
+        }
         const char *itemType;
         uint16_t itemId;
         bool isPrimary;
@@ -224,10 +226,11 @@
         int32_t width;
         int32_t height;
         int32_t rotation;
-        sp<ABuffer> hvcc;
+        sp<ABuffer> data;
     } ItemProperty;
 
     bool mHasFileLevelMeta;
+    bool mIsAvif; // used to differentiate HEIC and AVIF under the same OUTPUT_FORMAT_HEIF
     uint64_t mFileLevelMetaDataSize;
     bool mHasMoovBox;
     uint32_t mPrimaryItemId;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 1d2d711..52d7d3d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -18,6 +18,7 @@
 
 #define MEDIA_CODEC_H_
 
+#include <list>
 #include <memory>
 #include <vector>
 
@@ -27,7 +28,11 @@
 #include <media/MediaMetrics.h>
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/CodecErrorLog.h>
 #include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaHistogram.h>
+#include <media/stagefright/PlaybackDurationAccumulator.h>
+#include <media/stagefright/VideoRenderQualityTracker.h>
 #include <utils/Vector.h>
 
 class C2Buffer;
@@ -38,6 +43,7 @@
 namespace android {
 namespace media {
 class MediaResourceParcel;
+class ClientConfigParcel;
 } // media
 } // android
 } // aidl
@@ -54,12 +60,12 @@
 struct CodecParameterDescriptor;
 class IBatteryStats;
 struct ICrypto;
+class CryptoAsync;
 class MediaCodecBuffer;
 class IMemory;
 struct PersistentSurface;
 class SoftwareRenderer;
 class Surface;
-class PlaybackDurationAccumulator;
 namespace hardware {
 namespace cas {
 namespace native {
@@ -69,6 +75,7 @@
 
 using hardware::cas::native::V1_0::IDescrambler;
 using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientConfigParcel;
 
 struct MediaCodec : public AHandler {
     enum Domain {
@@ -81,6 +88,7 @@
     enum ConfigureFlags {
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
+        CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
     };
 
     enum BufferFlags {
@@ -89,6 +97,7 @@
         BUFFER_FLAG_EOS           = 4,
         BUFFER_FLAG_PARTIAL_FRAME = 8,
         BUFFER_FLAG_MUXER_DATA    = 16,
+        BUFFER_FLAG_DECODE_ONLY   = 32,
     };
 
     enum CVODegree {
@@ -104,6 +113,7 @@
         CB_ERROR = 3,
         CB_OUTPUT_FORMAT_CHANGED = 4,
         CB_RESOURCE_RECLAIMED = 5,
+        CB_CRYPTO_ERROR = 6,
     };
 
     static const pid_t kNoPid = -1;
@@ -296,6 +306,8 @@
         T value;
     };
 
+    inline CodecErrorLog &getErrorLog() { return mErrorLog; }
+
 protected:
     virtual ~MediaCodec();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -320,6 +332,7 @@
         RELEASING,
     };
     std::string stateString(State state);
+    std::string apiStateString();
 
     enum {
         kPortIndexInput         = 0,
@@ -356,6 +369,7 @@
         kWhatSetNotification                = 'setN',
         kWhatDrmReleaseCrypto               = 'rDrm',
         kWhatCheckBatteryStats              = 'chkB',
+        kWhatGetMetrics                     = 'getM',
     };
 
     enum {
@@ -373,6 +387,7 @@
         kFlagIsComponentAllocated       = 2048,
         kFlagPushBlankBuffersOnShutdown = 4096,
         kFlagUseBlockModel              = 8192,
+        kFlagUseCryptoAsync             = 16384,
     };
 
     struct BufferInfo {
@@ -407,6 +422,13 @@
         kBufferRendered,
     };
 
+    enum class DequeueOutputResult {
+        kNoBuffer,
+        kDiscardedBuffer,
+        kRepliedWithError,
+        kSuccess,
+    };
+
     struct ResourceManagerServiceProxy;
 
     State mState;
@@ -426,16 +448,22 @@
     sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
 
+    Mutex mMetricsLock;
     mediametrics_handle_t mMetricsHandle = 0;
+    bool mMetricsToUpload = false;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
     void updateMediametrics();
     void flushMediametrics();
+    void resetMetricsFields();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
-    void updatePlaybackDuration(const sp<AMessage> &msg);
+    void processRenderedFrames(const sp<AMessage> &msg);
+
+    inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
 
     sp<AMessage> mOutputFormat;
     sp<AMessage> mInputFormat;
@@ -453,12 +481,41 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
-    int32_t mConfigColorTransfer;
-    bool mHDRStaticInfo;
-    bool mHDR10PlusInfo;
-    void updateHDRFormatMetric();
-    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
-            const AString &mediaType);
+    enum {
+        kFlagHasHdrStaticInfo   = 1,
+        kFlagHasHdr10PlusInfo   = 2,
+    };
+    uint32_t mHdrInfoFlags;
+    void updateHdrMetrics(bool isConfig);
+    hdr_format getHdrFormat(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    bool profileSupport10Bits(const AString &mime, const int32_t profile);
+
+    struct ApiUsageMetrics {
+        bool isArrayMode;
+        enum OperationMode {
+            kUnknownMode = 0,
+            kSynchronousMode = 1,
+            kAsynchronousMode = 2,
+            kBlockMode = 3,
+        };
+        OperationMode operationMode;
+        bool isUsingOutputSurface;
+        struct InputBufferSize {
+            int32_t appMax;  // max size configured by the app
+            int32_t usedMax;  // max size actually used
+            int32_t codecMax;  // max size suggested by the codec
+        } inputBufferSize;
+    } mApiUsageMetrics;
+    struct ReliabilityContextMetrics {
+        int32_t flushCount;
+        int32_t setOutputSurfaceCount;
+        int32_t resolutionChangeCount;
+    } mReliabilityContextMetrics;
 
     // initial create parameters
     AString mInitName;
@@ -471,7 +528,8 @@
     // the (possibly) updated format is returned in place.
     status_t shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags);
+            uint32_t flags,
+            mediametrics_handle_t handle);
 
     // populate the format shaper library with information for this codec encoding
     // for the indicated media type
@@ -483,7 +541,7 @@
     // stop/flush/reset/release.
     Mutex mBufferLock;
 
-    List<size_t> mAvailPortBuffers[2];
+    std::list<size_t> mAvailPortBuffers[2];
     std::vector<BufferInfo> mPortBuffers[2];
 
     int32_t mDequeueInputTimeoutGeneration;
@@ -501,7 +559,7 @@
 
     sp<IDescrambler> mDescrambler;
 
-    List<sp<ABuffer> > mCSD;
+    std::list<sp<ABuffer> > mCSD;
 
     sp<AMessage> mActivityNotify;
 
@@ -510,9 +568,12 @@
     bool mCpuBoostRequested;
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
+    sp<CryptoAsync> mCryptoAsync;
+    sp<ALooper> mCryptoLooper;
 
-    std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
-    bool mIsSurfaceToScreen;
+    bool mIsSurfaceToDisplay;
+    PlaybackDurationAccumulator mPlaybackDurationAccumulator;
+    VideoRenderQualityTracker mVideoRenderQualityTracker;
 
     MediaCodec(
             const sp<ALooper> &looper, pid_t pid, uid_t uid,
@@ -543,7 +604,9 @@
             sp<MediaCodecBuffer> *buffer, sp<AMessage> *format);
 
     bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
-    bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
+    DequeueOutputResult handleDequeueOutputBuffer(
+            const sp<AReplyToken> &replyID,
+            bool newRequest = false);
     void cancelPendingDequeueOperations();
 
     void extractCSD(const sp<AMessage> &format);
@@ -561,6 +624,7 @@
 
     void onInputBufferAvailable();
     void onOutputBufferAvailable();
+    void onCryptoError(const sp<AMessage> &msg);
     void onError(status_t err, int32_t actionCode, const char *detail = NULL);
     void onOutputFormatChanged();
 
@@ -617,6 +681,9 @@
                                                  // when low latency is on
     int64_t mInputBufferCounter;  // number of input buffers queued since last reset/flush
 
+    // A rescheduleable message that periodically polls for rendered buffers
+    sp<AMessage> mMsgPollForRenderedBuffers;
+
     class ReleaseSurface;
     std::unique_ptr<ReleaseSurface> mReleaseSurface;
 
@@ -627,6 +694,7 @@
 
     void statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
     void statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
+    bool discardDecodeOnlyOutputBuffer(size_t index);
 
     enum {
         // the default shape of our latency histogram buckets
@@ -647,36 +715,17 @@
     int mRecentHead;
     Mutex mRecentLock;
 
-    class Histogram {
-      public:
-        Histogram() : mFloor(0), mWidth(0), mBelow(0), mAbove(0),
-                      mMin(INT64_MAX), mMax(INT64_MIN), mSum(0), mCount(0),
-                      mBucketCount(0), mBuckets(NULL) {};
-        ~Histogram() { clear(); };
-        void clear() { if (mBuckets != NULL) free(mBuckets); mBuckets = NULL; };
-        bool setup(int nbuckets, int64_t width, int64_t floor = 0);
-        void insert(int64_t sample);
-        int64_t getMin() const { return mMin; }
-        int64_t getMax() const { return mMax; }
-        int64_t getCount() const { return mCount; }
-        int64_t getSum() const { return mSum; }
-        int64_t getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
-        std::string emit();
-      private:
-        int64_t mFloor, mCeiling, mWidth;
-        int64_t mBelow, mAbove;
-        int64_t mMin, mMax, mSum, mCount;
+    MediaHistogram<int64_t> mLatencyHist;
 
-        int mBucketCount;
-        int64_t *mBuckets;
-    };
-
-    Histogram mLatencyHist;
+    // A unique ID for the codec, used by the metrics.
+    uint64_t mCodecId = 0;
 
     std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
     std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
     friend class MediaTestHelper;
 
+    CodecErrorLog mErrorLog;
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
 
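
The inline Histogram class removed above is replaced by MediaHistogram<int64_t>, whose API is not shown in this diff. As a hedged sketch, the removed class performed bucket bookkeeping along these lines; the LatencyHistogramSketch name is illustrative only.

// Sketch only: mirrors the removed Histogram's setup/insert/getAvg bookkeeping.
#include <cstdint>
#include <vector>

class LatencyHistogramSketch {
public:
    bool setup(int nbuckets, int64_t width, int64_t floor = 0) {
        if (nbuckets <= 0 || width <= 0) return false;
        mFloor = floor;
        mWidth = width;
        mBuckets.assign(nbuckets, 0);
        mBelow = mAbove = mSum = mCount = 0;
        return true;
    }
    void insert(int64_t sample) {
        mSum += sample;
        ++mCount;
        if (sample < mFloor) { ++mBelow; return; }
        const int64_t idx = (sample - mFloor) / mWidth;
        if (idx >= static_cast<int64_t>(mBuckets.size())) { ++mAbove; return; }
        ++mBuckets[idx];
    }
    int64_t getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
private:
    int64_t mFloor = 0, mWidth = 1, mBelow = 0, mAbove = 0, mSum = 0, mCount = 0;
    std::vector<int64_t> mBuckets;
};
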
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 78792c5..7334639 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -21,15 +21,15 @@
 namespace {
 
 // from MediaCodecInfo.java
-constexpr int32_t AVCProfileBaseline = 0x01;
-constexpr int32_t AVCProfileMain     = 0x02;
-constexpr int32_t AVCProfileExtended = 0x04;
-constexpr int32_t AVCProfileHigh     = 0x08;
-constexpr int32_t AVCProfileHigh10   = 0x10;
-constexpr int32_t AVCProfileHigh422  = 0x20;
-constexpr int32_t AVCProfileHigh444  = 0x40;
-constexpr int32_t AVCProfileConstrainedBaseline = 0x10000;
-constexpr int32_t AVCProfileConstrainedHigh     = 0x80000;
+inline constexpr int32_t AVCProfileBaseline = 0x01;
+inline constexpr int32_t AVCProfileMain     = 0x02;
+inline constexpr int32_t AVCProfileExtended = 0x04;
+inline constexpr int32_t AVCProfileHigh     = 0x08;
+inline constexpr int32_t AVCProfileHigh10   = 0x10;
+inline constexpr int32_t AVCProfileHigh422  = 0x20;
+inline constexpr int32_t AVCProfileHigh444  = 0x40;
+inline constexpr int32_t AVCProfileConstrainedBaseline = 0x10000;
+inline constexpr int32_t AVCProfileConstrainedHigh     = 0x80000;
 
 inline static const char *asString_AVCProfile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -46,26 +46,26 @@
     }
 }
 
-constexpr int32_t AVCLevel1       = 0x01;
-constexpr int32_t AVCLevel1b      = 0x02;
-constexpr int32_t AVCLevel11      = 0x04;
-constexpr int32_t AVCLevel12      = 0x08;
-constexpr int32_t AVCLevel13      = 0x10;
-constexpr int32_t AVCLevel2       = 0x20;
-constexpr int32_t AVCLevel21      = 0x40;
-constexpr int32_t AVCLevel22      = 0x80;
-constexpr int32_t AVCLevel3       = 0x100;
-constexpr int32_t AVCLevel31      = 0x200;
-constexpr int32_t AVCLevel32      = 0x400;
-constexpr int32_t AVCLevel4       = 0x800;
-constexpr int32_t AVCLevel41      = 0x1000;
-constexpr int32_t AVCLevel42      = 0x2000;
-constexpr int32_t AVCLevel5       = 0x4000;
-constexpr int32_t AVCLevel51      = 0x8000;
-constexpr int32_t AVCLevel52      = 0x10000;
-constexpr int32_t AVCLevel6       = 0x20000;
-constexpr int32_t AVCLevel61      = 0x40000;
-constexpr int32_t AVCLevel62      = 0x80000;
+inline constexpr int32_t AVCLevel1       = 0x01;
+inline constexpr int32_t AVCLevel1b      = 0x02;
+inline constexpr int32_t AVCLevel11      = 0x04;
+inline constexpr int32_t AVCLevel12      = 0x08;
+inline constexpr int32_t AVCLevel13      = 0x10;
+inline constexpr int32_t AVCLevel2       = 0x20;
+inline constexpr int32_t AVCLevel21      = 0x40;
+inline constexpr int32_t AVCLevel22      = 0x80;
+inline constexpr int32_t AVCLevel3       = 0x100;
+inline constexpr int32_t AVCLevel31      = 0x200;
+inline constexpr int32_t AVCLevel32      = 0x400;
+inline constexpr int32_t AVCLevel4       = 0x800;
+inline constexpr int32_t AVCLevel41      = 0x1000;
+inline constexpr int32_t AVCLevel42      = 0x2000;
+inline constexpr int32_t AVCLevel5       = 0x4000;
+inline constexpr int32_t AVCLevel51      = 0x8000;
+inline constexpr int32_t AVCLevel52      = 0x10000;
+inline constexpr int32_t AVCLevel6       = 0x20000;
+inline constexpr int32_t AVCLevel61      = 0x40000;
+inline constexpr int32_t AVCLevel62      = 0x80000;
 
 inline static const char *asString_AVCLevel(int32_t i, const char *def = "??") {
     switch (i) {
@@ -93,15 +93,15 @@
     }
 }
 
-constexpr int32_t H263ProfileBaseline             = 0x01;
-constexpr int32_t H263ProfileH320Coding           = 0x02;
-constexpr int32_t H263ProfileBackwardCompatible   = 0x04;
-constexpr int32_t H263ProfileISWV2                = 0x08;
-constexpr int32_t H263ProfileISWV3                = 0x10;
-constexpr int32_t H263ProfileHighCompression      = 0x20;
-constexpr int32_t H263ProfileInternet             = 0x40;
-constexpr int32_t H263ProfileInterlace            = 0x80;
-constexpr int32_t H263ProfileHighLatency          = 0x100;
+inline constexpr int32_t H263ProfileBaseline             = 0x01;
+inline constexpr int32_t H263ProfileH320Coding           = 0x02;
+inline constexpr int32_t H263ProfileBackwardCompatible   = 0x04;
+inline constexpr int32_t H263ProfileISWV2                = 0x08;
+inline constexpr int32_t H263ProfileISWV3                = 0x10;
+inline constexpr int32_t H263ProfileHighCompression      = 0x20;
+inline constexpr int32_t H263ProfileInternet             = 0x40;
+inline constexpr int32_t H263ProfileInterlace            = 0x80;
+inline constexpr int32_t H263ProfileHighLatency          = 0x100;
 
 inline static const char *asString_H263Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -118,14 +118,14 @@
     }
 }
 
-constexpr int32_t H263Level10      = 0x01;
-constexpr int32_t H263Level20      = 0x02;
-constexpr int32_t H263Level30      = 0x04;
-constexpr int32_t H263Level40      = 0x08;
-constexpr int32_t H263Level45      = 0x10;
-constexpr int32_t H263Level50      = 0x20;
-constexpr int32_t H263Level60      = 0x40;
-constexpr int32_t H263Level70      = 0x80;
+inline constexpr int32_t H263Level10      = 0x01;
+inline constexpr int32_t H263Level20      = 0x02;
+inline constexpr int32_t H263Level30      = 0x04;
+inline constexpr int32_t H263Level40      = 0x08;
+inline constexpr int32_t H263Level45      = 0x10;
+inline constexpr int32_t H263Level50      = 0x20;
+inline constexpr int32_t H263Level60      = 0x40;
+inline constexpr int32_t H263Level70      = 0x80;
 
 inline static const char *asString_H263Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -141,22 +141,22 @@
     }
 }
 
-constexpr int32_t MPEG4ProfileSimple              = 0x01;
-constexpr int32_t MPEG4ProfileSimpleScalable      = 0x02;
-constexpr int32_t MPEG4ProfileCore                = 0x04;
-constexpr int32_t MPEG4ProfileMain                = 0x08;
-constexpr int32_t MPEG4ProfileNbit                = 0x10;
-constexpr int32_t MPEG4ProfileScalableTexture     = 0x20;
-constexpr int32_t MPEG4ProfileSimpleFace          = 0x40;
-constexpr int32_t MPEG4ProfileSimpleFBA           = 0x80;
-constexpr int32_t MPEG4ProfileBasicAnimated       = 0x100;
-constexpr int32_t MPEG4ProfileHybrid              = 0x200;
-constexpr int32_t MPEG4ProfileAdvancedRealTime    = 0x400;
-constexpr int32_t MPEG4ProfileCoreScalable        = 0x800;
-constexpr int32_t MPEG4ProfileAdvancedCoding      = 0x1000;
-constexpr int32_t MPEG4ProfileAdvancedCore        = 0x2000;
-constexpr int32_t MPEG4ProfileAdvancedScalable    = 0x4000;
-constexpr int32_t MPEG4ProfileAdvancedSimple      = 0x8000;
+inline constexpr int32_t MPEG4ProfileSimple              = 0x01;
+inline constexpr int32_t MPEG4ProfileSimpleScalable      = 0x02;
+inline constexpr int32_t MPEG4ProfileCore                = 0x04;
+inline constexpr int32_t MPEG4ProfileMain                = 0x08;
+inline constexpr int32_t MPEG4ProfileNbit                = 0x10;
+inline constexpr int32_t MPEG4ProfileScalableTexture     = 0x20;
+inline constexpr int32_t MPEG4ProfileSimpleFace          = 0x40;
+inline constexpr int32_t MPEG4ProfileSimpleFBA           = 0x80;
+inline constexpr int32_t MPEG4ProfileBasicAnimated       = 0x100;
+inline constexpr int32_t MPEG4ProfileHybrid              = 0x200;
+inline constexpr int32_t MPEG4ProfileAdvancedRealTime    = 0x400;
+inline constexpr int32_t MPEG4ProfileCoreScalable        = 0x800;
+inline constexpr int32_t MPEG4ProfileAdvancedCoding      = 0x1000;
+inline constexpr int32_t MPEG4ProfileAdvancedCore        = 0x2000;
+inline constexpr int32_t MPEG4ProfileAdvancedScalable    = 0x4000;
+inline constexpr int32_t MPEG4ProfileAdvancedSimple      = 0x8000;
 
 inline static const char *asString_MPEG4Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -180,16 +180,16 @@
     }
 }
 
-constexpr int32_t MPEG4Level0      = 0x01;
-constexpr int32_t MPEG4Level0b     = 0x02;
-constexpr int32_t MPEG4Level1      = 0x04;
-constexpr int32_t MPEG4Level2      = 0x08;
-constexpr int32_t MPEG4Level3      = 0x10;
-constexpr int32_t MPEG4Level3b     = 0x18;
-constexpr int32_t MPEG4Level4      = 0x20;
-constexpr int32_t MPEG4Level4a     = 0x40;
-constexpr int32_t MPEG4Level5      = 0x80;
-constexpr int32_t MPEG4Level6      = 0x100;
+inline constexpr int32_t MPEG4Level0      = 0x01;
+inline constexpr int32_t MPEG4Level0b     = 0x02;
+inline constexpr int32_t MPEG4Level1      = 0x04;
+inline constexpr int32_t MPEG4Level2      = 0x08;
+inline constexpr int32_t MPEG4Level3      = 0x10;
+inline constexpr int32_t MPEG4Level3b     = 0x18;
+inline constexpr int32_t MPEG4Level4      = 0x20;
+inline constexpr int32_t MPEG4Level4a     = 0x40;
+inline constexpr int32_t MPEG4Level5      = 0x80;
+inline constexpr int32_t MPEG4Level6      = 0x100;
 
 inline static const char *asString_MPEG4Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -207,12 +207,12 @@
     }
 }
 
-constexpr int32_t MPEG2ProfileSimple              = 0x00;
-constexpr int32_t MPEG2ProfileMain                = 0x01;
-constexpr int32_t MPEG2Profile422                 = 0x02;
-constexpr int32_t MPEG2ProfileSNR                 = 0x03;
-constexpr int32_t MPEG2ProfileSpatial             = 0x04;
-constexpr int32_t MPEG2ProfileHigh                = 0x05;
+inline constexpr int32_t MPEG2ProfileSimple              = 0x00;
+inline constexpr int32_t MPEG2ProfileMain                = 0x01;
+inline constexpr int32_t MPEG2Profile422                 = 0x02;
+inline constexpr int32_t MPEG2ProfileSNR                 = 0x03;
+inline constexpr int32_t MPEG2ProfileSpatial             = 0x04;
+inline constexpr int32_t MPEG2ProfileHigh                = 0x05;
 
 inline static const char *asString_MPEG2Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -226,11 +226,11 @@
     }
 }
 
-constexpr int32_t MPEG2LevelLL     = 0x00;
-constexpr int32_t MPEG2LevelML     = 0x01;
-constexpr int32_t MPEG2LevelH14    = 0x02;
-constexpr int32_t MPEG2LevelHL     = 0x03;
-constexpr int32_t MPEG2LevelHP     = 0x04;
+inline constexpr int32_t MPEG2LevelLL     = 0x00;
+inline constexpr int32_t MPEG2LevelML     = 0x01;
+inline constexpr int32_t MPEG2LevelH14    = 0x02;
+inline constexpr int32_t MPEG2LevelHL     = 0x03;
+inline constexpr int32_t MPEG2LevelHP     = 0x04;
 
 inline static const char *asString_MPEG2Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -243,18 +243,18 @@
     }
 }
 
-constexpr int32_t AACObjectMain       = 1;
-constexpr int32_t AACObjectLC         = 2;
-constexpr int32_t AACObjectSSR        = 3;
-constexpr int32_t AACObjectLTP        = 4;
-constexpr int32_t AACObjectHE         = 5;
-constexpr int32_t AACObjectScalable   = 6;
-constexpr int32_t AACObjectERLC       = 17;
-constexpr int32_t AACObjectERScalable = 20;
-constexpr int32_t AACObjectLD         = 23;
-constexpr int32_t AACObjectHE_PS      = 29;
-constexpr int32_t AACObjectELD        = 39;
-constexpr int32_t AACObjectXHE        = 42;
+inline constexpr int32_t AACObjectMain       = 1;
+inline constexpr int32_t AACObjectLC         = 2;
+inline constexpr int32_t AACObjectSSR        = 3;
+inline constexpr int32_t AACObjectLTP        = 4;
+inline constexpr int32_t AACObjectHE         = 5;
+inline constexpr int32_t AACObjectScalable   = 6;
+inline constexpr int32_t AACObjectERLC       = 17;
+inline constexpr int32_t AACObjectERScalable = 20;
+inline constexpr int32_t AACObjectLD         = 23;
+inline constexpr int32_t AACObjectHE_PS      = 29;
+inline constexpr int32_t AACObjectELD        = 39;
+inline constexpr int32_t AACObjectXHE        = 42;
 
 inline static const char *asString_AACObject(int32_t i, const char *def = "??") {
     switch (i) {
@@ -274,10 +274,10 @@
     }
 }
 
-constexpr int32_t VP8Level_Version0 = 0x01;
-constexpr int32_t VP8Level_Version1 = 0x02;
-constexpr int32_t VP8Level_Version2 = 0x04;
-constexpr int32_t VP8Level_Version3 = 0x08;
+inline constexpr int32_t VP8Level_Version0 = 0x01;
+inline constexpr int32_t VP8Level_Version1 = 0x02;
+inline constexpr int32_t VP8Level_Version2 = 0x04;
+inline constexpr int32_t VP8Level_Version3 = 0x08;
 
 inline static const char *asString_VP8Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -289,7 +289,7 @@
     }
 }
 
-constexpr int32_t VP8ProfileMain = 0x01;
+inline constexpr int32_t VP8ProfileMain = 0x01;
 
 inline static const char *asString_VP8Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -298,14 +298,14 @@
     }
 }
 
-constexpr int32_t VP9Profile0 = 0x01;
-constexpr int32_t VP9Profile1 = 0x02;
-constexpr int32_t VP9Profile2 = 0x04;
-constexpr int32_t VP9Profile3 = 0x08;
-constexpr int32_t VP9Profile2HDR = 0x1000;
-constexpr int32_t VP9Profile3HDR = 0x2000;
-constexpr int32_t VP9Profile2HDR10Plus = 0x4000;
-constexpr int32_t VP9Profile3HDR10Plus = 0x8000;
+inline constexpr int32_t VP9Profile0 = 0x01;
+inline constexpr int32_t VP9Profile1 = 0x02;
+inline constexpr int32_t VP9Profile2 = 0x04;
+inline constexpr int32_t VP9Profile3 = 0x08;
+inline constexpr int32_t VP9Profile2HDR = 0x1000;
+inline constexpr int32_t VP9Profile3HDR = 0x2000;
+inline constexpr int32_t VP9Profile2HDR10Plus = 0x4000;
+inline constexpr int32_t VP9Profile3HDR10Plus = 0x8000;
 
 inline static const char *asString_VP9Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -321,20 +321,20 @@
     }
 }
 
-constexpr int32_t VP9Level1  = 0x1;
-constexpr int32_t VP9Level11 = 0x2;
-constexpr int32_t VP9Level2  = 0x4;
-constexpr int32_t VP9Level21 = 0x8;
-constexpr int32_t VP9Level3  = 0x10;
-constexpr int32_t VP9Level31 = 0x20;
-constexpr int32_t VP9Level4  = 0x40;
-constexpr int32_t VP9Level41 = 0x80;
-constexpr int32_t VP9Level5  = 0x100;
-constexpr int32_t VP9Level51 = 0x200;
-constexpr int32_t VP9Level52 = 0x400;
-constexpr int32_t VP9Level6  = 0x800;
-constexpr int32_t VP9Level61 = 0x1000;
-constexpr int32_t VP9Level62 = 0x2000;
+inline constexpr int32_t VP9Level1  = 0x1;
+inline constexpr int32_t VP9Level11 = 0x2;
+inline constexpr int32_t VP9Level2  = 0x4;
+inline constexpr int32_t VP9Level21 = 0x8;
+inline constexpr int32_t VP9Level3  = 0x10;
+inline constexpr int32_t VP9Level31 = 0x20;
+inline constexpr int32_t VP9Level4  = 0x40;
+inline constexpr int32_t VP9Level41 = 0x80;
+inline constexpr int32_t VP9Level5  = 0x100;
+inline constexpr int32_t VP9Level51 = 0x200;
+inline constexpr int32_t VP9Level52 = 0x400;
+inline constexpr int32_t VP9Level6  = 0x800;
+inline constexpr int32_t VP9Level61 = 0x1000;
+inline constexpr int32_t VP9Level62 = 0x2000;
 
 inline static const char *asString_VP9Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -356,10 +356,10 @@
     }
 }
 
-constexpr int32_t AV1ProfileMain8 = 0x1;
-constexpr int32_t AV1ProfileMain10 = 0x2;
-constexpr int32_t AV1ProfileMain10HDR10 = 0x1000;
-constexpr int32_t AV1ProfileMain10HDR10Plus = 0x2000;
+inline constexpr int32_t AV1ProfileMain8 = 0x1;
+inline constexpr int32_t AV1ProfileMain10 = 0x2;
+inline constexpr int32_t AV1ProfileMain10HDR10 = 0x1000;
+inline constexpr int32_t AV1ProfileMain10HDR10Plus = 0x2000;
 
 inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -371,30 +371,30 @@
     }
 }
 
-constexpr int32_t AV1Level2  = 0x1;
-constexpr int32_t AV1Level21 = 0x2;
-constexpr int32_t AV1Level22 = 0x4;
-constexpr int32_t AV1Level23 = 0x8;
-constexpr int32_t AV1Level3  = 0x10;
-constexpr int32_t AV1Level31 = 0x20;
-constexpr int32_t AV1Level32 = 0x40;
-constexpr int32_t AV1Level33 = 0x80;
-constexpr int32_t AV1Level4  = 0x100;
-constexpr int32_t AV1Level41 = 0x200;
-constexpr int32_t AV1Level42 = 0x400;
-constexpr int32_t AV1Level43 = 0x800;
-constexpr int32_t AV1Level5  = 0x1000;
-constexpr int32_t AV1Level51 = 0x2000;
-constexpr int32_t AV1Level52 = 0x4000;
-constexpr int32_t AV1Level53 = 0x8000;
-constexpr int32_t AV1Level6  = 0x10000;
-constexpr int32_t AV1Level61 = 0x20000;
-constexpr int32_t AV1Level62 = 0x40000;
-constexpr int32_t AV1Level63 = 0x80000;
-constexpr int32_t AV1Level7  = 0x100000;
-constexpr int32_t AV1Level71 = 0x200000;
-constexpr int32_t AV1Level72 = 0x400000;
-constexpr int32_t AV1Level73 = 0x800000;
+inline constexpr int32_t AV1Level2  = 0x1;
+inline constexpr int32_t AV1Level21 = 0x2;
+inline constexpr int32_t AV1Level22 = 0x4;
+inline constexpr int32_t AV1Level23 = 0x8;
+inline constexpr int32_t AV1Level3  = 0x10;
+inline constexpr int32_t AV1Level31 = 0x20;
+inline constexpr int32_t AV1Level32 = 0x40;
+inline constexpr int32_t AV1Level33 = 0x80;
+inline constexpr int32_t AV1Level4  = 0x100;
+inline constexpr int32_t AV1Level41 = 0x200;
+inline constexpr int32_t AV1Level42 = 0x400;
+inline constexpr int32_t AV1Level43 = 0x800;
+inline constexpr int32_t AV1Level5  = 0x1000;
+inline constexpr int32_t AV1Level51 = 0x2000;
+inline constexpr int32_t AV1Level52 = 0x4000;
+inline constexpr int32_t AV1Level53 = 0x8000;
+inline constexpr int32_t AV1Level6  = 0x10000;
+inline constexpr int32_t AV1Level61 = 0x20000;
+inline constexpr int32_t AV1Level62 = 0x40000;
+inline constexpr int32_t AV1Level63 = 0x80000;
+inline constexpr int32_t AV1Level7  = 0x100000;
+inline constexpr int32_t AV1Level71 = 0x200000;
+inline constexpr int32_t AV1Level72 = 0x400000;
+inline constexpr int32_t AV1Level73 = 0x800000;
 
 inline static const char *asString_AV1Level(int32_t i, const char *def = "??") {
     switch (i) {
@@ -426,11 +426,11 @@
     }
 }
 
-constexpr int32_t HEVCProfileMain        = 0x01;
-constexpr int32_t HEVCProfileMain10      = 0x02;
-constexpr int32_t HEVCProfileMainStill   = 0x04;
-constexpr int32_t HEVCProfileMain10HDR10 = 0x1000;
-constexpr int32_t HEVCProfileMain10HDR10Plus = 0x2000;
+inline constexpr int32_t HEVCProfileMain        = 0x01;
+inline constexpr int32_t HEVCProfileMain10      = 0x02;
+inline constexpr int32_t HEVCProfileMainStill   = 0x04;
+inline constexpr int32_t HEVCProfileMain10HDR10 = 0x1000;
+inline constexpr int32_t HEVCProfileMain10HDR10Plus = 0x2000;
 
 inline static const char *asString_HEVCProfile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -443,32 +443,32 @@
     }
 }
 
-constexpr int32_t HEVCMainTierLevel1  = 0x1;
-constexpr int32_t HEVCHighTierLevel1  = 0x2;
-constexpr int32_t HEVCMainTierLevel2  = 0x4;
-constexpr int32_t HEVCHighTierLevel2  = 0x8;
-constexpr int32_t HEVCMainTierLevel21 = 0x10;
-constexpr int32_t HEVCHighTierLevel21 = 0x20;
-constexpr int32_t HEVCMainTierLevel3  = 0x40;
-constexpr int32_t HEVCHighTierLevel3  = 0x80;
-constexpr int32_t HEVCMainTierLevel31 = 0x100;
-constexpr int32_t HEVCHighTierLevel31 = 0x200;
-constexpr int32_t HEVCMainTierLevel4  = 0x400;
-constexpr int32_t HEVCHighTierLevel4  = 0x800;
-constexpr int32_t HEVCMainTierLevel41 = 0x1000;
-constexpr int32_t HEVCHighTierLevel41 = 0x2000;
-constexpr int32_t HEVCMainTierLevel5  = 0x4000;
-constexpr int32_t HEVCHighTierLevel5  = 0x8000;
-constexpr int32_t HEVCMainTierLevel51 = 0x10000;
-constexpr int32_t HEVCHighTierLevel51 = 0x20000;
-constexpr int32_t HEVCMainTierLevel52 = 0x40000;
-constexpr int32_t HEVCHighTierLevel52 = 0x80000;
-constexpr int32_t HEVCMainTierLevel6  = 0x100000;
-constexpr int32_t HEVCHighTierLevel6  = 0x200000;
-constexpr int32_t HEVCMainTierLevel61 = 0x400000;
-constexpr int32_t HEVCHighTierLevel61 = 0x800000;
-constexpr int32_t HEVCMainTierLevel62 = 0x1000000;
-constexpr int32_t HEVCHighTierLevel62 = 0x2000000;
+inline constexpr int32_t HEVCMainTierLevel1  = 0x1;
+inline constexpr int32_t HEVCHighTierLevel1  = 0x2;
+inline constexpr int32_t HEVCMainTierLevel2  = 0x4;
+inline constexpr int32_t HEVCHighTierLevel2  = 0x8;
+inline constexpr int32_t HEVCMainTierLevel21 = 0x10;
+inline constexpr int32_t HEVCHighTierLevel21 = 0x20;
+inline constexpr int32_t HEVCMainTierLevel3  = 0x40;
+inline constexpr int32_t HEVCHighTierLevel3  = 0x80;
+inline constexpr int32_t HEVCMainTierLevel31 = 0x100;
+inline constexpr int32_t HEVCHighTierLevel31 = 0x200;
+inline constexpr int32_t HEVCMainTierLevel4  = 0x400;
+inline constexpr int32_t HEVCHighTierLevel4  = 0x800;
+inline constexpr int32_t HEVCMainTierLevel41 = 0x1000;
+inline constexpr int32_t HEVCHighTierLevel41 = 0x2000;
+inline constexpr int32_t HEVCMainTierLevel5  = 0x4000;
+inline constexpr int32_t HEVCHighTierLevel5  = 0x8000;
+inline constexpr int32_t HEVCMainTierLevel51 = 0x10000;
+inline constexpr int32_t HEVCHighTierLevel51 = 0x20000;
+inline constexpr int32_t HEVCMainTierLevel52 = 0x40000;
+inline constexpr int32_t HEVCHighTierLevel52 = 0x80000;
+inline constexpr int32_t HEVCMainTierLevel6  = 0x100000;
+inline constexpr int32_t HEVCHighTierLevel6  = 0x200000;
+inline constexpr int32_t HEVCMainTierLevel61 = 0x400000;
+inline constexpr int32_t HEVCHighTierLevel61 = 0x800000;
+inline constexpr int32_t HEVCMainTierLevel62 = 0x1000000;
+inline constexpr int32_t HEVCHighTierLevel62 = 0x2000000;
 
 inline static const char *asString_HEVCTierLevel(int32_t i, const char *def = "??") {
     switch (i) {
@@ -502,17 +502,17 @@
     }
 }
 
-constexpr int32_t DolbyVisionProfileDvavPer = 0x1;
-constexpr int32_t DolbyVisionProfileDvavPen = 0x2;
-constexpr int32_t DolbyVisionProfileDvheDer = 0x4;
-constexpr int32_t DolbyVisionProfileDvheDen = 0x8;
-constexpr int32_t DolbyVisionProfileDvheDtr = 0x10;
-constexpr int32_t DolbyVisionProfileDvheStn = 0x20;
-constexpr int32_t DolbyVisionProfileDvheDth = 0x40;
-constexpr int32_t DolbyVisionProfileDvheDtb = 0x80;
-constexpr int32_t DolbyVisionProfileDvheSt  = 0x100;
-constexpr int32_t DolbyVisionProfileDvavSe  = 0x200;
-constexpr int32_t DolbyVisionProfileDvav110 = 0x400;
+inline constexpr int32_t DolbyVisionProfileDvavPer = 0x1;
+inline constexpr int32_t DolbyVisionProfileDvavPen = 0x2;
+inline constexpr int32_t DolbyVisionProfileDvheDer = 0x4;
+inline constexpr int32_t DolbyVisionProfileDvheDen = 0x8;
+inline constexpr int32_t DolbyVisionProfileDvheDtr = 0x10;
+inline constexpr int32_t DolbyVisionProfileDvheStn = 0x20;
+inline constexpr int32_t DolbyVisionProfileDvheDth = 0x40;
+inline constexpr int32_t DolbyVisionProfileDvheDtb = 0x80;
+inline constexpr int32_t DolbyVisionProfileDvheSt  = 0x100;
+inline constexpr int32_t DolbyVisionProfileDvavSe  = 0x200;
+inline constexpr int32_t DolbyVisionProfileDvav110 = 0x400;
 
 inline static const char *asString_DolbyVisionProfile(int32_t i, const char *def = "??") {
     switch (i) {
@@ -531,18 +531,18 @@
     }
 }
 
-constexpr int32_t DolbyVisionLevelHd24    = 0x1;
-constexpr int32_t DolbyVisionLevelHd30    = 0x2;
-constexpr int32_t DolbyVisionLevelFhd24   = 0x4;
-constexpr int32_t DolbyVisionLevelFhd30   = 0x8;
-constexpr int32_t DolbyVisionLevelFhd60   = 0x10;
-constexpr int32_t DolbyVisionLevelUhd24   = 0x20;
-constexpr int32_t DolbyVisionLevelUhd30   = 0x40;
-constexpr int32_t DolbyVisionLevelUhd48   = 0x80;
-constexpr int32_t DolbyVisionLevelUhd60   = 0x100;
-constexpr int32_t DolbyVisionLevelUhd120  = 0x200;
-constexpr int32_t DolbyVisionLevel8k30    = 0x400;
-constexpr int32_t DolbyVisionLevel8k60    = 0x800;
+inline constexpr int32_t DolbyVisionLevelHd24    = 0x1;
+inline constexpr int32_t DolbyVisionLevelHd30    = 0x2;
+inline constexpr int32_t DolbyVisionLevelFhd24   = 0x4;
+inline constexpr int32_t DolbyVisionLevelFhd30   = 0x8;
+inline constexpr int32_t DolbyVisionLevelFhd60   = 0x10;
+inline constexpr int32_t DolbyVisionLevelUhd24   = 0x20;
+inline constexpr int32_t DolbyVisionLevelUhd30   = 0x40;
+inline constexpr int32_t DolbyVisionLevelUhd48   = 0x80;
+inline constexpr int32_t DolbyVisionLevelUhd60   = 0x100;
+inline constexpr int32_t DolbyVisionLevelUhd120  = 0x200;
+inline constexpr int32_t DolbyVisionLevel8k30    = 0x400;
+inline constexpr int32_t DolbyVisionLevel8k60    = 0x800;
 
 inline static const char *asString_DolbyVisionLevel(int32_t i, const char *def = "??") {
     switch (i) {
@@ -562,10 +562,10 @@
     }
 }
 
-constexpr int32_t BITRATE_MODE_CBR = 2;
-constexpr int32_t BITRATE_MODE_CBR_FD = 3;
-constexpr int32_t BITRATE_MODE_CQ = 0;
-constexpr int32_t BITRATE_MODE_VBR = 1;
+inline constexpr int32_t BITRATE_MODE_CBR = 2;
+inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
+inline constexpr int32_t BITRATE_MODE_CQ = 0;
+inline constexpr int32_t BITRATE_MODE_VBR = 1;
 
 inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
     switch (i) {
@@ -577,61 +577,61 @@
     }
 }
 
-constexpr int32_t COLOR_Format12bitRGB444             = 3;
-constexpr int32_t COLOR_Format16bitARGB1555           = 5;
-constexpr int32_t COLOR_Format16bitARGB4444           = 4;
-constexpr int32_t COLOR_Format16bitBGR565             = 7;
-constexpr int32_t COLOR_Format16bitRGB565             = 6;
-constexpr int32_t COLOR_Format18bitARGB1665           = 9;
-constexpr int32_t COLOR_Format18BitBGR666             = 41;
-constexpr int32_t COLOR_Format18bitRGB666             = 8;
-constexpr int32_t COLOR_Format19bitARGB1666           = 10;
-constexpr int32_t COLOR_Format24BitABGR6666           = 43;
-constexpr int32_t COLOR_Format24bitARGB1887           = 13;
-constexpr int32_t COLOR_Format24BitARGB6666           = 42;
-constexpr int32_t COLOR_Format24bitBGR888             = 12;
-constexpr int32_t COLOR_Format24bitRGB888             = 11;
-constexpr int32_t COLOR_Format25bitARGB1888           = 14;
-constexpr int32_t COLOR_Format32bitABGR2101010        = 0x7F00AAA2;
-constexpr int32_t COLOR_Format32bitABGR8888           = 0x7F00A000;
-constexpr int32_t COLOR_Format32bitARGB8888           = 16;
-constexpr int32_t COLOR_Format32bitBGRA8888           = 15;
-constexpr int32_t COLOR_Format64bitABGRFloat          = 0x7F000F16;
-constexpr int32_t COLOR_Format8bitRGB332              = 2;
-constexpr int32_t COLOR_FormatCbYCrY                  = 27;
-constexpr int32_t COLOR_FormatCrYCbY                  = 28;
-constexpr int32_t COLOR_FormatL16                     = 36;
-constexpr int32_t COLOR_FormatL2                      = 33;
-constexpr int32_t COLOR_FormatL24                     = 37;
-constexpr int32_t COLOR_FormatL32                     = 38;
-constexpr int32_t COLOR_FormatL4                      = 34;
-constexpr int32_t COLOR_FormatL8                      = 35;
-constexpr int32_t COLOR_FormatMonochrome              = 1;
-constexpr int32_t COLOR_FormatRawBayer10bit           = 31;
-constexpr int32_t COLOR_FormatRawBayer8bit            = 30;
-constexpr int32_t COLOR_FormatRawBayer8bitcompressed  = 32;
-constexpr int32_t COLOR_FormatRGBAFlexible            = 0x7F36A888;
-constexpr int32_t COLOR_FormatRGBFlexible             = 0x7F36B888;
-constexpr int32_t COLOR_FormatSurface                 = 0x7F000789;
-constexpr int32_t COLOR_FormatYCbYCr                  = 25;
-constexpr int32_t COLOR_FormatYCrYCb                  = 26;
-constexpr int32_t COLOR_FormatYUV411PackedPlanar      = 18;
-constexpr int32_t COLOR_FormatYUV411Planar            = 17;
-constexpr int32_t COLOR_FormatYUV420Flexible          = 0x7F420888;
-constexpr int32_t COLOR_FormatYUV420PackedPlanar      = 20;
-constexpr int32_t COLOR_FormatYUV420PackedSemiPlanar  = 39;
-constexpr int32_t COLOR_FormatYUV420Planar            = 19;
-constexpr int32_t COLOR_FormatYUV420SemiPlanar        = 21;
-constexpr int32_t COLOR_FormatYUV422Flexible          = 0x7F422888;
-constexpr int32_t COLOR_FormatYUV422PackedPlanar      = 23;
-constexpr int32_t COLOR_FormatYUV422PackedSemiPlanar  = 40;
-constexpr int32_t COLOR_FormatYUV422Planar            = 22;
-constexpr int32_t COLOR_FormatYUV422SemiPlanar        = 24;
-constexpr int32_t COLOR_FormatYUV444Flexible          = 0x7F444888;
-constexpr int32_t COLOR_FormatYUV444Interleaved       = 29;
-constexpr int32_t COLOR_FormatYUVP010                 = 54;
-constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar   = 0x7fa30c00;
-constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
+inline constexpr int32_t COLOR_Format12bitRGB444             = 3;
+inline constexpr int32_t COLOR_Format16bitARGB1555           = 5;
+inline constexpr int32_t COLOR_Format16bitARGB4444           = 4;
+inline constexpr int32_t COLOR_Format16bitBGR565             = 7;
+inline constexpr int32_t COLOR_Format16bitRGB565             = 6;
+inline constexpr int32_t COLOR_Format18bitARGB1665           = 9;
+inline constexpr int32_t COLOR_Format18BitBGR666             = 41;
+inline constexpr int32_t COLOR_Format18bitRGB666             = 8;
+inline constexpr int32_t COLOR_Format19bitARGB1666           = 10;
+inline constexpr int32_t COLOR_Format24BitABGR6666           = 43;
+inline constexpr int32_t COLOR_Format24bitARGB1887           = 13;
+inline constexpr int32_t COLOR_Format24BitARGB6666           = 42;
+inline constexpr int32_t COLOR_Format24bitBGR888             = 12;
+inline constexpr int32_t COLOR_Format24bitRGB888             = 11;
+inline constexpr int32_t COLOR_Format25bitARGB1888           = 14;
+inline constexpr int32_t COLOR_Format32bitABGR2101010        = 0x7F00AAA2;
+inline constexpr int32_t COLOR_Format32bitABGR8888           = 0x7F00A000;
+inline constexpr int32_t COLOR_Format32bitARGB8888           = 16;
+inline constexpr int32_t COLOR_Format32bitBGRA8888           = 15;
+inline constexpr int32_t COLOR_Format64bitABGRFloat          = 0x7F000F16;
+inline constexpr int32_t COLOR_Format8bitRGB332              = 2;
+inline constexpr int32_t COLOR_FormatCbYCrY                  = 27;
+inline constexpr int32_t COLOR_FormatCrYCbY                  = 28;
+inline constexpr int32_t COLOR_FormatL16                     = 36;
+inline constexpr int32_t COLOR_FormatL2                      = 33;
+inline constexpr int32_t COLOR_FormatL24                     = 37;
+inline constexpr int32_t COLOR_FormatL32                     = 38;
+inline constexpr int32_t COLOR_FormatL4                      = 34;
+inline constexpr int32_t COLOR_FormatL8                      = 35;
+inline constexpr int32_t COLOR_FormatMonochrome              = 1;
+inline constexpr int32_t COLOR_FormatRawBayer10bit           = 31;
+inline constexpr int32_t COLOR_FormatRawBayer8bit            = 30;
+inline constexpr int32_t COLOR_FormatRawBayer8bitcompressed  = 32;
+inline constexpr int32_t COLOR_FormatRGBAFlexible            = 0x7F36A888;
+inline constexpr int32_t COLOR_FormatRGBFlexible             = 0x7F36B888;
+inline constexpr int32_t COLOR_FormatSurface                 = 0x7F000789;
+inline constexpr int32_t COLOR_FormatYCbYCr                  = 25;
+inline constexpr int32_t COLOR_FormatYCrYCb                  = 26;
+inline constexpr int32_t COLOR_FormatYUV411PackedPlanar      = 18;
+inline constexpr int32_t COLOR_FormatYUV411Planar            = 17;
+inline constexpr int32_t COLOR_FormatYUV420Flexible          = 0x7F420888;
+inline constexpr int32_t COLOR_FormatYUV420PackedPlanar      = 20;
+inline constexpr int32_t COLOR_FormatYUV420PackedSemiPlanar  = 39;
+inline constexpr int32_t COLOR_FormatYUV420Planar            = 19;
+inline constexpr int32_t COLOR_FormatYUV420SemiPlanar        = 21;
+inline constexpr int32_t COLOR_FormatYUV422Flexible          = 0x7F422888;
+inline constexpr int32_t COLOR_FormatYUV422PackedPlanar      = 23;
+inline constexpr int32_t COLOR_FormatYUV422PackedSemiPlanar  = 40;
+inline constexpr int32_t COLOR_FormatYUV422Planar            = 22;
+inline constexpr int32_t COLOR_FormatYUV422SemiPlanar        = 24;
+inline constexpr int32_t COLOR_FormatYUV444Flexible          = 0x7F444888;
+inline constexpr int32_t COLOR_FormatYUV444Interleaved       = 29;
+inline constexpr int32_t COLOR_FormatYUVP010                 = 54;
+inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar   = 0x7fa30c00;
+inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
 
 inline static const char *asString_ColorFormat(int32_t i, const char *def = "??") {
     switch (i) {
@@ -694,198 +694,200 @@
     }
 }
 
-constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
-constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
-constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
-constexpr char FEATURE_PartialFrame[] = "partial-frame";
-constexpr char FEATURE_QpBounds[] = "qp-bounds";
-constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
-constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
+inline constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
+inline constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
+inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
+inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
+inline constexpr char FEATURE_SecurePlayback[]         = "secure-playback";
+inline constexpr char FEATURE_TunneledPlayback[]       = "tunneled-playback";
 
 // from MediaFormat.java
-constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
-constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
-constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
-constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
-constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
-constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
-constexpr char MIMETYPE_VIDEO_H263[] = "video/3gpp";
-constexpr char MIMETYPE_VIDEO_MPEG2[] = "video/mpeg2";
-constexpr char MIMETYPE_VIDEO_RAW[] = "video/raw";
-constexpr char MIMETYPE_VIDEO_DOLBY_VISION[] = "video/dolby-vision";
-constexpr char MIMETYPE_VIDEO_SCRAMBLED[] = "video/scrambled";
+inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
+inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
+inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
+inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
+inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
+inline constexpr char MIMETYPE_VIDEO_H263[] = "video/3gpp";
+inline constexpr char MIMETYPE_VIDEO_MPEG2[] = "video/mpeg2";
+inline constexpr char MIMETYPE_VIDEO_RAW[] = "video/raw";
+inline constexpr char MIMETYPE_VIDEO_DOLBY_VISION[] = "video/dolby-vision";
+inline constexpr char MIMETYPE_VIDEO_SCRAMBLED[] = "video/scrambled";
 
-constexpr char MIMETYPE_AUDIO_AMR_NB[] = "audio/3gpp";
-constexpr char MIMETYPE_AUDIO_AMR_WB[] = "audio/amr-wb";
-constexpr char MIMETYPE_AUDIO_MPEG[] = "audio/mpeg";
-constexpr char MIMETYPE_AUDIO_AAC[] = "audio/mp4a-latm";
-constexpr char MIMETYPE_AUDIO_QCELP[] = "audio/qcelp";
-constexpr char MIMETYPE_AUDIO_VORBIS[] = "audio/vorbis";
-constexpr char MIMETYPE_AUDIO_OPUS[] = "audio/opus";
-constexpr char MIMETYPE_AUDIO_G711_ALAW[] = "audio/g711-alaw";
-constexpr char MIMETYPE_AUDIO_G711_MLAW[] = "audio/g711-mlaw";
-constexpr char MIMETYPE_AUDIO_RAW[] = "audio/raw";
-constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
-constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
-constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
-constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
-constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_AMR_NB[] = "audio/3gpp";
+inline constexpr char MIMETYPE_AUDIO_AMR_WB[] = "audio/amr-wb";
+inline constexpr char MIMETYPE_AUDIO_MPEG[] = "audio/mpeg";
+inline constexpr char MIMETYPE_AUDIO_AAC[] = "audio/mp4a-latm";
+inline constexpr char MIMETYPE_AUDIO_QCELP[] = "audio/qcelp";
+inline constexpr char MIMETYPE_AUDIO_VORBIS[] = "audio/vorbis";
+inline constexpr char MIMETYPE_AUDIO_OPUS[] = "audio/opus";
+inline constexpr char MIMETYPE_AUDIO_G711_ALAW[] = "audio/g711-alaw";
+inline constexpr char MIMETYPE_AUDIO_G711_MLAW[] = "audio/g711-mlaw";
+inline constexpr char MIMETYPE_AUDIO_RAW[] = "audio/raw";
+inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
+inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
+inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
 
-constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
+inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
 
-constexpr char MIMETYPE_TEXT_CEA_608[] = "text/cea-608";
-constexpr char MIMETYPE_TEXT_CEA_708[] = "text/cea-708";
-constexpr char MIMETYPE_TEXT_SUBRIP[] = "application/x-subrip";
-constexpr char MIMETYPE_TEXT_VTT[] = "text/vtt";
+inline constexpr char MIMETYPE_TEXT_CEA_608[] = "text/cea-608";
+inline constexpr char MIMETYPE_TEXT_CEA_708[] = "text/cea-708";
+inline constexpr char MIMETYPE_TEXT_SUBRIP[] = "application/x-subrip";
+inline constexpr char MIMETYPE_TEXT_VTT[] = "text/vtt";
 
-constexpr int32_t COLOR_RANGE_FULL = 1;
-constexpr int32_t COLOR_RANGE_LIMITED = 2;
-constexpr int32_t COLOR_STANDARD_BT2020 = 6;
-constexpr int32_t COLOR_STANDARD_BT601_NTSC = 4;
-constexpr int32_t COLOR_STANDARD_BT601_PAL = 2;
-constexpr int32_t COLOR_STANDARD_BT709 = 1;
-constexpr int32_t COLOR_TRANSFER_HLG = 7;
-constexpr int32_t COLOR_TRANSFER_LINEAR = 1;
-constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
-constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
+inline constexpr int32_t COLOR_RANGE_FULL = 1;
+inline constexpr int32_t COLOR_RANGE_LIMITED = 2;
+inline constexpr int32_t COLOR_STANDARD_BT2020 = 6;
+inline constexpr int32_t COLOR_STANDARD_BT601_NTSC = 4;
+inline constexpr int32_t COLOR_STANDARD_BT601_PAL = 2;
+inline constexpr int32_t COLOR_STANDARD_BT709 = 1;
+inline constexpr int32_t COLOR_TRANSFER_HLG = 7;
+inline constexpr int32_t COLOR_TRANSFER_LINEAR = 1;
+inline constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
+inline constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
 
-constexpr int32_t PICTURE_TYPE_I = 1;
-constexpr int32_t PICTURE_TYPE_P = 2;
-constexpr int32_t PICTURE_TYPE_B = 3;
-constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
+inline constexpr int32_t PICTURE_TYPE_I = 1;
+inline constexpr int32_t PICTURE_TYPE_P = 2;
+inline constexpr int32_t PICTURE_TYPE_B = 3;
+inline constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
 
-constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
-constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+inline constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+inline constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
 
-constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
-constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
-constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
-constexpr char KEY_AAC_DRC_EFFECT_TYPE[] = "aac-drc-effect-type";
-constexpr char KEY_AAC_DRC_HEAVY_COMPRESSION[] = "aac-drc-heavy-compression";
-constexpr char KEY_AAC_DRC_OUTPUT_LOUDNESS[] = "aac-drc-output-loudness";
-constexpr char KEY_AAC_DRC_TARGET_REFERENCE_LEVEL[] = "aac-target-ref-level";
-constexpr char KEY_AAC_ENCODED_TARGET_LEVEL[] = "aac-encoded-target-level";
-constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
-constexpr char KEY_AAC_PROFILE[] = "aac-profile";
-constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
-constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
-constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
-constexpr char KEY_BIT_RATE[] = "bitrate";
-constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
-constexpr char KEY_CA_SESSION_ID[] = "ca-session-id";
-constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
-constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
-constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count";   // value N, eq to range 1..N
-constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
-constexpr char KEY_COLOR_FORMAT[] = "color-format";
-constexpr char KEY_COLOR_RANGE[] = "color-range";
-constexpr char KEY_COLOR_STANDARD[] = "color-standard";
-constexpr char KEY_COLOR_TRANSFER[] = "color-transfer";
-constexpr char KEY_COMPLEXITY[] = "complexity";
-constexpr char KEY_CREATE_INPUT_SURFACE_SUSPENDED[] = "create-input-buffers-suspended";
-constexpr char KEY_DURATION[] = "durationUs";
-constexpr char KEY_FEATURE_[] = "feature-";
-constexpr char KEY_FLAC_COMPRESSION_LEVEL[] = "flac-compression-level";
-constexpr char KEY_FRAME_RATE[] = "frame-rate";
-constexpr char KEY_GRID_COLUMNS[] = "grid-cols";
-constexpr char KEY_GRID_ROWS[] = "grid-rows";
-constexpr char KEY_HDR_STATIC_INFO[] = "hdr-static-info";
-constexpr char KEY_HDR10_PLUS_INFO[] = "hdr10-plus-info";
-constexpr char KEY_HEIGHT[] = "height";
-constexpr char KEY_I_FRAME_INTERVAL[] = "i-frame-interval";
-constexpr char KEY_INTRA_REFRESH_PERIOD[] = "intra-refresh-period";
-constexpr char KEY_IS_ADTS[] = "is-adts";
-constexpr char KEY_IS_AUTOSELECT[] = "is-autoselect";
-constexpr char KEY_IS_DEFAULT[] = "is-default";
-constexpr char KEY_IS_FORCED_SUBTITLE[] = "is-forced-subtitle";
-constexpr char KEY_IS_TIMED_TEXT[] = "is-timed-text";
-constexpr char KEY_LANGUAGE[] = "language";
-constexpr char KEY_LATENCY[] = "latency";
-constexpr char KEY_LEVEL[] = "level";
-constexpr char KEY_LOW_LATENCY[] = "low-latency";
-constexpr char KEY_MAX_B_FRAMES[] = "max-bframes";
-constexpr char KEY_MAX_BIT_RATE[] = "max-bitrate";
-constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
-constexpr char KEY_MAX_HEIGHT[] = "max-height";
-constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
-constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
-constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
-constexpr char KEY_MAX_WIDTH[] = "max-width";
-constexpr char KEY_MIME[] = "mime";
-constexpr char KEY_OPERATING_RATE[] = "operating-rate";
-constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
-constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
-constexpr char KEY_PICTURE_TYPE[] = "picture-type";
-constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
-constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
-constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
-constexpr char KEY_PRIORITY[] = "priority";
-constexpr char KEY_PROFILE[] = "profile";
-constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
-constexpr char KEY_QUALITY[] = "quality";
-constexpr char KEY_REPEAT_PREVIOUS_FRAME_AFTER[] = "repeat-previous-frame-after";
-constexpr char KEY_ROTATION[] = "rotation-degrees";
-constexpr char KEY_SAMPLE_RATE[] = "sample-rate";
-constexpr char KEY_SLICE_HEIGHT[] = "slice-height";
-constexpr char KEY_STRIDE[] = "stride";
-constexpr char KEY_TEMPORAL_LAYERING[] = "ts-schema";
-constexpr char KEY_TILE_HEIGHT[] = "tile-height";
-constexpr char KEY_TILE_WIDTH[] = "tile-width";
-constexpr char KEY_TRACK_ID[] = "track-id";
-constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
-constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
-constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
-constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
-constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
-constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
-constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
-constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
-constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
-constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
-constexpr char KEY_WIDTH[] = "width";
+inline constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
+inline constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
+inline constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
+inline constexpr char KEY_AAC_DRC_EFFECT_TYPE[] = "aac-drc-effect-type";
+inline constexpr char KEY_AAC_DRC_HEAVY_COMPRESSION[] = "aac-drc-heavy-compression";
+inline constexpr char KEY_AAC_DRC_OUTPUT_LOUDNESS[] = "aac-drc-output-loudness";
+inline constexpr char KEY_AAC_DRC_TARGET_REFERENCE_LEVEL[] = "aac-target-ref-level";
+inline constexpr char KEY_AAC_ENCODED_TARGET_LEVEL[] = "aac-encoded-target-level";
+inline constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
+inline constexpr char KEY_AAC_PROFILE[] = "aac-profile";
+inline constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
+inline constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
+inline constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
+inline constexpr char KEY_BIT_RATE[] = "bitrate";
+inline constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
+inline constexpr char KEY_CA_SESSION_ID[] = "ca-session-id";
+inline constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
+inline constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
+inline constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
+inline constexpr char KEY_CHANNEL_COUNT[] = "channel-count";   // value N, eq to range 1..N
+inline constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
+inline constexpr char KEY_COLOR_FORMAT[] = "color-format";
+inline constexpr char KEY_COLOR_RANGE[] = "color-range";
+inline constexpr char KEY_COLOR_STANDARD[] = "color-standard";
+inline constexpr char KEY_COLOR_TRANSFER[] = "color-transfer";
+inline constexpr char KEY_COMPLEXITY[] = "complexity";
+inline constexpr char KEY_CREATE_INPUT_SURFACE_SUSPENDED[] = "create-input-buffers-suspended";
+inline constexpr char KEY_DURATION[] = "durationUs";
+inline constexpr char KEY_FEATURE_[] = "feature-";
+inline constexpr char KEY_FLAC_COMPRESSION_LEVEL[] = "flac-compression-level";
+inline constexpr char KEY_FRAME_RATE[] = "frame-rate";
+inline constexpr char KEY_GRID_COLUMNS[] = "grid-cols";
+inline constexpr char KEY_GRID_ROWS[] = "grid-rows";
+inline constexpr char KEY_HDR_STATIC_INFO[] = "hdr-static-info";
+inline constexpr char KEY_HDR10_PLUS_INFO[] = "hdr10-plus-info";
+inline constexpr char KEY_HEIGHT[] = "height";
+inline constexpr char KEY_I_FRAME_INTERVAL[] = "i-frame-interval";
+inline constexpr char KEY_INTRA_REFRESH_PERIOD[] = "intra-refresh-period";
+inline constexpr char KEY_IS_ADTS[] = "is-adts";
+inline constexpr char KEY_IS_AUTOSELECT[] = "is-autoselect";
+inline constexpr char KEY_IS_DEFAULT[] = "is-default";
+inline constexpr char KEY_IS_FORCED_SUBTITLE[] = "is-forced-subtitle";
+inline constexpr char KEY_IS_TIMED_TEXT[] = "is-timed-text";
+inline constexpr char KEY_LANGUAGE[] = "language";
+inline constexpr char KEY_LATENCY[] = "latency";
+inline constexpr char KEY_LEVEL[] = "level";
+inline constexpr char KEY_LOW_LATENCY[] = "low-latency";
+inline constexpr char KEY_MAX_B_FRAMES[] = "max-bframes";
+inline constexpr char KEY_MAX_BIT_RATE[] = "max-bitrate";
+inline constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
+inline constexpr char KEY_MAX_HEIGHT[] = "max-height";
+inline constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
+inline constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
+inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
+inline constexpr char KEY_MAX_WIDTH[] = "max-width";
+inline constexpr char KEY_MIME[] = "mime";
+inline constexpr char KEY_OPERATING_RATE[] = "operating-rate";
+inline constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
+inline constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
+inline constexpr char KEY_PICTURE_TYPE[] = "picture-type";
+inline constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
+inline constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
+inline constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
+inline constexpr char KEY_PRIORITY[] = "priority";
+inline constexpr char KEY_PROFILE[] = "profile";
+inline constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
+inline constexpr char KEY_QUALITY[] = "quality";
+inline constexpr char KEY_REPEAT_PREVIOUS_FRAME_AFTER[] = "repeat-previous-frame-after";
+inline constexpr char KEY_ROTATION[] = "rotation-degrees";
+inline constexpr char KEY_SAMPLE_RATE[] = "sample-rate";
+inline constexpr char KEY_SLICE_HEIGHT[] = "slice-height";
+inline constexpr char KEY_STRIDE[] = "stride";
+inline constexpr char KEY_TEMPORAL_LAYERING[] = "ts-schema";
+inline constexpr char KEY_TILE_HEIGHT[] = "tile-height";
+inline constexpr char KEY_TILE_WIDTH[] = "tile-width";
+inline constexpr char KEY_TRACK_ID[] = "track-id";
+inline constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
+inline constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
+inline constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+inline constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+inline constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+inline constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+inline constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+inline constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+inline constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+inline constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
+inline constexpr char KEY_WIDTH[] = "width";
 
 // from MediaCodec.java
-constexpr int32_t ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
-constexpr int32_t ERROR_INSUFFICIENT_RESOURCE = 1100;
-constexpr int32_t ERROR_KEY_EXPIRED = 2;
-constexpr int32_t ERROR_NO_KEY = 1;
-constexpr int32_t ERROR_RECLAIMED = 1101;
-constexpr int32_t ERROR_RESOURCE_BUSY = 3;
-constexpr int32_t ERROR_SESSION_NOT_OPENED = 5;
-constexpr int32_t ERROR_UNSUPPORTED_OPERATION = 6;
-constexpr char CODEC[] = "android.media.mediacodec.codec";
-constexpr char ENCODER[] = "android.media.mediacodec.encoder";
-constexpr char HEIGHT[] = "android.media.mediacodec.height";
-constexpr char MIME_TYPE[] = "android.media.mediacodec.mime";
-constexpr char MODE[] = "android.media.mediacodec.mode";
-constexpr char MODE_AUDIO[] = "audio";
-constexpr char MODE_VIDEO[] = "video";
-constexpr char ROTATION[] = "android.media.mediacodec.rotation";
-constexpr char SECURE[] = "android.media.mediacodec.secure";
-constexpr char WIDTH[] = "android.media.mediacodec.width";
+inline constexpr int32_t ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
+inline constexpr int32_t ERROR_INSUFFICIENT_RESOURCE = 1100;
+inline constexpr int32_t ERROR_KEY_EXPIRED = 2;
+inline constexpr int32_t ERROR_NO_KEY = 1;
+inline constexpr int32_t ERROR_RECLAIMED = 1101;
+inline constexpr int32_t ERROR_RESOURCE_BUSY = 3;
+inline constexpr int32_t ERROR_SESSION_NOT_OPENED = 5;
+inline constexpr int32_t ERROR_UNSUPPORTED_OPERATION = 6;
+inline constexpr char CODEC[] = "android.media.mediacodec.codec";
+inline constexpr char ENCODER[] = "android.media.mediacodec.encoder";
+inline constexpr char HEIGHT[] = "android.media.mediacodec.height";
+inline constexpr char MIME_TYPE[] = "android.media.mediacodec.mime";
+inline constexpr char MODE[] = "android.media.mediacodec.mode";
+inline constexpr char MODE_AUDIO[] = "audio";
+inline constexpr char MODE_VIDEO[] = "video";
+inline constexpr char ROTATION[] = "android.media.mediacodec.rotation";
+inline constexpr char SECURE[] = "android.media.mediacodec.secure";
+inline constexpr char WIDTH[] = "android.media.mediacodec.width";
 
-constexpr int32_t BUFFER_FLAG_CODEC_CONFIG = 2;
-constexpr int32_t BUFFER_FLAG_END_OF_STREAM = 4;
-constexpr int32_t BUFFER_FLAG_KEY_FRAME = 1;
-constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
-constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
-constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
-constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
-constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
-constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
-constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
-constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
-constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED  = -2;
-constexpr int32_t INFO_TRY_AGAIN_LATER        = -1;
-constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT               = 1;
-constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
-constexpr char PARAMETER_KEY_OFFSET_TIME[] = "time-offset-us";
-constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
-constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
-constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
-constexpr char PARAMETER_KEY_TUNNEL_PEEK[] =  "tunnel-peek";
-constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr int32_t BUFFER_FLAG_CODEC_CONFIG = 2;
+inline constexpr int32_t BUFFER_FLAG_DECODE_ONLY = 32;
+inline constexpr int32_t BUFFER_FLAG_END_OF_STREAM = 4;
+inline constexpr int32_t BUFFER_FLAG_KEY_FRAME = 1;
+inline constexpr int32_t BUFFER_FLAG_MUXER_DATA = 16;
+inline constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
+inline constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
+inline constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
+inline constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
+inline constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
+inline constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
+inline constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
+inline constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
+inline constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED  = -2;
+inline constexpr int32_t INFO_TRY_AGAIN_LATER        = -1;
+inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT               = 1;
+inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+inline constexpr char PARAMETER_KEY_OFFSET_TIME[] = "time-offset-us";
+inline constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
+inline constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
+inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
+inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] =  "tunnel-peek";
+inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
 
 }
 
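The hunks above convert the namespace-scope constants from plain constexpr to inline constexpr. At namespace scope a plain constexpr variable has internal linkage, so every translation unit that includes the header gets its own copy (and its own address) of each constant; marking it inline (C++17) collapses those into a single definition shared program-wide. A minimal sketch of the difference, not taken from the patch:

    // constants.h (sketch)
    #pragma once
    #include <cstdint>
    inline constexpr int32_t BITRATE_MODE_VBR = 1;   // single entity for the whole program

    // any .cpp that includes constants.h
    const int32_t* bitrateModeAddr() { return &BITRATE_MODE_VBR; }
    // With 'inline constexpr', &BITRATE_MODE_VBR is the same address in every
    // translation unit; with plain 'constexpr' each translation unit had its own copy.
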
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 3cf455c..08a5324 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -80,11 +80,9 @@
             const char *mime,
             bool createEncoder,
             uint32_t flags,
-            sp<AMessage> format,
+            const sp<AMessage> &format,
             Vector<AString> *matchingCodecs);
 
-    static bool codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info, sp<AMessage> format);
-
     static bool isSoftwareCodec(const AString &componentName);
 
 private:
@@ -115,6 +113,11 @@
 
     MediaCodecList(const MediaCodecList&) = delete;
     MediaCodecList& operator=(const MediaCodecList&) = delete;
+
+    static bool codecHandlesFormat(
+            const char *mime,
+            const sp<MediaCodecInfo> &info,
+            const sp<AMessage> &format);
 };
 
 }  // namespace android
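
The signature changes above (sp<AMessage> format becoming const sp<AMessage> &format, and the const references on codecHandlesFormat) avoid copying Android strong pointers on every call: copying an sp<> atomically increments and later decrements the object's reference count, while a const reference leaves the caller's reference untouched. A sketch only, using the existing sp<> and AMessage types:

    #include <media/stagefright/foundation/AMessage.h>

    // Sketch: the refcount cost of the two parameter styles.
    void byValue(android::sp<android::AMessage> format);            // incStrong/decStrong per call
    void byConstRef(const android::sp<android::AMessage>& format);  // no refcount traffic
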
diff --git a/media/libstagefright/include/media/stagefright/MediaFilter.h b/media/libstagefright/include/media/stagefright/MediaFilter.h
deleted file mode 100644
index 1255e0f..0000000
--- a/media/libstagefright/include/media/stagefright/MediaFilter.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_FILTER_H_
-#define MEDIA_FILTER_H_
-
-#include <media/stagefright/CodecBase.h>
-
-namespace android {
-
-struct GraphicBufferListener;
-struct SimpleFilter;
-
-struct MediaFilter : public CodecBase {
-    MediaFilter();
-
-    virtual std::shared_ptr<BufferChannelBase> getBufferChannel() override;
-    virtual void initiateAllocateComponent(const sp<AMessage> &msg);
-    virtual void initiateConfigureComponent(const sp<AMessage> &msg);
-    virtual void initiateCreateInputSurface();
-    virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
-
-    virtual void initiateStart();
-    virtual void initiateShutdown(bool keepComponentAllocated = false);
-
-    virtual void signalFlush();
-    virtual void signalResume();
-
-    virtual void signalRequestIDRFrame();
-    virtual void signalSetParameters(const sp<AMessage> &msg);
-    virtual void signalEndOfInputStream();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-protected:
-    virtual ~MediaFilter();
-
-private:
-    struct BufferInfo {
-        enum Status {
-            OWNED_BY_US,
-            OWNED_BY_UPSTREAM,
-        };
-
-        uint32_t mBufferID;
-        int32_t mGeneration;
-        int32_t mOutputFlags;
-        Status mStatus;
-
-        sp<MediaCodecBuffer> mData;
-    };
-
-    class BufferChannel;
-
-    enum State {
-      UNINITIALIZED,
-      INITIALIZED,
-      CONFIGURED,
-      STARTED,
-    };
-
-    enum {
-        kWhatInputBufferFilled       = 'inpF',
-        kWhatOutputBufferDrained     = 'outD',
-        kWhatShutdown                = 'shut',
-        kWhatFlush                   = 'flus',
-        kWhatResume                  = 'resm',
-        kWhatAllocateComponent       = 'allo',
-        kWhatConfigureComponent      = 'conf',
-        kWhatCreateInputSurface      = 'cisf',
-        kWhatSignalEndOfInputStream  = 'eois',
-        kWhatStart                   = 'star',
-        kWhatSetParameters           = 'setP',
-        kWhatProcessBuffers          = 'proc',
-    };
-
-    enum {
-        kPortIndexInput  = 0,
-        kPortIndexOutput = 1
-    };
-
-    // member variables
-    AString mComponentName;
-    State mState;
-    status_t mInputEOSResult;
-    int32_t mWidth, mHeight;
-    int32_t mStride, mSliceHeight;
-    int32_t mColorFormatIn, mColorFormatOut;
-    size_t mMaxInputSize, mMaxOutputSize;
-    int32_t mGeneration;
-    sp<AMessage> mInputFormat;
-    sp<AMessage> mOutputFormat;
-
-    Vector<BufferInfo> mBuffers[2];
-    Vector<BufferInfo*> mAvailableInputBuffers;
-    Vector<BufferInfo*> mAvailableOutputBuffers;
-    bool mPortEOS[2];
-
-    sp<SimpleFilter> mFilter;
-    sp<GraphicBufferListener> mGraphicBufferListener;
-
-    std::shared_ptr<BufferChannel> mBufferChannel;
-
-    // helper functions
-    void signalProcessBuffers();
-    void signalError(status_t error);
-
-    status_t allocateBuffersOnPort(OMX_U32 portIndex);
-    BufferInfo *findBuffer(
-            uint32_t portIndex, const sp<MediaCodecBuffer> &buffer,
-            ssize_t *index = NULL);
-    void postFillThisBuffer(BufferInfo *info);
-    void postDrainThisBuffer(BufferInfo *info);
-    void postEOS();
-    void requestFillEmptyInput();
-    void processBuffers();
-
-    void onAllocateComponent(const sp<AMessage> &msg);
-    void onConfigureComponent(const sp<AMessage> &msg);
-    void onStart();
-    void onInputBufferFilled(const sp<AMessage> &msg);
-    void onOutputBufferDrained(const sp<AMessage> &msg);
-    void onShutdown(const sp<AMessage> &msg);
-    void onFlush();
-    void onSetParameters(const sp<AMessage> &msg);
-    void onCreateInputSurface();
-    void onInputFrameAvailable();
-    void onSignalEndOfInputStream();
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaFilter);
-};
-
-}  // namespace android
-
-#endif  // MEDIA_FILTER_H_
diff --git a/media/libstagefright/include/media/stagefright/MediaHistogram.h b/media/libstagefright/include/media/stagefright/MediaHistogram.h
new file mode 100644
index 0000000..50fa258
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/MediaHistogram.h
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_HISTOGRAM_H_
+#define MEDIA_HISTOGRAM_H_
+
+#include <limits>
+#include <sstream>
+#include <string>
+#include <vector>
+
+namespace android {
+
+template<typename T>
+class MediaHistogram {
+public:
+    MediaHistogram();
+    void clear();
+    bool setup(int bucketCount, T width, T floor = 0);
+    bool setup(const std::vector<T> &bucketLimits);
+    void insert(T sample);
+    size_t size();
+    int64_t operator[](int);
+    T getMin() const { return mMin; }
+    T getMax() const { return mMax; }
+    T getCount() const { return mCount; }
+    T getSum() const { return mSum; }
+    T getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
+    T getPercentile(int) const;
+    std::string emit() const;
+    std::string emitBuckets() const;
+private:
+    MediaHistogram(const MediaHistogram &); // disallow
+
+    bool allocate(int bucketCount, bool withBucketLimits);
+
+    T mFloor, mCeiling, mWidth;
+    T mMin, mMax, mSum;
+    int64_t mBelow, mAbove, mCount;
+    std::vector<T> mBuckets;
+    std::vector<T> mBucketLimits;
+};
+
+template<typename T>
+MediaHistogram<T>::MediaHistogram() {
+    mWidth = mCeiling = mFloor = -1;
+    clear();
+}
+
+template<typename T>
+void MediaHistogram<T>::clear() {
+    for (int i = 0; i < mBuckets.size(); ++i) {
+        mBuckets[i] = 0;
+    }
+    mMin = std::numeric_limits<T>::max();
+    mMax = std::numeric_limits<T>::min();
+    mSum = 0;
+    mCount = 0;
+    mBelow = mAbove = 0;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(int bucketCount, T width, T floor) {
+    if (bucketCount <= 0 || width <= 0) {
+        return false;
+    }
+    if (!allocate(bucketCount, false)) {
+        return false;
+    }
+    mWidth = width;
+    mFloor = floor;
+    mCeiling = floor + bucketCount * width;
+    clear();
+    return true;
+}
+
+template<typename T>
+bool MediaHistogram<T>::setup(const std::vector<T> &bucketLimits) {
+    if (bucketLimits.size() <= 1) {
+        return false;
+    }
+    int bucketCount = bucketLimits.size() - 1;
+    if (!allocate(bucketCount, true)) {
+        return false;
+    }
+
+    mWidth = -1;
+    mFloor = bucketLimits[0];
+    for (int i = 0; i < bucketCount; ++i) {
+        mBucketLimits[i] = bucketLimits[i + 1];
+    }
+    mCeiling = bucketLimits[bucketCount];
+    clear();
+    return true;
+}
+
+template<typename T>
+bool MediaHistogram<T>::allocate(int bucketCount, bool withBucketLimits) {
+    assert(bucketCount > 0);
+    if (bucketCount != mBuckets.size()) {
+        mBuckets = std::vector<T>(bucketCount, 0);
+    }
+    if (withBucketLimits && mBucketLimits.size() != bucketCount) {
+        mBucketLimits = std::vector<T>(bucketCount, 0);
+    }
+    return true;
+}
+
+template<typename T>
+void MediaHistogram<T>::insert(T sample) {
+    // histogram is not set up
+    if (mBuckets.size() == 0) {
+        return;
+    }
+
+    mCount++;
+    mSum += sample;
+    if (mMin > sample) mMin = sample;
+    if (mMax < sample) mMax = sample;
+
+    if (sample < mFloor) {
+        mBelow++;
+    } else if (sample >= mCeiling) {
+        mAbove++;
+    } else if (mWidth == -1) {
+        // A binary search might be more efficient for a large number of buckets, but it is
+        // expected that there will never be a large number of buckets, so keep the code simple.
+        for (int slot = 0; slot < mBucketLimits.size(); ++slot) {
+            if (sample < mBucketLimits[slot]) {
+                mBuckets[slot]++;
+                break;
+            }
+        }
+    } else {
+        int64_t slot = (sample - mFloor) / mWidth;
+        assert(slot < mBuckets.size());
+        mBuckets[slot]++;
+    }
+    return;
+}
+
+template<typename T>
+size_t MediaHistogram<T>::size() {
+    return mBuckets.size() + 1;
+}
+
+template<typename T>
+int64_t MediaHistogram<T>::operator[](int i) {
+    assert(i >= 0);
+    assert(i <= mBuckets.size());
+    if (i == mBuckets.size()) {
+        return mAbove;
+    }
+    return mBuckets[i];
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emit() const {
+    // Emits:  floor,width,below{bucket0,bucket1,...., bucketN}above   (fixed-width buckets)
+    // or:     below{bucket0,bucket1,...., bucketN}above               (explicit bucket limits)
+    // An unconfigured histogram emits: 0{}0
+    // XXX: is this the best representation?
+    std::stringstream ss("");
+    if (mWidth == -1) {
+        ss << mBelow << "{";
+    } else {
+        ss << mFloor << "," << mWidth << "," << mBelow << "{";
+    }
+    for (int i = 0; i < mBuckets.size(); i++) {
+        if (i != 0) {
+            ss << ",";
+        }
+        ss << mBuckets[i];
+    }
+    ss << "}" << mAbove;
+    return ss.str();
+}
+
+template<typename T>
+std::string MediaHistogram<T>::emitBuckets() const {
+    std::stringstream ss("");
+    if (mWidth == -1) {
+        ss << mFloor;
+        for (int i = 0; i < mBucketLimits.size(); ++i) {
+            ss << ',' << mBucketLimits[i];
+        }
+    } else {
+        ss << mFloor;
+        for (int i = 1; i <= mBuckets.size(); ++i) {
+            ss << ',' << (mFloor + i * mWidth);
+        }
+    }
+    return ss.str();
+}
+
+} // android
+
+#endif // MEDIA_HISTOGRAM_H_
\ No newline at end of file
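
A usage sketch for the MediaHistogram template added above, based only on the API declared in the header; the bucket values are illustrative:

    #include <string>
    #include <media/stagefright/MediaHistogram.h>

    void recordLatencies() {
        android::MediaHistogram<int64_t> latencyUs;
        // 10 fixed-width buckets of 5000us starting at 0: [0,5000), [5000,10000), ...
        if (!latencyUs.setup(10 /* bucketCount */, 5000 /* width */, 0 /* floor */)) {
            return;  // rejected configuration (e.g. non-positive bucket count or width)
        }
        latencyUs.insert(1200);
        latencyUs.insert(47000);
        latencyUs.insert(999999);                // counted in the "above ceiling" overflow counter
        std::string summary = latencyUs.emit();  // "floor,width,below{bucket0,...,bucketN}above"
    }
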
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index e97a65e..33aaf11 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -48,9 +48,13 @@
 // deleting the output file after stop.
 struct MediaMuxer : public MediaMuxerBase {
 public:
-    // Construct the muxer with the file descriptor. Note that the MediaMuxer
-    // will close this file at stop().
-    MediaMuxer(int fd, OutputFormat format);
+    /**
+     * Creates a muxer for the given output format.
+     * @param fd : file descriptor of the output file.
+     * @param format : output format of the muxer, e.g. webm/mp4/ogg.
+     * @return the new muxer object, or nullptr on error.
+     */
+    static MediaMuxer* create(int fd, OutputFormat format);
 
     virtual ~MediaMuxer();
 
@@ -127,6 +131,11 @@
     sp<AMessage> getTrackFormat(size_t idx);
 
 private:
+    // Construct the muxer with the file descriptor. Note that the MediaMuxer
+    // will close this file at stop().
+    // This constructor is made private to ensure that MediaMuxer::create() is used instead.
+    MediaMuxer(int fd, OutputFormat format);
+
     const OutputFormat mFormat;
     sp<MediaWriter> mWriter;
     Vector< sp<MediaAdapter> > mTrackList;  // Each track has its MediaAdapter.
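
A caller-side sketch for the constructor-to-factory change above; the OutputFormat value comes from MediaMuxerBase (OUTPUT_FORMAT_MPEG_4 here is only an example) and the error handling is illustrative:

    #include <media/stagefright/MediaMuxer.h>

    android::MediaMuxer* openMp4Muxer(int fd) {
        // OUTPUT_FORMAT_MPEG_4 is assumed from MediaMuxerBase; any supported format works.
        android::MediaMuxer* muxer =
                android::MediaMuxer::create(fd, android::MediaMuxer::OUTPUT_FORMAT_MPEG_4);
        if (muxer == nullptr) {
            // create() returns nullptr instead of handing back a half-constructed muxer,
            // which is the point of hiding the constructor behind the factory.
            return nullptr;
        }
        return muxer;
    }
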
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 9f20185..2b14811 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -31,6 +31,29 @@
           mMaxFileDurationLimitUs(0) {
     }
 
+    // Returns true if the file descriptor is opened using a mode
+    // which meets minimum writer/muxer requirements.
+    static bool isFdOpenModeValid(int fd) {
+        // check for invalid file descriptor.
+        int flags = fcntl(fd, F_GETFL);
+        if (flags == -1) {
+            ALOGE("Invalid File Status Flags and/or mode : %d", flags);
+            return false;
+        }
+        // fd must be in read-write mode or write-only mode.
+        if ((flags & (O_RDWR | O_WRONLY)) == 0) {
+            ALOGE("File must be writable");
+            return false;
+        }
+        // Verify fd is seekable
+        off64_t off = lseek64(fd, 0, SEEK_SET);
+        if (off < 0) {
+            ALOGE("File descriptor is not seekable");
+            return false;
+        }
+        return true;
+    }
+
     virtual status_t addSource(const sp<MediaSource> &source) = 0;
     virtual bool reachedEOS() = 0;
     virtual status_t start(MetaData *params = NULL) = 0;
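
A sketch of the fd contract that isFdOpenModeValid() above encodes: the descriptor must be valid, opened writable (O_WRONLY or O_RDWR), and seekable, so read-only fds and non-seekable fds such as pipes are rejected. The file path handling below is illustrative:

    #include <fcntl.h>
    #include <unistd.h>
    #include <media/stagefright/MediaWriter.h>

    bool canMuxInto(const char* path) {
        int fd = open(path, O_RDWR | O_CREAT, 0644);  // writable, seekable regular file
        if (fd < 0) {
            return false;
        }
        bool ok = android::MediaWriter::isFdOpenModeValid(fd);
        close(fd);
        return ok;  // false for O_RDONLY fds or non-seekable fds such as pipes
    }
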
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 88c1f3f..a7d2eb9 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -117,6 +117,12 @@
     kKeyVideoProfile      = 'vprf',  // int32_t
     kKeyVideoLevel        = 'vlev',  // int32_t
 
+    // audio profile and level
+    // The codec framework doesn't distinguish between video and audio profiles,
+    // so the audio keys below reuse the video key values rather than defining new ones.
+    kKeyAudioProfile      = 'vprf',  // int32_t
+    kKeyAudioLevel        = 'vlev',  // int32_t
+
     kKey2ByteNalLength    = '2NAL',  // int32_t (bool)
 
     // Identify the file output format for authoring
@@ -267,6 +273,7 @@
     kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
     kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
     kKeyRtpDscp          = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+    kKeyRtpEcn           = 'sEcn', // int32_t, ECN (Explicit Congestion Notification) of RFC 3168
     kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
 
     // Slow-motion markers
@@ -277,6 +284,18 @@
     kKeyLastSampleIndexInChunk = 'lsic',  //int64_t, index of last sample in a chunk.
     kKeySampleTimeBeforeAppend = 'lsba', // int64_t, timestamp of last sample of a track.
 
+    // DVB component tag
+    kKeyDvbComponentTag = 'copt', // int32_t, component tag for DVB video/audio/subtitle
+
+    // DVB audio description
+    kKeyDvbAudioDescription = 'addt', // bool (int32_t), DVB audio description only defined for
+                                      // audio component
+
+    // DVB teletext magazine number
+    kKeyDvbTeletextMagazineNumber = 'ttxm', // int32_t, DVB teletext magazine number
+
+    // DVB teletext page number
+    kKeyDvbTeletextPageNumber = 'ttxp', // int32_t, DVB teletext page number
 };
 
 enum {
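
A sketch of how the new DVB metadata keys above might be attached to a track's MetaData, assuming the usual setInt32() setters; the tag, magazine, and page values are made up:

    #include <media/stagefright/MetaData.h>

    void tagDvbTrack(const android::sp<android::MetaData>& meta) {
        // Illustrative values only.
        meta->setInt32(android::kKeyDvbComponentTag, 0x42);         // component tag from the PMT
        meta->setInt32(android::kKeyDvbAudioDescription, 1);        // audio description present
        meta->setInt32(android::kKeyDvbTeletextMagazineNumber, 8);  // teletext magazine number
        meta->setInt32(android::kKeyDvbTeletextPageNumber, 88);     // teletext page number
    }
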
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index d17a480..52ea28b 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -146,6 +146,7 @@
     Vector<TrackInfo> mSelectedTracks;
     int64_t mTotalBitrate;  // in bits/sec
     int64_t mDurationUs;
+    String8 mName;
 
     void setEntryPointToRemoteMediaExtractor();
 
@@ -165,6 +166,7 @@
     bool getTotalBitrate(int64_t *bitRate) const;
     status_t updateDurationAndBitrate();
     status_t appendVorbisNumPageSamples(MediaBufferBase *mbuf, const sp<ABuffer> &buffer);
+    status_t initMediaExtractor(const sp<DataSource>& dataSource);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuMediaExtractor);
 };
diff --git a/media/libstagefright/PlaybackDurationAccumulator.h b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
similarity index 95%
rename from media/libstagefright/PlaybackDurationAccumulator.h
rename to media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
index cb5f0c4..bdf1171 100644
--- a/media/libstagefright/PlaybackDurationAccumulator.h
+++ b/media/libstagefright/include/media/stagefright/PlaybackDurationAccumulator.h
@@ -33,7 +33,7 @@
     }
 
     // Process a render time expressed in nanoseconds.
-    void processRenderTime(int64_t newRenderTimeNs) {
+    void onFrameRendered(int64_t newRenderTimeNs) {
         // If we detect wrap-around or out of order frames, just ignore the duration for this
         // and the next frame.
         if (newRenderTimeNs < mPreviousRenderTimeNs) {
@@ -59,7 +59,7 @@
     int64_t mPreviousRenderTimeNs;
 };
 
-}
+} // android
 
-#endif
+#endif // PLAYBACK_DURATION_ACCUMULATOR_H_
 
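A one-line sketch of the renamed hook above: callers now feed actual render timestamps (in nanoseconds) through onFrameRendered(), which replaces processRenderTime():

    #include <media/stagefright/PlaybackDurationAccumulator.h>

    void noteFrameRendered(android::PlaybackDurationAccumulator& acc, int64_t renderTimeNs) {
        acc.onFrameRendered(renderTimeNs);  // accumulates playback duration between rendered frames
    }
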
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
deleted file mode 100644
index b7fc858..0000000
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PROCESS_INFO_INTERFACE_H_
-#define PROCESS_INFO_INTERFACE_H_
-
-#include <utils/RefBase.h>
-
-namespace android {
-
-struct ProcessInfoInterface : public RefBase {
-    virtual bool getPriority(int pid, int* priority) = 0;
-    virtual bool isPidTrusted(int pid) = 0;
-    virtual bool isPidUidTrusted(int pid, int uid) = 0;
-    virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
-    virtual void removeProcessInfoOverride(int pid);
-
-protected:
-    virtual ~ProcessInfoInterface() {}
-};
-
-}  // namespace android
-
-#endif  // PROCESS_INFO_INTERFACE_H_
diff --git a/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h b/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h
deleted file mode 100644
index b42649e..0000000
--- a/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RENDERSCRIPT_WRAPPER_H_
-#define RENDERSCRIPT_WRAPPER_H_
-
-#include <RenderScript.h>
-
-namespace android {
-
-struct RenderScriptWrapper : public RefBase {
-public:
-    struct RSFilterCallback : public RefBase {
-    public:
-        // called by RSFilter to process each input buffer
-        virtual status_t processBuffers(
-                RSC::Allocation* inBuffer,
-                RSC::Allocation* outBuffer) = 0;
-
-        virtual status_t handleSetParameters(const sp<AMessage> &msg) = 0;
-    };
-
-    sp<RSFilterCallback> mCallback;
-    RSC::sp<RSC::RS> mContext;
-};
-
-}   // namespace android
-
-#endif  // RENDERSCRIPT_WRAPPER_H_
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
new file mode 100644
index 0000000..82ba81c
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -0,0 +1,449 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#define VIDEO_RENDER_QUALITY_TRACKER_H_
+
+#include <assert.h>
+#include <list>
+#include <queue>
+
+#include <media/stagefright/MediaHistogram.h>
+
+namespace android {
+
+// A variety of video rendering quality metrics.
+struct VideoRenderQualityMetrics {
+    static constexpr float FRAME_RATE_UNDETERMINED = -1.0f;
+    static constexpr float FRAME_RATE_24_3_2_PULLDOWN = -2.0f;
+
+    VideoRenderQualityMetrics();
+
+    void clear();
+
+    // The render time of the first video frame.
+    int64_t firstRenderTimeUs;
+
+    // The number of frames released to be rendered.
+    int64_t frameReleasedCount;
+
+    // The number of frames actually rendered.
+    int64_t frameRenderedCount;
+
+    // The number of frames dropped - frames that were released but never rendered.
+    int64_t frameDroppedCount;
+
+    // The number of frames that were intentionally dropped/skipped by the app.
+    int64_t frameSkippedCount;
+
+    // The frame rate as detected by looking at the position timestamp from the content stream.
+    float contentFrameRate;
+
+    // The frame rate as detected by looking at the desired render time passed in by the app.
+    float desiredFrameRate;
+
+    // The frame rate as detected by looking at the actual render time, as returned by the system
+    // post-render.
+    float actualFrameRate;
+
+    // A histogram of the durations of freezes due to dropped/skipped frames.
+    MediaHistogram<int32_t> freezeDurationMsHistogram;
+    // The computed overall freeze score using the above histogram and score conversion table. The
+    // score is based on counts in the histogram bucket, multiplied by the value in the score
+    // conversion table for that bucket. For example, the impact of a short freeze may be minimal,
+    // but the impact of a long freeze may be disproportionately worse. Therefore, the score
+    // multipliers for each bucket might increase exponentially instead of linearly. A score
+    // multiplier of zero would reflect that small freeze durations have near-zero impact on the
+    // user experience.
+    int32_t freezeScore;
+    // The computed percentage of total playback duration that was frozen.
+    float freezeRate;
+    // The number of freeze events.
+    int32_t freezeEventCount;
+
+    // A histogram of the durations between each freeze.
+    MediaHistogram<int32_t> freezeDistanceMsHistogram;
+
+    // A histogram of the judder scores - based on the error tolerance between actual render
+    // duration of each frame and the ideal render duration.
+    MediaHistogram<int32_t> judderScoreHistogram;
+    // The computed overall judder score using the above histogram and score conversion table. The
+    // score is based on counts in the histogram bucket, multiplied by the value in the score
+    // conversion table for that bucket. For example, the impact of minimal judder may be small,
+    // but the impact of large judder may be disproportionately worse. Therefore, the score
+    // multipliers for each bucket might increase exponentially instead of linearly. A score
+    // multiplier of zero would reflect that small judder errors have near-zero impact on the
+    // user experience.
+    int32_t judderScore;
+    // The computed percentage of total frames that had judder.
+    float judderRate;
+    // The number of judder events.
+    int32_t judderEventCount;
+};
+
+///////////////////////////////////////////////////////
+// This class analyzes various timestamps related to video rendering to compute a set of metrics
+// that attempt to capture the quality of the user experience during video playback.
+//
+// The following timestamps (in microseconds) are analyzed to compute these metrics:
+//   * The content timestamp found in the content stream, indicating the position of each video
+//     frame.
+//   * The desired timestamp passed in by the app, indicating at what point in time in the future
+//     the app would like the frame to be rendered.
+//   * The actual timestamp passed in by the display subsystem, indicating the point in time at
+//     which the frame was actually rendered.
+//
+// Core to the algorithms are deriving frame durations based on these timestamps and determining
+// the result of each video frame in the content stream:
+//   * skipped: the app didn't want to render the frame
+//   * dropped: the display subsystem could not render the frame in time
+//   * rendered: the display subsystem rendered the frame
+//
+class VideoRenderQualityTracker {
+public:
+    // Configurable elements of the metrics algorithms
+    class Configuration {
+    public:
+        // system/server_configurable_flags/libflags/include/get_flags.h:GetServerConfigurableFlag
+        typedef std::string (*GetServerConfigurableFlagFn)(
+                const std::string& experiment_category_name,
+                const std::string& experiment_flag_name,
+                const std::string& default_value);
+
+        static Configuration getFromServerConfigurableFlags(
+                GetServerConfigurableFlagFn getServerConfigurableFlagFn);
+
+        Configuration();
+
+        // Whether or not frame render quality is tracked.
+        bool enabled;
+
+        // Whether or not frames that are intentionally not rendered by the app should be considered
+        // as dropped.
+        bool areSkippedFramesDropped;
+
+        // How large a jump forward in content time is allowed before it is considered a
+        // discontinuity (seek/playlist) and various internal states are reset.
+        int32_t maxExpectedContentFrameDurationUs;
+
+        // The tolerance applied to frame durations when deciding whether or not two frames have
+        // the same frame rate.
+        int32_t frameRateDetectionToleranceUs;
+
+        // A skip forward in content time could occur during frame drops of live content. Therefore,
+        // the content frame duration and the app-desired frame duration are compared using this
+        // tolerance to determine whether the app is intentionally seeking forward or whether the
+        // skip forward in content time is due to frame drops. If the app-desired frame duration is
+        // short, but the content frame duration is large, it is assumed the app is intentionally
+        // seeking forward.
+        int32_t liveContentFrameDropToleranceUs;
+
+        // Freeze configuration
+        //
+        // The values used to distribute freeze durations across a histogram.
+        std::vector<int32_t> freezeDurationMsHistogramBuckets;
+        //
+        // The values used to multiply the counts in the histogram buckets above to compute an
+        // overall score. This allows the score to reflect disproportionate impact as freeze
+        // durations increase.
+        std::vector<int64_t> freezeDurationMsHistogramToScore;
+        //
+        // The values used to distribute distances between freezes across a histogram.
+        std::vector<int32_t> freezeDistanceMsHistogramBuckets;
+        //
+        // The maximum number of freeze events to send back to the caller.
+        int32_t freezeEventMax;
+        //
+        // The maximum number of detail entries tracked per freeze event.
+        int32_t freezeEventDetailsMax;
+        //
+        // The maximum distance in time between two freeze occurrences such that both will be
+        // lumped into the same freeze event.
+        int32_t freezeEventDistanceToleranceMs;
+
+        // Judder configuration
+        //
+        // A judder error lower than this value is not scored as judder.
+        int32_t judderErrorToleranceUs;
+        //
+        // The values used to distribute judder scores across a histogram.
+        std::vector<int32_t> judderScoreHistogramBuckets;
+        //
+        // The values used to multiply the counts in the histogram buckets above to compute an
+        // overall score. This allows the score to reflect disproportionate impact as judder scores
+        // increase.
+        std::vector<int64_t> judderScoreHistogramToScore;
+        //
+        // The maximum number of judder events to send back to the caller.
+        int32_t judderEventMax;
+        //
+        // The maximum number of detail entries tracked per judder event.
+        int32_t judderEventDetailsMax;
+        //
+        // The maximum distance in time between two judder occurrences such that both will be
+        // lumped into the same judder event.
+        int32_t judderEventDistanceToleranceMs;
+    };
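
A hedged usage sketch for the configuration hook above: injecting a stand-in flag getter, for example in a test. The getter below simply returns the supplied default; the real flag names and values are not shown here, and this snippet is not part of the change.

    #include <string>

    // Hypothetical stand-in matching GetServerConfigurableFlagFn; it ignores the
    // category and flag names and falls back to the supplied default value.
    std::string returnDefaultFlagValue(const std::string& /*experiment_category_name*/,
                                       const std::string& /*experiment_flag_name*/,
                                       const std::string& default_value) {
        return default_value;
    }

    VideoRenderQualityTracker::Configuration config =
            VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
                    &returnDefaultFlagValue);
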
+
+    struct FreezeEvent {
+        // Details are captured for each freeze up to a limited number. The arrays are guaranteed to
+        // have the same size.
+        struct Details {
+            // The duration of the freeze.
+            std::vector<int32_t> durationMs;
+            // The distance between the beginning of this freeze and the end of the previous freeze.
+            std::vector<int32_t> distanceMs;
+        };
+        // Whether or not the data in this structure is valid.
+        bool valid = false;
+        // The time at which the first freeze for this event was detected.
+        int64_t initialTimeUs;
+        // The total duration from the beginning of the first freeze to the end of the last freeze
+        // in this event.
+        int32_t durationMs;
+        // The number of freezes in this event.
+        int64_t count;
+        // The sum of all durations of all freezes in this event.
+        int64_t sumDurationMs;
+        // The sum of all distances between each freeze in this event.
+        int64_t sumDistanceMs;
+        // Detailed information for the first N freezes in this event.
+        Details details;
+    };
+
+    struct JudderEvent {
+        // Details are captured for each frame judder up to a limited number. The arrays are
+        // guaranteed to have the same size.
+        struct Details {
+            // The actual render duration of the frame for this judder occurrence.
+            std::vector<int32_t> actualRenderDurationUs;
+            // The content render duration of the frame for this judder occurrence.
+            std::vector<int32_t> contentRenderDurationUs;
+            // The distance between this judder occurrence and the previous judder occurrence.
+            std::vector<int32_t> distanceMs;
+        };
+        // Whether or not the data in this structure is valid.
+        bool valid = false;
+        // The time at which the first judder occurrence for this event was detected.
+        int64_t initialTimeUs;
+        // The total duration from the first judder occurrence to the last judder occurrence in this
+        // event.
+        int32_t durationMs;
+        // The number of judder occurrences in this event.
+        int64_t count;
+        // The sum of all judder scores in this event.
+        int64_t sumScore;
+        // The sum of all distances between each judder occurrence in this event.
+        int64_t sumDistanceMs;
+        // Detailed information for the first N judder occurrences in this event.
+        Details details;
+    };
+
+    VideoRenderQualityTracker();
+    VideoRenderQualityTracker(const Configuration &configuration);
+
+    // Called when a tunnel mode frame has been queued.
+    void onTunnelFrameQueued(int64_t contentTimeUs);
+
+    // Called when the app has intentionally decided not to render this frame.
+    void onFrameSkipped(int64_t contentTimeUs);
+
+    // Called when the app has requested the frame to be rendered as soon as possible.
+    void onFrameReleased(int64_t contentTimeUs);
+
+    // Called when the app has requested the frame to be rendered at a specific point in time in the
+    // future.
+    void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);
+
+    // Called when the system has detected that the frame has actually been rendered to the display.
+    // Returns any freeze events or judder events that were detected.
+    void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
+                         FreezeEvent *freezeEventOut = nullptr,
+                         JudderEvent *judderEventOut = nullptr);
+
+    // Gets and resets data for the current freeze event.
+    FreezeEvent getAndResetFreezeEvent();
+
+    // Gets and resets data for the current judder event.
+    JudderEvent getAndResetJudderEvent();
+
+    // Retrieve the metrics.
+    const VideoRenderQualityMetrics &getMetrics();
+
+    // Called when a change in codec state will result in a content discontinuity - e.g. flush.
+    void resetForDiscontinuity();
+
+    // Clear out all metrics and tracking - e.g. codec reconfigured.
+    void clear();
+
+private:
+    // Tracking of frames that are waiting to be rendered to the display.
+    struct FrameInfo {
+        int64_t contentTimeUs;
+        int64_t desiredRenderTimeUs;
+    };
+
+    // Historic tracking of frame durations
+    struct FrameDurationUs {
+        static const int SIZE = 5;
+
+        FrameDurationUs() {
+            for (int i = 0; i < SIZE; ++i) {
+                durationUs[i] = -1;
+            }
+            priorTimestampUs = -1;
+        }
+
+        int32_t &operator[](int index) {
+            assert(index < SIZE);
+            return durationUs[index];
+        }
+
+        const int32_t &operator[](int index) const {
+            assert(index < SIZE);
+            return durationUs[index];
+        }
+
+        // The duration of the past N frames.
+        int32_t durationUs[SIZE];
+
+        // The timestamp of the previous frame.
+        int64_t priorTimestampUs;
+    };
+
+    // Configure histograms for the metrics.
+    static void configureHistograms(VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // The current time in microseconds.
+    static int64_t nowUs();
+
+    // A new frame has been processed, so update the frame durations based on the new frame
+    // timestamp.
+    static void updateFrameDurations(FrameDurationUs &durationUs, int64_t newTimestampUs);
+
+    // Update a frame rate if, and only if, one can be detected.
+    static void updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
+                                const Configuration &c);
+
+    // Examine the past few frames to detect the frame rate based on each frame's render duration.
+    static float detectFrameRate(const FrameDurationUs &durationUs, const Configuration &c);
+
+    // Determine whether or not 3:2 pulldown (for displaying 24fps content on 60Hz displays) is
+    // occurring.
+    static bool is32pulldown(const FrameDurationUs &durationUs, const Configuration &c);
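
For orientation only: a minimal sketch of what frame-rate detection over the last few durations could look like. This is not the actual implementation; the tolerance handling is simplified and the function name is invented.

    #include <cstdint>
    #include <cstdlib>

    // Illustrative only: report a frame rate when the recent durations agree
    // within the tolerance; return a negative value when no stable rate exists.
    float detectStableFrameRate(const int32_t (&durationUs)[5], int32_t toleranceUs) {
        if (durationUs[0] <= 0) {
            return -1.0f;
        }
        for (int i = 1; i < 5; ++i) {
            if (durationUs[i] <= 0 || std::abs(durationUs[i] - durationUs[0]) > toleranceUs) {
                return -1.0f;  // not enough history, or the durations disagree
            }
        }
        return 1000000.0f / durationUs[0];  // e.g. ~16667us per frame -> ~60fps
    }
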
+
+    // Process a frame freeze.
+    static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
+                              VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // Retrieve a freeze event if an event just finished.
+    static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
+                                        FreezeEvent &e, const VideoRenderQualityMetrics & m,
+                                        const Configuration &c, FreezeEvent *freezeEventOut);
+
+    // Compute a judder score for the previously-rendered frame.
+    static int64_t computePreviousJudderScore(const FrameDurationUs &actualRenderDurationUs,
+                                              const FrameDurationUs &contentRenderDurationUs,
+                                              const Configuration &c);
+
+    // Process a frame judder.
+    static void processJudder(int32_t judderScore, int64_t judderTimeUs,
+                              int64_t lastJudderEndTimeUs,
+                              const FrameDurationUs &contentDurationUs,
+                              const FrameDurationUs &actualDurationUs, JudderEvent &e,
+                              VideoRenderQualityMetrics &m, const Configuration &c);
+
+    // Retrieve a judder event if an event just finished.
+    static void maybeCaptureJudderEvent(int64_t actualRenderTimeUs, int64_t lastJudderEndTimeUs,
+                                        JudderEvent &e, const VideoRenderQualityMetrics & m,
+                                        const Configuration &c, JudderEvent *judderEventOut);
+
+    // Check to see if a discontinuity has occurred by examining the content time and the
+    // app-desired render time. If so, reset some internal state.
+    bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+    // Update the metrics because a skipped frame was detected.
+    void processMetricsForSkippedFrame(int64_t contentTimeUs);
+
+    // Update the metrics because a dropped frame was detected.
+    void processMetricsForDroppedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs);
+
+    // Update the metrics because a rendered frame was detected.
+    void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
+                                        int64_t actualRenderTimeUs,
+                                        FreezeEvent *freezeEventOut, JudderEvent *judderEventOut);
+
+    // Configurable elements of the metrics algorithms.
+    const Configuration mConfiguration;
+
+    // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
+    VideoRenderQualityMetrics mMetrics;
+
+    // The most recently processed timestamp referring to the position in the content stream.
+    int64_t mLastContentTimeUs;
+
+    // The most recently processed timestamp referring to the wall clock time a frame was rendered.
+    int64_t mLastRenderTimeUs;
+
+    // The most recent timestamp of the first frame rendered after the freeze.
+    int64_t mLastFreezeEndTimeUs;
+
+    // The most recent timestamp of frame judder.
+    int64_t mLastJudderEndTimeUs;
+
+    // The render duration of the playback.
+    int64_t mRenderDurationMs;
+
+    // True if the previous frame was dropped.
+    bool mWasPreviousFrameDropped;
+
+    // The freeze event that's currently being tracked.
+    FreezeEvent mFreezeEvent;
+
+    // The judder event that's currently being tracked.
+    JudderEvent mJudderEvent;
+
+    // Frames skipped at the end of playback shouldn't really be considered skipped; therefore,
+    // keep a list of these frames and process them as skipped frames the next time a frame is rendered.
+    std::list<int64_t> mPendingSkippedFrameContentTimeUsList;
+
+    // Since the system only signals when a frame is rendered, dropped frames are detected by
+    // checking to see if the next expected frame is rendered. If not, it is considered dropped.
+    std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;
+
+    // When B-frames are present in the stream, a P-frame will be queued before the B-frame even
+    // though it is rendered afterwards. Therefore, the P-frame is held here and only inserted into
+    // mNextExpectedRenderedFrameQueue at the point that maintains render order.
+    int64_t mTunnelFrameQueuedContentTimeUs;
+
+    // Frame durations derived from timestamps encoded into the content stream. These are the
+    // durations that each frame is supposed to be rendered for.
+    FrameDurationUs mContentFrameDurationUs;
+
+    // Frame durations derived from timestamps passed in by the app, indicating the wall clock time
+    // at which the app would like to have the frame rendered.
+    FrameDurationUs mDesiredFrameDurationUs;
+
+    // Frame durations derived from timestamps captured by the display subsystem, indicating the
+    // wall clock time at which the frame is actually rendered.
+    FrameDurationUs mActualFrameDurationUs;
+};
+
+}  // namespace android
+
+#endif  // VIDEO_RENDER_QUALITY_TRACKER_H_
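
To illustrate how the interface above is meant to be driven, here is a short hedged usage sketch; the timestamps are invented and error handling is omitted.

    VideoRenderQualityTracker tracker;  // default Configuration

    // The app released a frame (content time 0us) to be rendered at an invented future time.
    tracker.onFrameReleased(/*contentTimeUs=*/0, /*desiredRenderTimeNs=*/16'600'000);

    // The display subsystem later reports when the frame actually hit the screen.
    VideoRenderQualityTracker::FreezeEvent freeze;
    VideoRenderQualityTracker::JudderEvent judder;
    tracker.onFrameRendered(/*contentTimeUs=*/0, /*actualRenderTimeNs=*/17'000'000,
                            &freeze, &judder);
    if (freeze.valid) { /* a freeze event just completed; report it */ }
    if (judder.valid) { /* a judder event just completed; report it */ }

    const VideoRenderQualityMetrics &metrics = tracker.getMetrics();
    // e.g. metrics.actualFrameRate, metrics.freezeScore, metrics.judderRate, ...
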
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index f7bf3ba..f4ccaba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -143,7 +143,7 @@
 
     // use consumer usage bits queried from encoder, but always add
     // HW_VIDEO_ENCODER for backward compatibility.
-    uint32_t  consumerUsage;
+    uint64_t  consumerUsage;
     void *_params = &consumerUsage;
     uint8_t *params = static_cast<uint8_t*>(_params);
     fnStatus = UNKNOWN_ERROR;
@@ -155,15 +155,32 @@
                         outParams.data() + outParams.size(),
                         params);
             });
+
+    // try 64-bit consumer usage first
     auto transStatus = omxNode->getParameter(
-            static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits),
+            static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits64),
             inHidlBytes(&consumerUsage, sizeof(consumerUsage)),
             _hidl_cb);
     if (!transStatus.isOk()) {
         return toStatus(FAILED_TRANSACTION);
     }
     if (fnStatus != OK) {
-        consumerUsage = 0;
+        // try 32-bit consumer usage upon failure
+        uint32_t usage;
+        _params = &usage;
+        params = static_cast<uint8_t*>(_params);
+        transStatus = omxNode->getParameter(
+                static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits),
+                inHidlBytes(&usage, sizeof(usage)),
+                _hidl_cb);
+        if (!transStatus.isOk()) {
+            return toStatus(FAILED_TRANSACTION);
+        }
+        if (fnStatus != OK) {
+            consumerUsage = 0;
+        } else {
+            consumerUsage = usage;
+        }
     }
 
     OMX_PARAM_PORTDEFINITIONTYPE def;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bebd516..c82a303 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -316,7 +316,7 @@
     // that a new message is available on the queue. Otherwise, the message stays on the queue, but
     // the listener is not notified of it. It will process this message when a subsequent message
     // is posted with |realTime| set to true.
-    void post(const omx_message &msg, bool realTime = true);
+    void post(const omx_message &msg, bool realTime);
 
     bool loop();
 
@@ -325,18 +325,15 @@
 
 private:
     enum {
-        // This is used for frame_rendered message batching, which will eventually end up in a
-        // single AMessage in MediaCodec when it is signaled to the app. AMessage can contain
-        // up-to 64 key-value pairs, and each frame_rendered message uses 2 keys, so the max
-        // value for this would be 32. Nonetheless, limit this to 12 to which gives at least 10
-        // mseconds of batching at 120Hz.
-        kMaxQueueSize = 12,
+        // Don't delay non-realtime messages longer than 200ms
+        kMaxBatchedDelayNs = 200 * 1000 * 1000,
     };
 
     Mutex mLock;
 
     sp<OMXNodeInstance> const mOwner;
     bool mDone;
+    bool mHasBatchedMessages;
     Condition mQueueChanged;
     std::list<omx_message> mQueue;
 
@@ -350,7 +347,8 @@
 
 OMXNodeInstance::CallbackDispatcher::CallbackDispatcher(const sp<OMXNodeInstance> &owner)
     : mOwner(owner),
-      mDone(false) {
+      mDone(false),
+      mHasBatchedMessages(false) {
     mThread = new CallbackDispatcherThread(this);
     mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
 }
@@ -358,7 +356,6 @@
 OMXNodeInstance::CallbackDispatcher::~CallbackDispatcher() {
     {
         Mutex::Autolock autoLock(mLock);
-
         mDone = true;
         mQueueChanged.signal();
     }
@@ -377,8 +374,11 @@
     Mutex::Autolock autoLock(mLock);
 
     mQueue.push_back(msg);
-    if (realTime || mQueue.size() >= kMaxQueueSize) {
+    if (realTime) {
         mQueueChanged.signal();
+    } else if (!mHasBatchedMessages) {
+        mHasBatchedMessages = true;
+        mQueueChanged.signal(); // The first non-realtime message is not batched.
     }
 }
 
@@ -393,11 +393,16 @@
 bool OMXNodeInstance::CallbackDispatcher::loop() {
     for (;;) {
         std::list<omx_message> messages;
+        std::list<long long> messageTimestamps;
 
         {
             Mutex::Autolock autoLock(mLock);
             while (!mDone && mQueue.empty()) {
-                mQueueChanged.wait(mLock);
+                if (mHasBatchedMessages) {
+                    mQueueChanged.waitRelative(mLock, kMaxBatchedDelayNs);
+                } else {
+                    mQueueChanged.wait(mLock);
+                }
             }
 
             if (mDone) {
@@ -2447,7 +2452,7 @@
     msg.type = omx_message::EMPTY_BUFFER_DONE;
     msg.fenceFd = fenceFd;
     msg.u.buffer_data.buffer = instance->findBufferID(pBuffer);
-    instance->mDispatcher->post(msg);
+    instance->mDispatcher->post(msg, true /* realTime */);
 
     return OMX_ErrorNone;
 }
@@ -2475,7 +2480,7 @@
     msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
     msg.u.extended_buffer_data.flags = pBuffer->nFlags;
     msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
-    instance->mDispatcher->post(msg);
+    instance->mDispatcher->post(msg, true /* realTime */);
 
     return OMX_ErrorNone;
 }
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
index 4827d9e..0906433 100644
--- a/media/libstagefright/omx/OMXStore.cpp
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -140,7 +140,8 @@
 
         Vector<String8> roles;
         OMX_ERRORTYPE err = plugin->getRolesOfComponent(name, &roles);
-        if (err == OMX_ErrorNone) {
+        static_assert(std::string_view("OMX.google.").size() == 11);
+        if (err == OMX_ErrorNone && strncmp(name, "OMX.google.", 11) == 0) {
             bool skip = false;
             for (String8 role : roles) {
                 if (role.find("video_decoder") != -1 || role.find("video_encoder") != -1) {
diff --git a/media/libstagefright/omx/OmxGraphicBufferSource.cpp b/media/libstagefright/omx/OmxGraphicBufferSource.cpp
index 9484046..33481e3 100644
--- a/media/libstagefright/omx/OmxGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/OmxGraphicBufferSource.cpp
@@ -85,7 +85,7 @@
         int32_t bufferCount,
         uint32_t frameWidth,
         uint32_t frameHeight,
-        uint32_t consumerUsage) {
+        uint64_t consumerUsage) {
     if (omxNode == NULL) {
         return BAD_VALUE;
     }
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index 264c01d..1c3cb4e 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -425,8 +425,16 @@
     t->attr.anwBuffer.stride = graphicBuffer->getStride();
     t->attr.anwBuffer.format = static_cast<PixelFormat>(
             graphicBuffer->getPixelFormat());
-    t->attr.anwBuffer.layerCount = graphicBuffer->getLayerCount();
-    t->attr.anwBuffer.usage = graphicBuffer->getUsage();
+    // HACK
+    // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+    // anwBuffer.usage      4 bytes : GraphicBuffer::usage      8 bytes
+    // We would like to retain the high part of usage in the high part of layerCount.
+    uint64_t usage = graphicBuffer->getUsage();
+    uint32_t usageHigh = (usage >> 32);
+    uint32_t usageLow = (0xFFFFFFFF & usage);
+    uint32_t layerLow = graphicBuffer->getLayerCount();
+    t->attr.anwBuffer.layerCount = ((uint64_t(usageHigh) << 32) | layerLow);
+    t->attr.anwBuffer.usage = usageLow;
     t->nativeHandle = graphicBuffer->handle;
     return t;
 }
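
Assuming a reader of the struct follows the same packing convention, the original values can be recovered as sketched below. The helper is hypothetical and not part of this change.

    #include <cstdint>

    // Reverses the packing above: the high 32 bits of the transported layerCount
    // carry the high half of usage, and the transported usage carries the low half.
    void unpackUsageAndLayerCount(uint64_t packedLayerCount, uint32_t packedUsage,
                                  uint64_t *outUsage, uint32_t *outLayerCount) {
        *outUsage = (packedLayerCount & 0xFFFFFFFF00000000ull) | packedUsage;
        *outLayerCount = static_cast<uint32_t>(packedLayerCount & 0xFFFFFFFFull);
    }
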
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
index e576d75..a23efac 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
@@ -70,7 +70,7 @@
         int32_t bufferCount,
         uint32_t frameWidth,
         uint32_t frameHeight,
-        uint32_t consumerUsage);
+        uint64_t consumerUsage);
 
     // Rest of the interface in GraphicBufferSource.
 
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3c00a1c..3598e8d 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -32,6 +32,7 @@
         "libEGL",
         "libGLESv1_CM",
         "libGLESv2",
+        "libvulkan",
         "liblog",
         "libnativewindow",
         "libprocessgroup",
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
index 3ae35ec..af55172 100644
--- a/media/libstagefright/renderfright/gl/ProgramCache.cpp
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -299,8 +299,8 @@
                 highp vec3 ScaleLuminance(highp vec3 color) {
                     // The formula is:
                     // alpha * pow(Y, gamma - 1.0) * color + beta;
-                    // where alpha is 1000.0, gamma is 1.2, beta is 0.0.
-                    return color * 1000.0 * pow(color.y, 0.2);
+                    // where alpha is displayMaxLuminance, gamma is 1.2, beta is 0.0.
+                    return color * displayMaxLuminance * pow(color.y, 0.2);
                 }
             )__SHADER__";
             break;
@@ -316,7 +316,6 @@
     // Tone map absolute light to display luminance range.
     switch (needs.getInputTF()) {
         case Key::INPUT_TF_ST2084:
-        case Key::INPUT_TF_HLG:
             switch (needs.getOutputTF()) {
                 case Key::OUTPUT_TF_HLG:
                     // Right now when mixed PQ and HLG contents are presented,
@@ -374,7 +373,11 @@
                                     return color * slope;
                                 } else if (nits < x1) {
                                     // scale [x0, x1] to [y0, y1] linearly
-                                    float slope = (y1 - y0) / (x1 - x0);
+                                    // Use highp since some compilers may do this
+                                    // operation as reciprocal multiplication with
+                                    // re-association that could exceed the range
+                                    // of mediump float.
+                                    highp float slope = (y1 - y0) / (x1 - x0);
                                     nits = y0 + (nits - x0) * slope;
                                 } else if (nits < x2) {
                                     // scale [x1, x2] to [y1, y2] using Hermite interp
@@ -396,6 +399,14 @@
                     break;
             }
             break;
+        case Key::INPUT_TF_HLG:
+            // HLG OOTF is already applied as part of ScaleLuminance
+            fs << R"__SHADER__(
+                 highp vec3 ToneMap(highp vec3 color) {
+                     return color;
+                 }
+             )__SHADER__";
+            break;
         default:
             // inverse tone map; the output luminance can be up to maxOutLumi.
             fs << R"__SHADER__(
diff --git a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
index 2697ff4..3a67cc2 100644
--- a/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
+++ b/media/libstagefright/renderfright/tests/RenderEngineTest.cpp
@@ -60,7 +60,7 @@
     }
 
     static sp<GraphicBuffer> allocateDefaultBuffer() {
-        return new GraphicBuffer(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
+        return sp<GraphicBuffer>::make(DEFAULT_DISPLAY_WIDTH, DEFAULT_DISPLAY_HEIGHT,
                                  HAL_PIXEL_FORMAT_RGBA_8888, 1,
                                  GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                                          GRALLOC_USAGE_HW_RENDER,
@@ -69,7 +69,7 @@
 
     // Allocates a 1x1 buffer to fill with a solid color
     static sp<GraphicBuffer> allocateSourceBuffer(uint32_t width, uint32_t height) {
-        return new GraphicBuffer(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
+        return sp<GraphicBuffer>::make(width, height, HAL_PIXEL_FORMAT_RGBA_8888, 1,
                                  GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                                          GRALLOC_USAGE_HW_TEXTURE,
                                  "input");
@@ -275,7 +275,7 @@
         renderengine::DisplaySettings settings;
         std::vector<const renderengine::LayerSettings*> layers;
         // Meaningless buffer since we don't do any drawing
-        sp<GraphicBuffer> buffer = new GraphicBuffer();
+        sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
         invokeDraw(settings, layers, buffer);
     }
 
diff --git a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
index 97c7442..b82586b 100644
--- a/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
+++ b/media/libstagefright/renderfright/tests/RenderEngineThreadedTest.cpp
@@ -70,7 +70,7 @@
 }
 
 TEST_F(RenderEngineThreadedTest, bindExternalBuffer_withBuffer) {
-    sp<GraphicBuffer> buf = new GraphicBuffer();
+    sp<GraphicBuffer> buf = sp<GraphicBuffer>::make();
     EXPECT_CALL(*mRenderEngine, bindExternalTextureBuffer(0, buf, Eq(nullptr)))
             .WillOnce(Return(NO_ERROR));
     status_t result = mThreadedRE->bindExternalTextureBuffer(0, buf, nullptr);
@@ -83,7 +83,7 @@
 }
 
 TEST_F(RenderEngineThreadedTest, cacheExternalTextureBuffer_withBuffer) {
-    sp<GraphicBuffer> buf = new GraphicBuffer();
+    sp<GraphicBuffer> buf = sp<GraphicBuffer>::make();
     EXPECT_CALL(*mRenderEngine, cacheExternalTextureBuffer(buf));
     mThreadedRE->cacheExternalTextureBuffer(buf);
 }
@@ -198,7 +198,7 @@
 TEST_F(RenderEngineThreadedTest, drawLayers) {
     renderengine::DisplaySettings settings;
     std::vector<const renderengine::LayerSettings*> layers;
-    sp<GraphicBuffer> buffer = new GraphicBuffer();
+    sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
     base::unique_fd bufferFence;
     base::unique_fd drawFence;
 
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 2f516d5..100c0cd 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -46,7 +46,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 }
@@ -123,20 +122,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -168,7 +158,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -177,7 +167,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -208,10 +198,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -223,6 +213,9 @@
     }
 
     if (mNextExpectedSeqNoValid) {
+        if (mNextExpectedSeqNo > seqNum) {
+            ALOGE("Reversed exp seq# %d \t current head %d", mNextExpectedSeqNo, seqNum);
+        }
         mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
@@ -241,10 +234,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
@@ -332,6 +325,11 @@
 }
 
 bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+    if (buffer->size() == 0) {
+        ALOGE("b/230630526 buffer->size() == 0");
+        android_errorWriteLog(0x534e4554, "230630526");
+        return false;
+    }
     const uint8_t *data = buffer->data();
     unsigned nalType = data[0] & 0x1f;
     if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -618,13 +616,13 @@
 
 int32_t AAVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
+    CHECK(!queue->empty());
     // pick the first sequence number has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    unsigned nalType = buffer->data()[0] & 0x1f;
-    if (nalType != 28 || buffer->data()[1] & 0x80) {
+    if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
         return firstSeqNo;
     }
 
@@ -634,7 +632,7 @@
         if (rtpTime + jit >= play) {
             break;
         }
-        if ((data[1] & 0x80)) {
+        if (it->size() >= 2 && (data[1] & 0x80)) {
             const int32_t seqNo = it->int32Data();
             ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
             firstSeqNo = seqNo;
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..7b5c24a 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -53,7 +53,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 
@@ -133,20 +132,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -178,7 +168,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -187,7 +177,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -218,10 +208,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -251,10 +241,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
@@ -629,13 +619,13 @@
 
 int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
+    CHECK(!queue->empty());
     // pick the first sequence number has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    unsigned nalType = buffer->data()[0] & 0x1f;
-    if (nalType != 28 || buffer->data()[2] & 0x80) {
+    if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
         return firstSeqNo;
     }
 
@@ -645,7 +635,7 @@
         if (rtpTime + jit >= play) {
             break;
         }
-        if ((data[2] & 0x80)) {
+        if (it->size() >= 3 && (data[2] & 0x80)) {
             const int32_t seqNo = it->int32Data();
             ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
             firstSeqNo = seqNo;
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a61f48f..165c336 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -16,6 +16,12 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ARTPConnection"
+#define INET_ECN_NOT_ECT    0x00    /* ECN was not enabled */
+#define INET_ECN_ECT_1      0x01    /* ECN capable packet */
+#define INET_ECN_ECT_0      0x02    /* ECN capable packet */
+#define INET_ECN_CE         0x03    /* ECN congestion */
+#define INET_ECN_MASK       0x03    /* Mask of ECN bits */
+
 #include <utils/Log.h>
 
 #include <media/stagefright/rtsp/ARTPAssembler.h>
@@ -56,6 +62,7 @@
 
 // static
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
+const int64_t ARTPConnection::kMinOneSecondNotifyDelayUs = 100000ll;
 
 struct ARTPConnection::StreamInfo {
     bool isIPv6;
@@ -84,7 +91,10 @@
       mPollEventPending(false),
       mLastReceiverReportTimeUs(-1),
       mLastBitrateReportTimeUs(-1),
+      mLastCongestionNotifyTimeUs(-1),
       mTargetBitrate(-1),
+      mRtpSockOptEcn(0),
+      mIsIPv6(false),
       mStaticJitterTimeMs(kStaticJitterTimeMs) {
 }
 
@@ -175,7 +185,7 @@
 // static
 void ARTPConnection::MakeRTPSocketPair(
         int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
-        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork, int32_t sockOptEcn) {
     bool isIPv6 = false;
     if (strchr(localIp, ':') != NULL)
         isIPv6 = true;
@@ -204,6 +214,24 @@
         }
     }
 
+    if (sockOptEcn != 0) {
+        int sockOptForTOS = 1;
+        if (setsockopt(*rtpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+               isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+               (int *)&sockOptForTOS, sizeof(sockOptForTOS)) < 0) {
+            ALOGE("failed to set recv sockopt TOS on rtpsock(%d). err=%s", *rtpSocket,
+                strerror(errno));
+        } else {
+            ALOGD("successfully set recv sockopt TOS on rtpsock(%d)", *rtpSocket);
+            int result = setsockopt(*rtcpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+                isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+                (int *)&sockOptForTOS, sizeof(sockOptForTOS));
+            if (result >= 0) {
+                ALOGD("successfully set recv sockopt TOS on rtcpsock(%d).", *rtcpSocket);
+            }
+        }
+    }
+
     bumpSocketBufferSize(*rtcpSocket);
 
     struct sockaddr *addr;
@@ -593,32 +621,25 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
-    struct sockaddr *pRemoteRTCPAddr;
-    int sizeSockSt;
-    if (s->isIPv6) {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
-        sizeSockSt = sizeof(struct sockaddr_in6);
-    } else {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
-        sizeSockSt = sizeof(struct sockaddr_in);
-    }
-    socklen_t remoteAddrLen =
-        (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeSockSt : 0;
+    struct msghdr sMsg = {};
+    struct iovec sIov[1] = {};
 
-    if (mFlags & kViLTEConnection) {
-        remoteAddrLen = 0;
-    }
+    sIov[0].iov_base = (char *) buffer->data();
+    sIov[0].iov_len = buffer->capacity();
+
+    sMsg.msg_iov = sIov;
+    sMsg.msg_iovlen = 1;
+
+    int cMsgSize = sizeof(struct cmsghdr) + sizeof(uint8_t);
+    char buf[CMSG_SPACE(cMsgSize)];
+    sMsg.msg_control = buf;
+    sMsg.msg_controllen = sizeof(buf);
+    sMsg.msg_flags = 0;
 
     ssize_t nbytes;
     do {
-        nbytes = recvfrom(
-            receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
-            buffer->data(),
-            buffer->capacity(),
-            0,
-            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
-            remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        // Use recvmsg() so that the TOS byte of the incoming packet can be read
+        nbytes = recvmsg(receiveRTP ? s->mRTPSocket : s->mRTCPSocket, &sMsg, 0);
         mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
@@ -633,6 +654,10 @@
         }
     }
 
+    if (nbytes > 0) {
+        handleIpHeadersIfReceived(s, sMsg);
+    }
+
     buffer->setRange(0, nbytes);
 
     // ALOGI("received %d bytes.", buffer->size());
@@ -647,13 +672,68 @@
     return err;
 }
 
+/* This function checks whether a TOS byte is present in the received IP
+ * packet. If it is present and its CE bit is set, it notifies the upper
+ * layer about congestion.
+ */
+void ARTPConnection::handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg) {
+    struct cmsghdr *cMsg;
+    cMsg = CMSG_FIRSTHDR(&sMsg);
+
+    if (cMsg == NULL) {
+        ALOGV("cmsg is null");
+    }
+
+    for (; cMsg != NULL; cMsg = CMSG_NXTHDR(&sMsg, cMsg)) {
+        bool isTOSHeader = ((cMsg->cmsg_level == (mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP))
+                              && (cMsg->cmsg_type == (mIsIPv6 ? IPV6_TCLASS : IP_TOS))
+                              && (cMsg->cmsg_len));
+        if (isTOSHeader) {
+            uint8_t receivedTOS;
+            receivedTOS = *((uint8_t *) CMSG_DATA(cMsg));
+            // checking CE bit is set
+            bool isCEBitMarked = ((receivedTOS & INET_ECN_MASK) == INET_ECN_CE);
+
+            ALOGV("receivedTos(value -> %d)", receivedTOS);
+
+            if (isCEBitMarked) {
+                ALOGD("receivedTos(value -> %d), is ECN CE marked = %d",
+                    receivedTOS, isCEBitMarked);
+                notifyCongestionToUpperLayerIfNeeded(s);
+            }
+            break;
+        }
+    }
+}
+
+/* This function is used to notify the upper layer about congestion during a video call */
+void ARTPConnection::notifyCongestionToUpperLayerIfNeeded(StreamInfo *s) {
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mLastCongestionNotifyTimeUs <= 0) {
+        mLastCongestionNotifyTimeUs = nowUs;
+    }
+
+    bool isNeedToUpdate = (mLastCongestionNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs);
+    ALOGD("ECN info set by upper layer=%d, isNeedToUpdate=%d", mRtpSockOptEcn, isNeedToUpdate);
+
+    if ((mRtpSockOptEcn != 0) && (isNeedToUpdate)) {
+        sp<AMessage> notify = s->mNotifyMsg->dup();
+        notify->setInt32("rtcp-event", 1);
+        notify->setInt32("payload-type", ARTPSource::RTP_QUALITY_CD);
+        notify->post();
+        mLastCongestionNotifyTimeUs = nowUs;
+        ALOGD("Congestion detected in n/w, Notify upper layer");
+    }
+}
+
 ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
         struct sockaddr* pRemoteRTCPAddr;
         int sizeSockSt;
 
         /* It seems this isIPv6 variable is useless.
          * We should remove it to prevent confusion */
-        if (info->isIPv6) {
+        if (mIsIPv6) {
             pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
             sizeSockSt = sizeof(struct sockaddr_in6);
         } else {
@@ -1215,12 +1295,20 @@
     mTargetBitrate = targetBitrate;
 }
 
+void ARTPConnection::setRtpSockOptEcn(int32_t sockOptEcn) {
+    mRtpSockOptEcn = sockOptEcn;
+}
+
+void ARTPConnection::setIsIPv6(const char *localIp) {
+    mIsIPv6 = (strchr(localIp, ':') != nullptr);
+}
+
 void ARTPConnection::checkRxBitrate(int64_t nowUs) {
     if (mLastBitrateReportTimeUs <= 0) {
         mCumulativeBytes = 0;
         mLastBitrateReportTimeUs = nowUs;
     }
-    else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+    else if (mLastEarlyNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs) {
         int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
         int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
         mLastEarlyNotifyTimeUs = nowUs;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 717d8af..c5b0a1e 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -264,12 +264,12 @@
 
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     int64_t nowUs = ALooper::GetNowUs();
+    int64_t rtpTime = 0;
     uint32_t seqNum = (uint32_t)buffer->int32Data();
-    int32_t ssrc = 0, rtpTime = 0;
+    int32_t ssrc = 0;
 
     buffer->meta()->findInt32("ssrc", &ssrc);
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-    mLatestRtpTime = rtpTime;
 
     if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
         mFirstSysTime = nowUs;
@@ -277,7 +277,7 @@
         mLastSysAnchorTimeUpdatedUs = nowUs;
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
-        mFirstRtpTime = rtpTime;
+        mFirstRtpTime = (uint32_t)rtpTime;
         mFirstSsrc = ssrc;
         ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
                 mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
@@ -352,6 +352,18 @@
 
     mQueue.insert(it, buffer);
 
+    /**
+     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
+     * We calculate another jitter only for the 'Head NAL units'.
+     */
+    ALOGV("<======== Insert %d", seqNum);
+    rtpTime = mAssembler->findRTPTime(mFirstRtpTime, buffer);
+    if (rtpTime != mLatestRtpTime) {
+        mJitterCalc->putBaseData(rtpTime, nowUs);
+    }
+    mJitterCalc->putInterArrivalData(rtpTime, nowUs);
+    mLatestRtpTime = rtpTime;
+
     return true;
 }
 
@@ -680,14 +692,6 @@
     mStaticJbTimeMs = jbTimeMs;
 }
 
-void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putBaseData(timeStamp, arrivalTime);
-}
-
-void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
-}
-
 void ARTPSource::setJbTimer(const sp<AMessage> timer) {
     mJbTimer = timer;
 }
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8990f0c..41f2d67 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -255,9 +255,34 @@
     if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
         mRTPCVODegrees = rtpCVODegrees;
 
+    bool needToSetSockOpt = false;
     int32_t dscp = 0;
-    if (params->findInt32(kKeyRtpDscp, &dscp))
-        updateSocketDscp(dscp);
+    if (params->findInt32(kKeyRtpDscp, &dscp)) {
+        mRtpLayer3Dscp = dscp << 2;
+        needToSetSockOpt = true;
+    }
+
+    int32_t ecn = 0;
+    if (params->findInt32(kKeyRtpEcn, &ecn)) {
+        /*
+         * @ecn, possible value for ECN.
+         *  +-----+-----+
+         *  | ECN FIELD |
+         *  +-----+-----+
+         *    ECT   CE         [Obsolete] RFC 2481 names for the ECN bits.
+         *     0     0         Not-ECT
+         *     0     1         ECT (ECN-Capable Transport) (1)
+         *     1     0         ECT (ECN-Capable Transport) (0)
+         *     1     1         CE (Congestion Experienced)
+         *
+         */
+        mRtpSockOptEcn = ecn;
+        needToSetSockOpt = true;
+    }
+
+    if (needToSetSockOpt) {
+        updateSocketOpt();
+    }
 
     int64_t sockNetwork = 0;
     if (params->findInt64(kKeySocketNetwork, &sockNetwork))
@@ -1438,18 +1463,29 @@
     mPayloadType = payloadType;
 }
 
-void ARTPWriter::updateSocketDscp(int32_t dscp) {
-    mRtpLayer3Dscp = dscp << 2;
+/*
+ * This function sets the socket option that controls the TOS/TCLASS field of the IP header.
+ */
+void ARTPWriter::updateSocketOpt() {
+    /*
+     * 0     1     2     3     4     5     6     7
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     * |          DS FIELD, DSCP           | ECN FIELD |
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     */
+    int sockOpt = mRtpLayer3Dscp ^ mRtpSockOptEcn;
+    ALOGD("Update socket opt with sockopt=%d, mRtpLayer3Dscp=%d, mRtpSockOptEcn=%d",
+                sockOpt, mRtpLayer3Dscp, mRtpSockOptEcn);
 
-    /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
-    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
-        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    /* sockOpt will be used as the TOS/TCLASS value in the IP header */
+    if (setsockopt(mRTPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt)) < 0) {
+        ALOGE("failed to set sockopt on rtpsock. err=%s", strerror(errno));
     } else {
-        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
-        setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
-        ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+        ALOGD("successfully set sockopt. opt=%d", sockOpt);
+        setsockopt(mRTCPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt));
+        ALOGD("successfully set sockopt rtcpsock. opt=%d", sockOpt);
     }
 }
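
A worked composition of the byte described above, with illustrative values (DSCP AF41 = 34, ECN ECT(0) = 2). Since the DSCP and ECN bit ranges do not overlap, XOR and OR give the same result; the helper name is invented for this sketch.

    #include <cstdint>

    // Illustrative only: DSCP occupies the upper 6 bits, ECN the lower 2 bits.
    int composeTosByte(int32_t dscp, int32_t ecn) {
        int32_t layer3Dscp = dscp << 2;  // shift DSCP above the 2 ECN bits
        return layer3Dscp ^ ecn;         // equivalent to OR here
    }
    // composeTosByte(34 /* AF41 */, 2 /* ECT(0) */) == 138 (0x8A)
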
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
index 2f8b8ba..70ce388 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
@@ -50,7 +50,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
index 9575d8c..ed3f1ae 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
@@ -51,7 +51,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index 39161b6..8f87642 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -44,6 +44,13 @@
     virtual void onByeReceived() = 0;
     virtual bool initCheck() { return true; }
 
+    // Utility functions
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
 protected:
     virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source) = 0;
     virtual void packetLost() = 0;
@@ -64,13 +71,6 @@
     bool mShowQueue;
     int32_t mShowQueueCnt;
 
-    // Utility functions
-    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
-    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
-    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
-    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
-    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
-
 private:
     int64_t mFirstFailureTimeUs;
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 73d2866..250de71 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -20,6 +20,7 @@
 
 #include <media/stagefright/foundation/AHandler.h>
 #include <utils/List.h>
+#include <sys/socket.h>
 
 namespace android {
 
@@ -48,6 +49,8 @@
     void setSelfID(const uint32_t selfID);
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
     void setTargetBitrate(int32_t targetBitrate);
+    void setRtpSockOptEcn(int32_t sockOptEcn);
+    void setIsIPv6(const char *localIp);
 
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -60,7 +63,8 @@
     static void MakeRTPSocketPair(
             int *rtpSocket, int *rtcpSocket,
             const char *localIp, const char *remoteIp,
-            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0,
+            int32_t sockOptEcn = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -77,6 +81,7 @@
     };
 
     static const int64_t kSelectTimeoutUs;
+    static const int64_t kMinOneSecondNotifyDelayUs;
 
     uint32_t mFlags;
 
@@ -87,9 +92,12 @@
     int64_t mLastReceiverReportTimeUs;
     int64_t mLastBitrateReportTimeUs;
     int64_t mLastEarlyNotifyTimeUs;
+    int64_t mLastCongestionNotifyTimeUs;
 
     int32_t mSelfID;
     int32_t mTargetBitrate;
+    int32_t mRtpSockOptEcn;
+    bool mIsIPv6;
 
     uint32_t mStaticJitterTimeMs;
 
@@ -103,6 +111,8 @@
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
     void checkRxBitrate(int64_t nowUs);
+    void notifyCongestionToUpperLayerIfNeeded(StreamInfo *s);
+    void handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
     ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index e9b4942..7d1faf2 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -50,6 +50,7 @@
         RTCP_FIRST_PACKET = 101,
         RTP_QUALITY = 102,
         RTP_QUALITY_EMC = 103,
+        RTP_QUALITY_CD = 104,
         RTCP_SR = 200,
         RTCP_RR = 201,
         RTCP_TSFB = 205,
@@ -81,8 +82,6 @@
     int32_t getBaseJitterTimeMs();
     int32_t getInterArrivalJitterTimeMs();
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
-    void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
-    void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
     void setJbTimer(const sp<AMessage> timer);
     void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
index 2982cf6..ecd29d0 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
@@ -50,7 +50,7 @@
     virtual status_t pause();
     void updateCVODegrees(int32_t cvoDegrees);
     void updatePayloadType(int32_t payloadType);
-    void updateSocketDscp(int32_t dscp);
+    void updateSocketOpt();
     void updateSocketNetwork(int64_t socketNetwork);
     uint32_t getSequenceNum();
     virtual uint64_t getAccumulativeBytes() override;
@@ -98,6 +98,7 @@
     struct sockaddr_in6 mRTPAddr6;
     struct sockaddr_in6 mRTCPAddr6;
     int32_t mRtpLayer3Dscp;
+    int32_t mRtpSockOptEcn;
     net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index e6b67ce..581292e 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -55,3 +55,20 @@
         "-Wall",
     ],
 }
+
+cc_test {
+    name: "VideoRenderQualityTracker_test",
+    srcs: ["VideoRenderQualityTracker_test.cpp"],
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libstagefright",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+}
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
index c43e1f8..19c8577 100644
--- a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 
 #include <fstream>
+#include <memory>
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <HevcUtils.h>
@@ -88,10 +89,10 @@
         stringLine >> type >> chunkLength;
         ASSERT_GT(chunkLength, 0) << "Length of data chunk must be greater than 0";
 
-        char *data = (char *)malloc(chunkLength);
+        std::unique_ptr<char[]> data(new char[chunkLength]);
         ASSERT_NE(data, nullptr) << "Failed to allocate data buffer of size: " << chunkLength;
 
-        mMediaFileStream.read(data, chunkLength);
+        mMediaFileStream.read(data.get(), chunkLength);
         ASSERT_EQ(mMediaFileStream.gcount(), chunkLength)
                 << "Failed to read complete file, bytes read: " << mMediaFileStream.gcount();
 
@@ -105,7 +106,7 @@
         offset += 3;
         ASSERT_LE(offset, chunkLength) << "NAL unit offset must not exceed the chunk length";
 
-        uint8_t *nalUnit = (uint8_t *)(data + offset);
+        uint8_t *nalUnit = (uint8_t *)(data.get() + offset);
         size_t nalUnitLength = chunkLength - offset;
 
         // Add NAL units only if they're of type: VPS/SPS/PPS/SEI
@@ -118,20 +119,18 @@
             size_t sizeNalUnit = hevcParams.getSize(index);
             ASSERT_EQ(sizeNalUnit, nalUnitLength) << "Invalid size returned for NAL: " << type;
 
-            uint8_t *destination = (uint8_t *)malloc(nalUnitLength);
+            std::unique_ptr<uint8_t[]> destination(new uint8_t[nalUnitLength]);
             ASSERT_NE(destination, nullptr)
                     << "Failed to allocate buffer of size: " << nalUnitLength;
 
-            bool status = hevcParams.write(index, destination, nalUnitLength);
+            bool status = hevcParams.write(index, destination.get(), nalUnitLength);
             ASSERT_TRUE(status) << "Unable to write NAL Unit data";
 
-            free(destination);
             index++;
         } else {
             err = hevcParams.addNalUnit(nalUnit, nalUnitLength);
             ASSERT_NE(err, (status_t)OK) << "Invalid NAL Unit added, type: " << type;
         }
-        free(data);
     }
 
     size_t numNalUnits = hevcParams.getNumNalUnitsOfType(kVPSCode);
@@ -166,10 +165,10 @@
             << "Expected NAL type: 34(PPS), found: " << typeNalUnit;
 
     size_t hvccBoxSize = kHvccBoxMaxSize;
-    uint8_t *hvcc = (uint8_t *)malloc(kHvccBoxMaxSize);
+    std::unique_ptr<uint8_t[]> hvcc(new uint8_t[kHvccBoxMaxSize]);
     ASSERT_NE(hvcc, nullptr) << "Failed to allocate a hvcc buffer of size: " << kHvccBoxMaxSize;
 
-    err = hevcParams.makeHvcc(hvcc, &hvccBoxSize, kNALSizeLength);
+    err = hevcParams.makeHvcc(hvcc.get(), &hvccBoxSize, kNALSizeLength);
     ASSERT_EQ(err, (status_t)OK) << "Unable to create hvcc box";
 
     ASSERT_GT(hvccBoxSize, kHvccBoxMinSize)
@@ -179,8 +178,6 @@
     if (frameRate != mFrameRate)
         cout << "[   WARN   ] Expected frame rate: " << mFrameRate << " Found: " << frameRate
              << endl;
-
-    free(hvcc);
 }
 
 // Info File contains the type and length for each chunk/frame
diff --git a/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
new file mode 100644
index 0000000..7823922
--- /dev/null
+++ b/media/libstagefright/tests/VideoRenderQualityTracker_test.cpp
@@ -0,0 +1,1027 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VideoRenderQualityTracker_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <media/stagefright/VideoRenderQualityTracker.h>
+
+namespace android {
+
+using Metrics = VideoRenderQualityMetrics;
+using Configuration = VideoRenderQualityTracker::Configuration;
+using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
+using JudderEvent = VideoRenderQualityTracker::JudderEvent;
+
+static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
+static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
+        VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
+
+class Helper {
+public:
+    Helper(double contentFrameDurationMs, const Configuration &configuration) :
+            mVideoRenderQualityTracker(configuration) {
+        mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+        mMediaTimeUs = 0;
+        mClockTimeNs = 0;
+    }
+
+    void changeContentFrameDuration(double contentFrameDurationMs) {
+        mContentFrameDurationUs = int64_t(contentFrameDurationMs * 1000);
+    }
+
+    template<typename T>
+    void render(std::initializer_list<T> renderDurationMsList) {
+        for (auto renderDurationMs : renderDurationMsList) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+                                                       &mJudderEvent);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += int64_t(renderDurationMs * 1000 * 1000);
+        }
+    }
+
+    void render(int numFrames, float durationMs = -1) {
+        int64_t durationUs = durationMs < 0 ? mContentFrameDurationUs : durationMs * 1000;
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mVideoRenderQualityTracker.onFrameRendered(mMediaTimeUs, mClockTimeNs, &mFreezeEvent,
+                                                       &mJudderEvent);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += durationUs * 1000;
+        }
+    }
+
+    void skip(int numFrames) {
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameSkipped(mMediaTimeUs);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += mContentFrameDurationUs * 1000;
+        }
+    }
+
+    void drop(int numFrames) {
+        for (int i = 0; i < numFrames; ++i) {
+            mVideoRenderQualityTracker.onFrameReleased(mMediaTimeUs);
+            mMediaTimeUs += mContentFrameDurationUs;
+            mClockTimeNs += mContentFrameDurationUs * 1000;
+        }
+    }
+
+    const Metrics & getMetrics() {
+        return mVideoRenderQualityTracker.getMetrics();
+    }
+
+    FreezeEvent getAndClearFreezeEvent() {
+        FreezeEvent e = std::move(mFreezeEvent);
+        mFreezeEvent.valid = false;
+        return e;
+    }
+
+    JudderEvent getAndClearJudderEvent() {
+        JudderEvent e = std::move(mJudderEvent);
+        mJudderEvent.valid = false;
+        return e;
+    }
+
+private:
+    VideoRenderQualityTracker mVideoRenderQualityTracker;
+    int64_t mContentFrameDurationUs;
+    int64_t mMediaTimeUs;
+    int64_t mClockTimeNs;
+    VideoRenderQualityTracker::FreezeEvent mFreezeEvent;
+    VideoRenderQualityTracker::JudderEvent mJudderEvent;
+};
+
+class VideoRenderQualityTrackerTest : public ::testing::Test {
+public:
+    VideoRenderQualityTrackerTest() {}
+};
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withDefaults) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn =
+        [](const std::string &, const std::string &, const std::string &defaultStr) -> std::string {
+            return defaultStr;
+        };
+
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withEmpty) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &, const std::string &) -> std::string {
+            return "";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withInvalid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &, const std::string &) -> std::string {
+            return "abc";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withAlmostValid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+            if (flag == "render_metrics_enabled") {
+                return "fals";
+            } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+                return "fals";
+            } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+                return "100a";
+            } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+                return "10b0";
+            } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+                return "c100";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+                return "1,5300,3b400,123";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+                return "2,5300*400,132";
+            } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+                return "3,12345678901234,5,7";
+            } else if (flag == "render_metrics_freeze_event_max") {
+                return "12345678901234";
+            } else if (flag == "render_metrics_freeze_event_details_max") {
+                return "12345.11321";
+            } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+                return "*!-";
+            } else if (flag == "render_metrics_judder_error_tolerance_us") {
+                return "10.5";
+            } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+                return "abc";
+            } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+                return "123,";
+            } else if (flag == "render_metrics_judder_event_max") {
+                return ",1234";
+            } else if (flag == "render_metrics_judder_event_details_max") {
+                return "10*10";
+            } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+                return "140-a";
+            }
+            return "";
+        }
+    };
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    Configuration d; // default configuration
+    EXPECT_EQ(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    EXPECT_EQ(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    EXPECT_EQ(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    EXPECT_EQ(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    EXPECT_EQ(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    EXPECT_EQ(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    EXPECT_EQ(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, getFromServerConfigurableFlags_withValid) {
+    Configuration::GetServerConfigurableFlagFn getServerConfigurableFlagFn{
+        [](const std::string &, const std::string &flag, const std::string &) -> std::string {
+            if (flag == "render_metrics_enabled") {
+                return "false";
+            } else if (flag == "render_metrics_are_skipped_frames_dropped") {
+                return "false";
+            } else if (flag == "render_metrics_max_expected_content_frame_duration_us") {
+                return "2000";
+            } else if (flag == "render_metrics_frame_rate_detection_tolerance_us") {
+                return "3000";
+            } else if (flag == "render_metrics_live_content_frame_drop_tolerance_us") {
+                return "4000";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_buckets") {
+                return "100,200,300,400";
+            } else if (flag == "render_metrics_freeze_duration_ms_histogram_to_score") {
+                return "1234567890120,1234567890121,1234567890122";
+            } else if (flag == "render_metrics_freeze_distance_ms_histogram_buckets") {
+                return "500,600,700,800,900";
+            } else if (flag == "render_metrics_freeze_event_max") {
+                return "5000";
+            } else if (flag == "render_metrics_freeze_event_details_max") {
+                return "6000";
+            } else if (flag == "render_metrics_freeze_event_distance_tolerance_ms") {
+                return "7000";
+            } else if (flag == "render_metrics_judder_error_tolerance_us") {
+                return "8000";
+            } else if (flag == "render_metrics_judder_score_histogram_buckets") {
+                return "1,2,3,4,5";
+            } else if (flag == "render_metrics_judder_score_histogram_to_score") {
+                return "-1,-2,-3,-4,-5";
+            } else if (flag == "render_metrics_judder_event_max") {
+                return "9000";
+            } else if (flag == "render_metrics_judder_event_details_max") {
+                return "10000";
+            } else if (flag == "render_metrics_judder_event_distance_tolerance_ms") {
+                return "11000";
+            }
+            return "";
+        }
+    };
+
+    Configuration c = Configuration::getFromServerConfigurableFlags(getServerConfigurableFlagFn);
+    // The default configuration here is used to verify we're not configuring the values to the
+    // default - if we are accidentally configuring to the default then we're not necessarily
+    // testing the parsing.
+    Configuration d;
+    EXPECT_EQ(c.enabled, false);
+    EXPECT_NE(c.enabled, d.enabled);
+    EXPECT_EQ(c.areSkippedFramesDropped, false);
+    EXPECT_NE(c.areSkippedFramesDropped, d.areSkippedFramesDropped);
+    EXPECT_EQ(c.maxExpectedContentFrameDurationUs, 2000);
+    EXPECT_NE(c.maxExpectedContentFrameDurationUs, d.maxExpectedContentFrameDurationUs);
+    EXPECT_EQ(c.frameRateDetectionToleranceUs, 3000);
+    EXPECT_NE(c.frameRateDetectionToleranceUs, d.frameRateDetectionToleranceUs);
+    EXPECT_EQ(c.liveContentFrameDropToleranceUs, 4000);
+    EXPECT_NE(c.liveContentFrameDropToleranceUs, d.liveContentFrameDropToleranceUs);
+    {
+        std::vector<int32_t> expected({100,200,300,400});
+        EXPECT_EQ(c.freezeDurationMsHistogramBuckets, expected);
+        EXPECT_NE(c.freezeDurationMsHistogramBuckets, d.freezeDurationMsHistogramBuckets);
+    }
+    {
+        std::vector<int64_t> expected({1234567890120LL,1234567890121LL,1234567890122LL});
+        EXPECT_EQ(c.freezeDurationMsHistogramToScore, expected);
+        EXPECT_NE(c.freezeDurationMsHistogramToScore, d.freezeDurationMsHistogramToScore);
+    }
+    {
+        std::vector<int32_t> expected({500,600,700,800,900});
+        EXPECT_EQ(c.freezeDistanceMsHistogramBuckets, expected);
+        EXPECT_NE(c.freezeDistanceMsHistogramBuckets, d.freezeDistanceMsHistogramBuckets);
+    }
+    EXPECT_EQ(c.freezeEventMax, 5000);
+    EXPECT_NE(c.freezeEventMax, d.freezeEventMax);
+    EXPECT_EQ(c.freezeEventDetailsMax, 6000);
+    EXPECT_NE(c.freezeEventDetailsMax, d.freezeEventDetailsMax);
+    EXPECT_EQ(c.freezeEventDistanceToleranceMs, 7000);
+    EXPECT_NE(c.freezeEventDistanceToleranceMs, d.freezeEventDistanceToleranceMs);
+    EXPECT_EQ(c.judderErrorToleranceUs, 8000);
+    EXPECT_NE(c.judderErrorToleranceUs, d.judderErrorToleranceUs);
+    {
+        std::vector<int32_t> expected({1,2,3,4,5});
+        EXPECT_EQ(c.judderScoreHistogramBuckets, expected);
+        EXPECT_NE(c.judderScoreHistogramBuckets, d.judderScoreHistogramBuckets);
+    }
+    {
+        std::vector<int64_t> expected({-1,-2,-3,-4,-5});
+        EXPECT_EQ(c.judderScoreHistogramToScore, expected);
+        EXPECT_NE(c.judderScoreHistogramToScore, d.judderScoreHistogramToScore);
+    }
+    EXPECT_EQ(c.judderEventMax, 9000);
+    EXPECT_NE(c.judderEventMax, d.judderEventMax);
+    EXPECT_EQ(c.judderEventDetailsMax, 10000);
+    EXPECT_NE(c.judderEventDetailsMax, d.judderEventDetailsMax);
+    EXPECT_EQ(c.judderEventDistanceToleranceMs, 11000);
+    EXPECT_NE(c.judderEventDistanceToleranceMs, d.judderEventDistanceToleranceMs);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsReleasedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10);
+    h.render({16.66, 16.66, 16.66});
+    h.skip(10); // skipped frames aren't released so they are not counted
+    h.render({16.66, 16.66, 16.66, 16.66});
+    h.drop(10);
+    EXPECT_EQ(27, h.getMetrics().frameReleasedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsSkippedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10); // dropped frames are not counted
+    h.skip(10); // frames skipped before rendering a frame are not counted
+    h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames are not counted
+    h.skip(10);
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.skip(10); // frames skipped at the end of playback are not counted
+    h.drop(10); // dropped frames are not counted
+    EXPECT_EQ(10, h.getMetrics().frameSkippedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSkippedFramesAreDropped_countsDroppedFrames) {
+    Configuration c;
+    c.areSkippedFramesDropped = true;
+    Helper h(16.66, c);
+    h.skip(10); // skipped frames at the beginning of playback are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+    h.render({16.66, 16.66, 16.66});  // rendered frames are not counted
+    h.drop(10);
+    h.skip(10);
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames at the end of playback are not counted
+    h.skip(10); // skipped frames at the end of playback are not counted
+    EXPECT_EQ(30, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenNotSkippedFramesAreDropped_countsDroppedFrames) {
+    Configuration c;
+    c.areSkippedFramesDropped = false;
+    Helper h(16.66, c);
+    h.skip(10); // skipped frames at the beginning of playback are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames at the beginning of playback after dropped frames are not counted
+    h.render({16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10);
+    h.skip(10); // skipped frames are not counted
+    h.render({16.66, 16.66, 16.66, 16.66}); // rendered frames are not counted
+    h.drop(10); // dropped frames at the end of playback are not counted
+    h.skip(10); // skipped frames at the end of playback are not counted
+    EXPECT_EQ(20, h.getMetrics().frameDroppedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, countsRenderedFrames) {
+    Configuration c;
+    Helper h(16.66, c);
+    h.drop(10); // dropped frames are not counted
+    h.render({16.66, 16.66, 16.66});
+    h.skip(10); // skipped frames are not counted
+    h.render({16.66, 16.66, 16.66, 16.66});
+    h.drop(10); // dropped frames are not counted
+    EXPECT_EQ(7, h.getMetrics().frameRenderedCount);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detectsFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, handlesSeeking) {
+    Configuration c;
+    c.maxExpectedContentFrameDurationUs = 30;
+    VideoRenderQualityTracker v(c);
+    v.onFrameReleased(0, 0);
+    v.onFrameRendered(0, 0);
+    v.onFrameReleased(20, 20);
+    v.onFrameRendered(20, 20);
+    v.onFrameReleased(40, 40);
+    v.onFrameRendered(40, 40);
+    v.onFrameReleased(60, 60);
+    v.onFrameRendered(60, 60);
+    v.onFrameReleased(80, 80);
+    v.onFrameRendered(80, 80);
+    v.onFrameReleased(7200000000, 100);
+    v.onFrameRendered(7200000000, 100);
+    v.onFrameReleased(7200000020, 120);
+    v.onFrameRendered(7200000020, 120);
+    v.onFrameReleased(7200000040, 140);
+    v.onFrameRendered(7200000040, 140);
+    v.onFrameReleased(7200000060, 160);
+    v.onFrameRendered(7200000060, 160);
+    v.onFrameReleased(7200000080, 180);
+    v.onFrameRendered(7200000080, 180);
+    v.onFrameReleased(0, 200);
+    v.onFrameRendered(0, 200);
+    v.onFrameReleased(20, 220);
+    v.onFrameRendered(20, 220);
+    v.onFrameReleased(40, 240);
+    v.onFrameRendered(40, 240);
+    v.onFrameReleased(60, 260);
+    v.onFrameRendered(60, 260);
+    const VideoRenderQualityMetrics &m = v.getMetrics();
+    EXPECT_EQ(m.judderRate, 0); // frame durations can get messed up during discontinuities so if
+                                // the discontinuity is not detected, judder is expected
+    EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, withSkipping_handlesSeeking) {
+    Configuration c;
+    c.maxExpectedContentFrameDurationUs = 30;
+    VideoRenderQualityTracker v(c);
+    v.onFrameReleased(0, 0);
+    v.onFrameRendered(0, 0);
+    v.onFrameReleased(20, 20);
+    v.onFrameRendered(20, 20);
+    v.onFrameReleased(40, 40);
+    v.onFrameRendered(40, 40);
+    v.onFrameReleased(60, 60);
+    v.onFrameRendered(60, 60);
+    v.onFrameReleased(80, 80);
+    v.onFrameRendered(80, 80);
+    v.onFrameSkipped(7200000000);
+    v.onFrameSkipped(7200000020);
+    v.onFrameReleased(7200000040, 100);
+    v.onFrameRendered(7200000040, 100);
+    v.onFrameReleased(7200000060, 120);
+    v.onFrameRendered(7200000060, 120);
+    v.onFrameReleased(7200000080, 140);
+    v.onFrameSkipped(0);
+    v.onFrameRendered(7200000080, 140);
+    v.onFrameSkipped(20);
+    v.onFrameReleased(40, 160);
+    v.onFrameRendered(40, 160);
+    v.onFrameReleased(60, 180);
+    v.onFrameRendered(60, 180);
+    v.onFrameReleased(80, 200);
+    v.onFrameRendered(80, 200);
+    const VideoRenderQualityMetrics &m = v.getMetrics();
+    EXPECT_EQ(m.judderRate, 0); // frame durations can get messed up during discontinuities so if
+                                // the discontinuity is not detected, judder is expected
+    EXPECT_NE(m.contentFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenLowTolerance_doesntDetectFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 0;
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateDestabilizes_detectsFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    h.render({30.0, 16.6, 30.0, 16.6});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, detects32Pulldown) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(41.66, c);
+    h.render({49.9, 33.2, 50.0, 33.4, 50.1, 33.2});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad32Pulldown_doesntDetect32Pulldown) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 33.33, 50.00, 33.33, 50.00});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateChanges_detectsMostRecentFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.6, 16.7, 16.6, 16.7});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_NEAR(h.getMetrics().actualFrameRate, 60.0, 0.5);
+    h.changeContentFrameDuration(41.66);
+    h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 24.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_24_3_2_PULLDOWN);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameRate) {
+    Configuration c;
+    c.frameRateDetectionToleranceUs = 2 * 1000; // 2 ms
+    Helper h(16.66, c);
+    h.render({16.66, 30.0, 16.66, 30.0, 16.66});
+    EXPECT_NEAR(h.getMetrics().contentFrameRate, 60.0, 0.5);
+    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
+    Configuration c;
+    Helper h(20, c);
+    h.render(3);
+    EXPECT_EQ(h.getMetrics().freezeRate, 0);
+    h.drop(3);
+    h.render(3);
+    // +1 because the first frame before drops is considered frozen
+    // and then -1 because the last frame has an unknown render duration
+    EXPECT_EQ(h.getMetrics().freezeRate, 4.0 / 8.0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
+    Configuration c;
+    // +17 because freeze durations include the render time of the previous frame
+    c.freezeDurationMsHistogramBuckets = {2 * 17 + 17, 3 * 17 + 17, 6 * 17 + 17};
+    Helper h(17, c);
+    h.render(1);
+    h.drop(1); // below
+    h.render(1);
+    h.drop(3); // bucket 1
+    h.render(1);
+    h.drop(2); // bucket 0
+    h.render(1);
+    h.drop(4); // bucket 1
+    h.render(1);
+    h.drop(2); // bucket 0
+    h.render(1);
+    h.drop(5); // bucket 1
+    h.render(1);
+    h.drop(10); // above
+    h.render(1);
+    h.drop(15); // above
+    h.render(1);
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.emit(), "1{2,3}2");
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getCount(), 8);
+    // the smallest frame drop was 1, +17 because it includes the previous frame render time
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMin(), 1 * 17 + 17);
+    // the largest frame drop was 15, +17 because it includes the previous frame render time
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getMax(), 15 * 17 + 17);
+    // total frame drop count, multiplied by 17, plus 17 for each occurrence, divided by occurrences
+    EXPECT_EQ(h.getMetrics().freezeDurationMsHistogram.getAvg(), ((1 + 3 + 2 + 4 + 2 + 5 + 10 + 15)
+                                                                   * 17 + 8 * 17) / 8);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDistanceHistogram) {
+    Configuration c;
+    c.freezeDistanceMsHistogramBuckets = {1 * 17, 5 * 17, 6 * 17};
+    Helper h(17, c);
+    h.render(1);
+    h.drop(1);
+    h.render(5); // bucket 0
+    h.drop(3);
+    h.render(3); // bucket 0
+    h.drop(2);
+    h.render(9); // above
+    h.drop(5);
+    h.render(1); // below
+    h.drop(2);
+    h.render(6); // bucket 1
+    h.drop(4);
+    h.render(12); // above
+    h.drop(2);
+    h.render(1);
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.emit(), "1{2,1}2");
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getCount(), 6);
+    // the smallest render between drops was 1, -17 because the last frame rendered also froze
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMin(), 1 * 17 - 17);
+    // the largest render between drops was 12, -17 because the last frame rendered also froze
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getMax(), 12 * 17 - 17);
+    // total render count between drops, multiplied by 17, minus 17 for each occurrence, divided by
+    // occurrences
+    EXPECT_EQ(h.getMetrics().freezeDistanceMsHistogram.getAvg(), ((5 + 3 + 9 + 1 + 6 + 12) * 17 -
+                                                                  6 * 17) / 6);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when60hz_hasNoJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({16.66, 16.66, 16.66, 16.66, 16.66, 16.66, 16.66});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance60hz_hasNoJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({14, 18, 14, 18, 14, 18, 14, 18});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance60Hz_hasJudder) {
+    Configuration c;
+    Helper h(16.66, c); // ~60Hz
+    h.render({14, 18, 14, /* no 18 between 14s */ 14, 18, 14, 18});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({33.33, 33.33, 33.33, 33.33, 33.33, 33.33});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariance30Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({29.0, 35.0, 29.0, 35.0, 29.0, 35.0});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBadSmallVariance30Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({29.0, 35.0, 29.0, /* no 35 between 29s */ 29.0, 35.0, 29.0, 35.0});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad30HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({33.33, 33.33, 50.0, /* frame stayed 1 vsync too long */ 16.66, 33.33, 33.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 2); // note: 2 counts of judder
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when24HzTo60Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 50.0, 33.33, 50.0, 33.33});
+    EXPECT_LE(h.getMetrics().judderScoreHistogram.getMax(), 0);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when25HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(40, c);
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    h.render({33.33, 33.33, 50.0});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when50HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(20, c);
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    h.render({16.66, 16.66, 16.66, 33.33});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, when30HzTo50Hz_hasJudder) {
+    Configuration c;
+    Helper h(33.33, c);
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    h.render({40.0, 40.0, 40.0, 60.0});
+    EXPECT_GT(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenSmallVariancePulldown24HzTo60Hz_hasNoJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({52.0, 31.33, 52.0, 31.33, 52.0, 31.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 0);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, whenBad24HzTo60Hz_hasJudder) {
+    Configuration c;
+    Helper h(41.66, c);
+    h.render({50.0, 33.33, 50.0, 33.33, /* no 50 between 33s */ 33.33, 50.0, 33.33});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 1);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderScoreHistogram) {
+    Configuration c;
+    c.judderErrorToleranceUs = 2000;
+    c.judderScoreHistogramBuckets = {1, 5, 8};
+    Helper h(16, c);
+    h.render({16, 16, 23, 16, 16, 10, 16, 4, 16, 20, 16, 16});
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.emit(), "0{1,2}1");
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getCount(), 4);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMin(), 4);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getMax(), 12);
+    EXPECT_EQ(h.getMetrics().judderScoreHistogram.getAvg(), (7 + 6 + 12 + 4) / 4);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, ranksJudderScoresInOrder) {
+    // Each rendering is ranked from best to worst in terms of user experience
+    Configuration c;
+    c.judderErrorToleranceUs = 2000;
+    c.judderScoreHistogramBuckets = {0, 1000};
+    int64_t previousScore = 0;
+
+    // 30fps poorly displayed at 60Hz
+    {
+        Helper h(33.33, c);
+        h.render({33.33, 33.33, 16.66, 50.0, 33.33, 33.33});
+        int64_t scoreBad30fpsTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(scoreBad30fpsTo60Hz, previousScore);
+        previousScore = scoreBad30fpsTo60Hz;
+    }
+
+    // 25fps displayed at 60hz
+    {
+        Helper h(40, c);
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        h.render({33.33, 33.33, 50.0});
+        int64_t score25fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score25fpsTo60hz, previousScore);
+        previousScore = score25fpsTo60hz;
+    }
+
+    // 50fps displayed at 60hz
+    {
+        Helper h(20, c);
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        h.render({16.66, 16.66, 16.66, 33.33});
+        int64_t score50fpsTo60hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score50fpsTo60hz, previousScore);
+        previousScore = score50fpsTo60hz;
+    }
+
+    // 24fps poorly displayed at 60Hz
+    {
+        Helper h(41.66, c);
+        h.render({50.0, 33.33, 50.0, 33.33, 33.33, 50.0, 33.33});
+        int64_t scoreBad24HzTo60Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(scoreBad24HzTo60Hz, previousScore);
+        previousScore = scoreBad24HzTo60Hz;
+    }
+
+    // 30fps displayed at 50hz
+    {
+        Helper h(33.33, c);
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        h.render({40.0, 40.0, 40.0, 60.0});
+        int64_t score30fpsTo50hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score30fpsTo50hz, previousScore);
+        previousScore = score30fpsTo50hz;
+    }
+
+    // 24fps displayed at 50Hz
+    {
+        Helper h(41.66, c);
+        h.render(40.0, 11);
+        h.render(60.0, 1);
+        h.render(40.0, 11);
+        h.render(60.0, 1);
+        h.render(40.0, 11);
+        int64_t score24HzTo50Hz = h.getMetrics().judderScoreHistogram.getMax();
+        EXPECT_GT(score24HzTo50Hz, previousScore);
+        previousScore = score24HzTo50Hz;
+    }
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesFreezeEvents) {
+    Configuration c;
+    c.freezeEventMax = 5;
+    c.freezeEventDetailsMax = 4;
+    c.freezeEventDistanceToleranceMs = 1000;
+    Helper h(20, c);
+    h.render(10);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(3);
+    h.render(1000 / 20); // +1 because it's unclear if the current frame is frozen
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(1);
+    h.render(10);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(6);
+    h.render(12);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+    h.drop(10);
+    h.render(1000 / 20 + 1); // +1 because it's unclear if the current frame is frozen
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 1);
+    FreezeEvent e = h.getAndClearFreezeEvent();
+    EXPECT_EQ(e.valid, true); // freeze event
+    // -1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.initialTimeUs, 9 * 20 * 1000);
+    // only count the last frame of the first group of rendered frames
+    EXPECT_EQ(e.durationMs, (1 + 3 + 1000 / 20 + 1 + 10 + 6 + 12 + 10) * 20);
+    EXPECT_EQ(e.count, 4);
+    // number of dropped frames
+    // +1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.sumDurationMs, (4 + 2 + 7 + 11) * 20);
+    // number of rendered frames between dropped frames
+    // -1 because the last rendered frame is considered frozen
+    EXPECT_EQ(e.sumDistanceMs, ((1000 / 20) - 1 + 9 + 11) * 20);
+    // +1 for each since the last rendered frame is considered frozen
+    ASSERT_EQ(e.details.durationMs.size(), 4);
+    EXPECT_EQ(e.details.durationMs[0], 4 * 20);
+    EXPECT_EQ(e.details.durationMs[1], 2 * 20);
+    EXPECT_EQ(e.details.durationMs[2], 7 * 20);
+    EXPECT_EQ(e.details.durationMs[3], 11 * 20);
+    // -1 for each since the last rendered frame is considered frozen
+    ASSERT_EQ(e.details.distanceMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs[0], -1);
+    EXPECT_EQ(e.details.distanceMs[1], 1000 - 20);
+    EXPECT_EQ(e.details.distanceMs[2], 9 * 20);
+    EXPECT_EQ(e.details.distanceMs[3], 11 * 20);
+    int64_t previousEventEndTimeUs = e.initialTimeUs + e.durationMs * 1000;
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(4);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 2);
+    e = h.getAndClearFreezeEvent();
+    EXPECT_EQ(e.valid, true);
+    // 1000ms tolerance means 1000ms from the end of the last event to the beginning of this event
+    EXPECT_EQ(e.initialTimeUs, previousEventEndTimeUs + 1000 * 1000);
+    EXPECT_EQ(e.count, 5);
+    // 5 freezes captured in the freeze event, but only 4 details are recorded
+    EXPECT_EQ(e.details.durationMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs.size(), 4);
+    EXPECT_EQ(e.details.distanceMs[0], 1000); // same as the tolerance
+    // The duration across the entire series of freezes is captured, with only 4 details captured
+    // +1 because the first rendered frame is considered frozen (not the 1st dropped frame)
+    EXPECT_EQ(e.durationMs, (1 + 1 + 4 + 1 + 4 + 1 + 4 + 1 + 4 + 1) * 20);
+    // The durations of all 5 freezes are captured, with only 4 details captured
+    EXPECT_EQ(e.sumDurationMs, (2 + 2 + 2 + 2 + 2) * 20);
+    // The distances of all 5 freezes are captured, with only 4 details captured
+    EXPECT_EQ(e.sumDistanceMs, (3 + 3 + 3 + 3) * 20);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 3);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 4);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 5);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, true);
+    h.drop(1);
+    h.render(1000 / 20 + 1);
+    // The 6th event isn't captured because it exceeds the configured limit
+    EXPECT_EQ(h.getMetrics().freezeEventCount, 6);
+    EXPECT_EQ(h.getAndClearFreezeEvent().valid, false);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesJudderEvents) {
+    Configuration c;
+    c.judderEventMax = 4;
+    c.judderEventDetailsMax = 3;
+    c.judderEventDistanceToleranceMs = 100;
+    Helper h(20, c);
+    h.render({19, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({15, 19, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({28, 20, 19});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    h.render({13, 20, 20, 20, 20});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false);
+    // Start with judder for the next event at the end of the sequence, because judder is scored
+    // one frame behind, and for combining judder occurrences into events, it's not clear yet if
+    // the current frame has judder or not.
+    h.render({15, 20, 20, 20, 20, 20, 15});
+    JudderEvent e = h.getAndClearJudderEvent();
+    EXPECT_EQ(e.valid, true);
+    EXPECT_EQ(e.initialTimeUs, (19 + 20 + 19) * 1000);
+    EXPECT_EQ(e.durationMs, 15 + 19 + 20 + 19 /**/ + 28 + 20 + 19 /**/ + 13 + 20 * 4 /**/ + 15);
+    EXPECT_EQ(e.count, 4);
+    EXPECT_EQ(e.sumScore, (20 - 15) + (28 - 20) + (20 - 13) + (20 - 15));
+    EXPECT_EQ(e.sumDistanceMs, 19 + 20 + 19 /**/ + 20 + 19 /**/ + 20 * 4);
+    ASSERT_EQ(e.details.actualRenderDurationUs.size(), 3); // 3 details per configured maximum
+    EXPECT_EQ(e.details.actualRenderDurationUs[0], 15 * 1000);
+    EXPECT_EQ(e.details.actualRenderDurationUs[1], 28 * 1000);
+    EXPECT_EQ(e.details.actualRenderDurationUs[2], 13 * 1000);
+    ASSERT_EQ(e.details.contentRenderDurationUs.size(), 3);
+    EXPECT_EQ(e.details.contentRenderDurationUs[0], 20 * 1000);
+    EXPECT_EQ(e.details.contentRenderDurationUs[1], 20 * 1000);
+    EXPECT_EQ(e.details.contentRenderDurationUs[2], 20 * 1000);
+    ASSERT_EQ(e.details.distanceMs.size(), 3);
+    EXPECT_EQ(e.details.distanceMs[0], -1);
+    EXPECT_EQ(e.details.distanceMs[1], 19 + 20 + 19);
+    EXPECT_EQ(e.details.distanceMs[2], 20 + 19);
+    h.render({20, 20, 20, 20, 20, 15});
+    e = h.getAndClearJudderEvent();
+    EXPECT_EQ(e.valid, true);
+    ASSERT_EQ(e.details.distanceMs.size(), 1);
+    EXPECT_EQ(e.details.distanceMs[0], 100); // same as the tolerance
+    h.render({20, 20, 20, 20, 20, 15});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+    h.render({20, 20, 20, 20, 20, 15});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, true);
+    h.render({20, 20, 20, 20, 20, 20});
+    EXPECT_EQ(h.getAndClearJudderEvent().valid, false); // max number of judder events exceeded
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallFreezeScore) {
+    Configuration c;
+    // # drops * 20ms + 20ms because current frame is frozen + 1 for bucket threshold
+    c.freezeDurationMsHistogramBuckets = {1 * 20 + 21, 5 * 20 + 21, 10 * 20 + 21};
+    c.freezeDurationMsHistogramToScore = {10, 100, 1000};
+    Helper h(20, c);
+    h.render(5);
+    h.drop(2); // bucket = 0, bucket count = 1, bucket score = 10
+    h.render(5);
+    h.drop(11); // bucket = 2, bucket count = 1, bucket score = 1000
+    h.render(5);
+    h.drop(6); // bucket = 1, bucket count = 1, bucket score = 100
+    h.render(5);
+    h.drop(1); // bucket = null
+    h.render(5);
+    h.drop(3); // bucket = 0, bucket count = 2, bucket score = 20
+    h.render(5);
+    h.drop(10); // bucket = 1, bucket count = 2, bucket score = 200
+    h.render(5);
+    h.drop(7); // bucket = 1, bucket count = 3, bucket score = 300
+    h.render(5);
+    EXPECT_EQ(h.getMetrics().freezeScore, 20 + 300 + 1000);
+}
+
+TEST_F(VideoRenderQualityTrackerTest, capturesOverallJudderScore) {
+    Configuration c;
+    c.judderScoreHistogramBuckets = {0, 6, 10};
+    c.judderScoreHistogramToScore = {10, 100, 1000};
+    Helper h(20, c);
+    h.render({20, 20, 15, 20, 20}); // bucket = 0, bucket count = 1, bucket score = 10
+    h.render({20, 20, 11, 20, 20}); // bucket = 1, bucket count = 1, bucket score = 100
+    h.render({20, 20, 13, 20, 20}); // bucket = 1, bucket count = 2, bucket score = 200
+    h.render({20, 20,  5, 20, 20}); // bucket = 2, bucket count = 1, bucket score = 1000
+    h.render({20, 20, 14, 20, 20}); // bucket = 1, bucket count = 3, bucket score = 300
+    h.render({20, 20, 10, 20, 20}); // bucket = 2, bucket count = 2, bucket score = 2000
+    EXPECT_EQ(h.getMetrics().judderScore, 10 + 300 + 2000);
+}
+
+} // android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index d94c8ff..9f46a74 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -66,8 +66,8 @@
     for (size_t i = 0; i < extractor->countTracks(); ++i) {
         sp<MetaData> meta = extractor->getTrackMetaData(i);
 
-        const char *trackMime;
-        if (!strcasecmp(mime.c_str(), trackMime)) {
+        std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
+        if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
             sp<IMediaSource> track = extractor->getTrack(i);
             if (track == NULL) {
                 return NULL;
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 98bfb94..6856ac0 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -42,6 +42,51 @@
     kMaxValue = MPEG2TS,
 };
 
+static std::string kTestedMimeTypes[] = {"audio/3gpp",
+                                         "audio/amr-wb",
+                                         "audio/vorbis",
+                                         "audio/opus",
+                                         "audio/mp4a-latm",
+                                         "audio/mpeg",
+                                         "audio/mpeg-L1",
+                                         "audio/mpeg-L2",
+                                         "audio/midi",
+                                         "audio/qcelp",
+                                         "audio/g711-alaw",
+                                         "audio/g711-mlaw",
+                                         "audio/flac",
+                                         "audio/aac-adts",
+                                         "audio/gsm",
+                                         "audio/ac3",
+                                         "audio/eac3",
+                                         "audio/eac3-joc",
+                                         "audio/ac4",
+                                         "audio/scrambled",
+                                         "audio/alac",
+                                         "audio/x-ms-wma",
+                                         "audio/x-adpcm-ms",
+                                         "audio/x-adpcm-dvi-ima",
+                                         "video/avc",
+                                         "video/hevc",
+                                         "video/mp4v-es",
+                                         "video/3gpp",
+                                         "video/x-vnd.on2.vp8",
+                                         "video/x-vnd.on2.vp9",
+                                         "video/av01",
+                                         "video/mpeg2",
+                                         "video/dolby-vision",
+                                         "video/scrambled",
+                                         "video/divx",
+                                         "video/divx3",
+                                         "video/xvid",
+                                         "video/x-motion-jpeg",
+                                         "text/3gpp-tt",
+                                         "application/x-subrip",
+                                         "text/vtt",
+                                         "text/cea-608",
+                                         "text/cea-708",
+                                         "application/x-id3v4"};
+
 std::string genMimeType(FuzzedDataProvider *dataProvider);
 sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
 sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 5df3267..70d73c8 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -52,7 +52,10 @@
 
   MediaMuxer::OutputFormat format =
       (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
-  sp<MediaMuxer> mMuxer(new MediaMuxer(fd, format));
+  sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
+  if (mMuxer == nullptr) {
+    return 0;
+  }
 
   while (fdp.remaining_bytes() > 1) {
     switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
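
With this change the muxer fuzzer goes through the MediaMuxer::create() factory, which can return null, instead of constructing the object directly, and the early return avoids touching a null pointer later in the loop. A sketch of the same guard pattern, with a hypothetical makeMuxer() factory standing in for the real one:

    #include <memory>

    struct Muxer {};  // stand-in for the real muxer type

    // Hypothetical factory: returns null when the requested format is unsupported.
    std::unique_ptr<Muxer> makeMuxer(int fd, int format) {
        if (format < 0 || format > 4) return nullptr;
        (void)fd;
        return std::make_unique<Muxer>();
    }

    int fuzzOneIteration(int fd, int format) {
        auto muxer = makeMuxer(fd, format);
        if (muxer == nullptr) {
            return 0;  // bail out early, as the fuzzer now does
        }
        // ... exercise the muxer ...
        return 0;
    }
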
diff --git a/media/libstagefright/tests/mediacodec/Android.bp b/media/libstagefright/tests/mediacodec/Android.bp
index 9cdc6d4..23882ea 100644
--- a/media/libstagefright/tests/mediacodec/Android.bp
+++ b/media/libstagefright/tests/mediacodec/Android.bp
@@ -70,4 +70,4 @@
     test_suites: [
         "general-tests",
     ],
-}
+}
\ No newline at end of file
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index a8e64b6..71ddbe5 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -62,7 +62,8 @@
              size_t offset,
              const CryptoPlugin::SubSample *subSamples,
              size_t numSubSamples,
-             const sp<MediaCodecBuffer> &buffer),
+             const sp<MediaCodecBuffer> &buffer,
+             AString* errorDetailMsg),
             (override));
     MOCK_METHOD(status_t, renderOutputBuffer,
             (const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
@@ -70,6 +71,7 @@
     MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
     MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
     MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+    MOCK_METHOD(void, pollForRenderedBuffers, (), (override));
 };
 
 class MockCodec : public CodecBase {
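
The mock gains the new errorDetailMsg parameter and the pollForRenderedBuffers() entry point so that it still matches the interface it overrides. A small GoogleMock sketch of the same MOCK_METHOD(..., (override)) pattern on an illustrative stand-in interface:

    #include <gmock/gmock.h>
    #include <string>

    // Stand-in interface; only the shape of the declarations matters here.
    class Channel {
    public:
        virtual ~Channel() = default;
        virtual int queueInput(const std::string &buffer, std::string *errorDetailMsg) = 0;
        virtual void pollForRenderedBuffers() = 0;
    };

    class MockChannel : public Channel {
    public:
        // (override) makes the compiler check the signature against the base
        // class, which is what catches interface changes like the added parameter.
        MOCK_METHOD(int, queueInput,
                    (const std::string &buffer, std::string *errorDetailMsg), (override));
        MOCK_METHOD(void, pollForRenderedBuffers, (), (override));
    };
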
diff --git a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
index f934b54..b2044d3 100644
--- a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
+++ b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
@@ -22,6 +22,7 @@
 #include <string.h>
 #include <sys/stat.h>
 #include <fstream>
+#include <memory>
 
 #include <binder/Parcel.h>
 #include <media/stagefright/foundation/AString.h>
@@ -240,10 +241,10 @@
 
                     if (remaining < tempFontNameLength) break;
                     const uint8_t *tmpFont = tmpData;
-                    char *tmpFontName = strndup((const char *)tmpFont, tempFontNameLength);
+                    std::unique_ptr<char[]> tmpFontName(new char[tempFontNameLength]);
+                    strncpy(tmpFontName.get(), (const char *)tmpFont, tempFontNameLength);
                     ASSERT_NE(tmpFontName, nullptr) << "Font Name is null";
-                    ALOGI("FontName = %s", tmpFontName);
-                    free(tmpFontName);
+                    ALOGI("FontName = %s", tmpFontName.get());
                     tmpData += tempFontNameLength;
                     remaining -= tempFontNameLength;
                     fontRecordEntries.push_back({tempFontID, tempFontNameLength, tmpFont});
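
The test now copies the length-prefixed font name into an owned buffer instead of relying on strndup()/free(). A short sketch of the same idea, assuming only the standard library; it reserves one extra byte and writes the terminator so the copied name can be logged as a C string:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <memory>

    // Copies `len` bytes that are not guaranteed to be NUL-terminated
    // into an owned, NUL-terminated buffer.
    std::unique_ptr<char[]> copyName(const uint8_t *src, size_t len) {
        std::unique_ptr<char[]> name(new char[len + 1]);
        std::memcpy(name.get(), src, len);
        name[len] = '\0';
        return name;
    }
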
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 59ce8db..7d1442b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -197,7 +197,6 @@
 }
 
 status_t WebmFrameSinkThread::stop() {
-    mDone = true;
     mVideoFrames.push(WebmFrame::EOS);
     mAudioFrames.push(WebmFrame::EOS);
     return WebmFrameThread::stop();
@@ -337,7 +336,6 @@
 }
 
 void WebmFrameMediaSourceThread::run() {
-    int32_t count = 0;
     int64_t timestampUs = 0xdeadbeef;
     int64_t lastTimestampUs = 0; // Previous sample time stamp
     int64_t lastDurationUs = 0; // Previous sample duration
@@ -368,7 +366,6 @@
             buffer = NULL;
             continue;
         }
-        ++count;
 
         // adjust time-stamps after pause/resume
         if (mResumed) {
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 5eaadbd..3823c36 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -54,6 +54,19 @@
 
 static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
 
+bool WebmWriter::isFdOpenModeValid(int fd) {
+    // check for invalid file descriptor.
+    if (!MediaWriter::isFdOpenModeValid(fd)) {
+        return false;
+    }
+    int flags = fcntl(fd, F_GETFL);
+    if ((flags & O_RDWR) == 0) {
+        ALOGE("File must be in read-write mode for webm writer");
+        return false;
+    }
+    return true;
+}
+
 WebmWriter::WebmWriter(int fd)
     : mFd(dup(fd)),
       mInitCheck(mFd < 0 ? NO_INIT : OK),
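
isFdOpenModeValid() above layers a read-write requirement on top of the base-class validation. A standalone sketch of querying a descriptor's access mode with fcntl(2), assuming only POSIX headers; it isolates the access-mode bits with O_ACCMODE, a slightly stricter test than the bitwise check in the patch:

    #include <fcntl.h>
    #include <unistd.h>
    #include <cstdio>

    // Returns true when `fd` is valid and was opened with read-write access.
    bool isReadWrite(int fd) {
        int flags = fcntl(fd, F_GETFL);
        if (flags == -1) {
            return false;  // not a valid descriptor
        }
        // O_ACCMODE masks out everything except the access-mode bits.
        return (flags & O_ACCMODE) == O_RDWR;
    }

    int main() {
        int fd = open("/tmp/example.webm", O_RDWR | O_CREAT, 0644);
        if (fd >= 0) {
            printf("read-write: %s\n", isReadWrite(fd) ? "yes" : "no");
            close(fd);
        }
        return 0;
    }
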
diff --git a/media/libstagefright/webm/include/webm/WebmWriter.h b/media/libstagefright/webm/include/webm/WebmWriter.h
index ed5bc4c..e339add 100644
--- a/media/libstagefright/webm/include/webm/WebmWriter.h
+++ b/media/libstagefright/webm/include/webm/WebmWriter.h
@@ -36,6 +36,10 @@
 
 class WebmWriter : public MediaWriter {
 public:
+    // Returns true if the file descriptor is opened using a mode
+    // which is compatible with WebmWriter.
+    // Note that this hides the method of the same name in the base class.
+    static bool isFdOpenModeValid(int fd);
     explicit WebmWriter(int fd);
     ~WebmWriter() { reset(); }
 
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index b81f27e..58aa7cd 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -57,6 +57,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of all the various writers",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 67c6102..8c1ef3b 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -19,6 +19,8 @@
 
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 
+#include <android/api-level.h>
+
 #include <android-base/logging.h>
 #include <android-base/macros.h>
 #include <android-base/properties.h>
@@ -30,6 +32,7 @@
 
 #include <expat.h>
 #include <stdio.h>
+#include <stdlib.h>
 #include <string.h>
 #include <sys/stat.h>
 
@@ -360,7 +363,7 @@
 
         status_t updateMediaCodec(
                 const char *rank, const StringSet &domain, const StringSet &variants,
-                const char *enabled);
+                const char *enabled, const char *minsdk);
     };
 
     status_t parseXmlFilesInSearchDirs(
@@ -493,6 +496,9 @@
     }
 }
 
+// current SDK for this device; filled in when initializing the parser.
+static int mysdk = 0;
+
 MediaCodecsXmlParser::Impl::Parser::Parser(State *state, std::string path)
     : mState(state),
       mPath(path),
@@ -502,6 +508,20 @@
     if (end != std::string::npos) {
         mHrefBase = path.substr(0, end + 1);
     }
+
+#if defined(__ANDROID_API_U__)
+    // this sdk calculation is intended only for devices >= U
+    static std::once_flag sCheckOnce;
+
+    std::call_once(sCheckOnce, [&](){
+        mysdk = android_get_device_api_level();
+
+        // work around main development branch being on same SDK as the last dessert release.
+        if (__ANDROID_API__ == __ANDROID_API_FUTURE__) {
+            mysdk++;
+        }
+    });
+#endif  // __ANDROID_API_U__
 }
 
 void MediaCodecsXmlParser::Impl::Parser::parseXmlFile() {
@@ -930,6 +950,7 @@
     const char *a_domain = nullptr;
     const char *a_variant = nullptr;
     const char *a_enabled = nullptr;
+    const char *a_minsdk = nullptr;
 
     size_t i = 0;
     while (attrs[i] != nullptr) {
@@ -953,6 +974,8 @@
             a_variant = attrs[++i];
         } else if (strEq(attrs[i], "enabled")) {
             a_enabled = attrs[++i];
+        } else if (strEq(attrs[i], "minsdk")) {
+            a_minsdk = attrs[++i];
         } else {
             PLOGD("MediaCodec: ignoring unrecognized attribute '%s'", attrs[i]);
             ++i;
@@ -981,7 +1004,7 @@
 
     return updateMediaCodec(
             a_rank, parseCommaSeparatedStringSet(a_domain),
-            parseCommaSeparatedStringSet(a_variant), a_enabled);
+            parseCommaSeparatedStringSet(a_variant), a_enabled, a_minsdk);
 }
 
 MediaCodecsXmlParser::Impl::Result
@@ -1035,7 +1058,7 @@
 
 status_t MediaCodecsXmlParser::Impl::Parser::updateMediaCodec(
         const char *rank, const StringSet &domains, const StringSet &variants,
-        const char *enabled) {
+        const char *enabled, const char *minsdk) {
     CHECK(mState->inCodec());
     CodecProperties &codec = mState->codec();
 
@@ -1048,6 +1071,7 @@
 
     codec.variantSet = variants;
 
+    // we allow sets of domains...
     for (const std::string &domain : domains) {
         if (domain.size() && domain.at(0) == '!') {
             codec.domainSet.erase(domain.substr(1));
@@ -1065,6 +1089,49 @@
             ALOGD("disabling %s", mState->codecName().c_str());
         }
     }
+
+    // evaluate against passed minsdk, with lots of logging to explain the logic
+    //
+    // if current sdk >= minsdk, we want to enable the codec
+    // this OVERRIDES any enabled="true|false" setting on the codec.
+    // (enabled=true minsdk=35 on an sdk 34 device results in a disabled codec)
+    //
+    // Although minsdk is not parsed before Android U, a media_codecs.xml that uses it
+    // can still be shipped to earlier devices (e.g. as part of mainline). An example:
+    //
+    // we have a codec that we want enabled in Android V (sdk=35), so we use:
+    //     <MediaCodec ..... enabled="false" minsdk="35" >
+    //
+    // on Q/R/S/T: it sees enabled=false, but ignores the unrecognized minsdk
+    //     so the codec will be disabled
+    // on U: it sees enabled=false, and sees minsdk=35, but U==34 and 34 < 35
+    //     so the codec will be disabled
+    // on V: it sees enabled=false, and sees minsdk=35, V==35 and 35 >= 35
+    //     so the codec will be enabled
+    //
+    // if we know the XML files will be used only on devices >= U, we can skip the enabled=false
+    // piece. Android mainline's support horizons mean we will be carrying the enabled=false
+    // workaround for another 4-5 years after U.
+    //
+    if (minsdk != nullptr) {
+        char *p = nullptr;
+        int sdk = strtol(minsdk, &p, 0);
+        if (p == minsdk || sdk < 0) {
+            ALOGE("minsdk parsing '%s' yielded %d, mapping to 0", minsdk, sdk);
+            sdk = 0;
+        }
+        // minsdk="#" means: "enable if sdk is >= #, disable otherwise"
+        if (mysdk < sdk) {
+            ALOGI("codec %s disabled, device sdk %d < required %d",
+                mState->codecName().c_str(), mysdk, sdk);
+            codec.quirkSet.emplace("attribute::disabled");
+        } else {
+            ALOGI("codec %s enabled, device sdk %d >= required %d",
+                mState->codecName().c_str(), mysdk, sdk);
+            codec.quirkSet.erase("attribute::disabled");
+        }
+    }
+
     return OK;
 }
 
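
The minsdk support above has two pieces: the parser caches the device API level once (nudging it up by one when the build comes from the in-development branch, which reports the same SDK as the last release), and updateMediaCodec() compares a parsed minsdk attribute against that level, adding or erasing the attribute::disabled quirk accordingly. A compact sketch of the gating step under those assumptions; the quirk-set type and function name are illustrative:

    #include <android/api-level.h>
    #include <cstdlib>
    #include <set>
    #include <string>

    // Applies the minsdk gate: minsdk="#" means enable if the device SDK is >= #,
    // disable otherwise. A missing attribute leaves the current state untouched.
    void applyMinSdk(const char *minsdk, int deviceSdk, std::set<std::string> *quirks) {
        if (minsdk == nullptr) {
            return;
        }
        char *end = nullptr;
        long required = strtol(minsdk, &end, 0);
        if (end == minsdk || required < 0) {
            required = 0;  // unparseable values degrade to "no requirement"
        }
        if (deviceSdk < required) {
            quirks->emplace("attribute::disabled");  // device too old: force-disable
        } else {
            quirks->erase("attribute::disabled");    // new enough: force-enable
        }
    }

    // Typical call site, with the level coming from android_get_device_api_level():
    //     applyMinSdk("35", android_get_device_api_level(), &codecQuirks);
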
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index ecfd85e..95c347a 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -84,6 +84,7 @@
     method public java.util.List<media.codecs.Feature> getFeature_optional();
     method public java.util.List<media.codecs.Limit> getLimit_optional();
     method public java.util.List<media.codecs.Mapping> getMapping_optional();
+    method public String getMinsdk();
     method public String getName();
     method public java.util.List<media.codecs.Quirk> getQuirk_optional();
     method public String getRank();
@@ -95,6 +96,7 @@
     method public java.util.List<media.codecs.Variant> getVariant_optional();
     method public void setDomain(String);
     method public void setEnabled(String);
+    method public void setMinsdk(String);
     method public void setName(String);
     method public void setRank(String);
     method public void setType(String);
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index c9a7efc..33f3a27 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -74,6 +74,7 @@
         <xs:attribute name="domain" type="xs:string"/>
         <xs:attribute name="variant" type="xs:string"/>
         <xs:attribute name="enabled" type="xs:string"/>
+        <xs:attribute name="minsdk" type="xs:string"/>
     </xs:complexType>
     <xs:complexType name="Quirk">
         <xs:attribute name="name" type="xs:string"/>
diff --git a/media/libstagefright/xmlparser/test/XMLParserTest.cpp b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
index 7629d97..2c5821e 100644
--- a/media/libstagefright/xmlparser/test/XMLParserTest.cpp
+++ b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
@@ -145,6 +145,33 @@
            },
            {}, "");
 
+    // minsdk
+    setCodecProperties("test12.encoder", true, 12, {"attribute::disabled"}, {}, {}, "video/t12",
+           {
+                   pair<string, string>("tuning-enable-goal", "no"),
+           },
+           {}, "");
+    setCodecProperties("test13.encoder", true, 13, {"attribute::disabled"}, {}, {}, "video/t13",
+           {
+                   pair<string, string>("tuning-enable-goal", "no"),
+           },
+           {}, "");
+    setCodecProperties("test14.encoder", true, 14, {"attribute::disabled"}, {}, {}, "video/t14",
+           {
+                   pair<string, string>("tuning-enable-goal", "no"),
+           },
+           {}, "");
+    setCodecProperties("test15.encoder", true, 15, {}, {}, {}, "video/t15",
+           {
+                   pair<string, string>("tuning-enable-goal", "yes"),
+           },
+           {}, "");
+    setCodecProperties("test16.encoder", true, 16, {}, {}, {}, "video/t16",
+           {
+                   pair<string, string>("tuning-enable-goal", "yes"),
+           },
+           {}, "");
+
     setRoleProperties("audio_decoder.mp3", false, 1, "audio/mpeg", "test1.decoder",
                       {pair<string, string>("attribute::disabled", "present"),
                        pair<string, string>("rank", "4")});
@@ -191,6 +218,22 @@
                         pair<string, string>("tuning-pi", "3.1415")
                        });
 
+    // minsdk
+    setRoleProperties("video_encoder.t12", true, 12, "video/t12", "test12.encoder",
+                       {pair<string, string>("tuning-enable-goal", "no"),
+                        pair<string, string>("attribute::disabled", "present") });
+    setRoleProperties("video_encoder.t13", true, 13, "video/t13", "test13.encoder",
+                       {pair<string, string>("tuning-enable-goal", "no"),
+                        pair<string, string>("attribute::disabled", "present") });
+    setRoleProperties("video_encoder.t14", true, 14, "video/t14", "test14.encoder",
+                       {pair<string, string>("tuning-enable-goal", "no"),
+                        pair<string, string>("attribute::disabled", "present") });
+    setRoleProperties("video_encoder.t15", true, 15, "video/t15", "test15.encoder",
+                       {pair<string, string>("tuning-enable-goal", "yes")});
+    setRoleProperties("video_encoder.t16", true, 16, "video/t16", "test16.encoder",
+                       {pair<string, string>("tuning-enable-goal", "yes")});
+
+
     setServiceAttribute(
             {pair<string, string>("domain-telephony", "0"), pair<string, string>("domain-tv", "0"),
              pair<string, string>("setting2", "0"), pair<string, string>("variant-variant1", "0")});
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
index 8cae423..e066927 100644
--- a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
@@ -88,5 +88,21 @@
             <Tuning name="hungry" value="yes"/>
             <Tuning name="pi" value="3.1415"/>
         </MediaCodec>
+        <!-- test minsdk -->
+        <MediaCodec name="test12.encoder" type="video/t12" minsdk="100">
+            <Tuning name="enable-goal" value="no"/>
+        </MediaCodec>
+        <MediaCodec name="test13.encoder" type="video/t13" enabled="false" minsdk="100">
+            <Tuning name="enable-goal" value="no"/>
+        </MediaCodec>
+        <MediaCodec name="test14.encoder" type="video/t14" enabled="true" minsdk="100">
+            <Tuning name="enable-goal" value="no"/>
+        </MediaCodec>
+        <MediaCodec name="test15.encoder" type="video/t15" minsdk="34">
+            <Tuning name="enable-goal" value="yes"/>
+        </MediaCodec>
+        <MediaCodec name="test16.encoder" type="video/t16" enabled="false" minsdk="34">
+            <Tuning name="enable-goal" value="yes"/>
+        </MediaCodec>
     </Encoders>
 </Included>
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index edddaa4..19f9549 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -26,7 +26,33 @@
     ],
 }
 
-cc_binary {
+prebuilt_etc {
+    name: "mediaserver.zygote64_32.rc",
+    src: "mediaserver.zygote64_32.rc",
+    sub_dir: "init/hw",
+}
+
+prebuilt_etc {
+    name: "mediaserver.zygote64.rc",
+    src: "mediaserver.zygote64.rc",
+    sub_dir: "init/hw",
+}
+
+soong_config_module_type {
+    name: "mediaserver_cc_binary",
+    module_type: "cc_binary",
+    config_namespace: "ANDROID",
+    bool_variables: ["TARGET_DYNAMIC_64_32_MEDIASERVER"],
+    properties: [
+        "compile_multilib",
+        "init_rc",
+        "multilib.lib32.suffix",
+        "multilib.lib64.suffix",
+        "required",
+    ],
+}
+
+mediaserver_cc_binary {
     name: "mediaserver",
 
     srcs: ["main_mediaserver.cpp"],
@@ -55,12 +81,32 @@
     // ****************************************************************
     compile_multilib: "prefer32",
 
-    init_rc: ["mediaserver.rc"],
-
     cflags: [
         "-Werror",
         "-Wall",
     ],
 
     vintf_fragments: ["manifest_media_c2_software.xml"],
+
+    soong_config_variables: {
+        TARGET_DYNAMIC_64_32_MEDIASERVER: {
+            compile_multilib: "both",
+            multilib: {
+                lib32: {
+                    suffix: "32",
+                },
+                lib64: {
+                    suffix: "64",
+                },
+            },
+            required: [
+                "mediaserver.zygote64_32.rc",
+                "mediaserver.zygote64.rc",
+            ],
+            init_rc: ["mediaserver_dynamic.rc"],
+            conditions_default: {
+                init_rc: ["mediaserver.rc"],
+            },
+        },
+    },
 }
diff --git a/media/mediaserver/mediaserver.zygote64.rc b/media/mediaserver/mediaserver.zygote64.rc
new file mode 100644
index 0000000..8842b01
--- /dev/null
+++ b/media/mediaserver/mediaserver.zygote64.rc
@@ -0,0 +1,6 @@
+service media /system/bin/mediaserver64
+    class main
+    user media
+    group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/media/mediaserver/mediaserver.zygote64_32.rc b/media/mediaserver/mediaserver.zygote64_32.rc
new file mode 100644
index 0000000..4039073
--- /dev/null
+++ b/media/mediaserver/mediaserver.zygote64_32.rc
@@ -0,0 +1,6 @@
+service media /system/bin/mediaserver32
+    class main
+    user media
+    group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh HighPerformance
diff --git a/media/mediaserver/mediaserver_dynamic.rc b/media/mediaserver/mediaserver_dynamic.rc
new file mode 100644
index 0000000..65d5c40
--- /dev/null
+++ b/media/mediaserver/mediaserver_dynamic.rc
@@ -0,0 +1,4 @@
+on property:init.svc.media=*
+    setprop init.svc.mediadrm ${init.svc.media}
+
+import /system/etc/init/hw/mediaserver.${ro.zygote}.rc
diff --git a/media/libstagefright/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
similarity index 97%
rename from media/libstagefright/bqhelper/Android.bp
rename to media/module/bqhelper/Android.bp
index 0e2b472..c4dadd0 100644
--- a/media/libstagefright/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -4,7 +4,6 @@
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
 }
 
 cc_defaults {
@@ -12,6 +11,7 @@
     double_loadable: true,
 
     srcs: [
+        ":libgui_frame_event_aidl",
         "FrameDropper.cpp",
         "GraphicBufferSource.cpp",
     ],
diff --git a/media/libstagefright/bqhelper/FrameDropper.cpp b/media/module/bqhelper/FrameDropper.cpp
similarity index 100%
rename from media/libstagefright/bqhelper/FrameDropper.cpp
rename to media/module/bqhelper/FrameDropper.cpp
diff --git a/media/libstagefright/bqhelper/GraphicBufferSource.cpp b/media/module/bqhelper/GraphicBufferSource.cpp
similarity index 99%
rename from media/libstagefright/bqhelper/GraphicBufferSource.cpp
rename to media/module/bqhelper/GraphicBufferSource.cpp
index cff14ac..3202cc5 100644
--- a/media/libstagefright/bqhelper/GraphicBufferSource.cpp
+++ b/media/module/bqhelper/GraphicBufferSource.cpp
@@ -1150,7 +1150,7 @@
         int32_t bufferCount,
         uint32_t frameWidth,
         uint32_t frameHeight,
-        uint32_t consumerUsage) {
+        uint64_t consumerUsage) {
     if (component == NULL) {
         return BAD_VALUE;
     }
diff --git a/media/libstagefright/bqhelper/TEST_MAPPING b/media/module/bqhelper/TEST_MAPPING
similarity index 100%
rename from media/libstagefright/bqhelper/TEST_MAPPING
rename to media/module/bqhelper/TEST_MAPPING
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/ComponentWrapper.h b/media/module/bqhelper/include/media/stagefright/bqhelper/ComponentWrapper.h
similarity index 100%
rename from media/libstagefright/bqhelper/include/media/stagefright/bqhelper/ComponentWrapper.h
rename to media/module/bqhelper/include/media/stagefright/bqhelper/ComponentWrapper.h
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h b/media/module/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
similarity index 100%
rename from media/libstagefright/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
rename to media/module/bqhelper/include/media/stagefright/bqhelper/FrameDropper.h
diff --git a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h b/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
similarity index 99%
rename from media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
rename to media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
index fe6bcce..4e4fbfd 100644
--- a/media/libstagefright/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
+++ b/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
@@ -129,7 +129,7 @@
         int32_t bufferCount,
         uint32_t frameWidth,
         uint32_t frameHeight,
-        uint32_t consumerUsage);
+        uint64_t consumerUsage);
 
     // This is called after the last input frame has been submitted or buffer
     // timestamp is greater or equal than stopTimeUs. We need to submit an empty
diff --git a/media/libstagefright/bqhelper/tests/Android.bp b/media/module/bqhelper/tests/Android.bp
similarity index 88%
rename from media/libstagefright/bqhelper/tests/Android.bp
rename to media/module/bqhelper/tests/Android.bp
index 95953ee..3004ec4 100644
--- a/media/libstagefright/bqhelper/tests/Android.bp
+++ b/media/module/bqhelper/tests/Android.bp
@@ -4,7 +4,6 @@
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
 }
 
 cc_test {
diff --git a/media/libstagefright/bqhelper/tests/FrameDropper_test.cpp b/media/module/bqhelper/tests/FrameDropper_test.cpp
similarity index 100%
rename from media/libstagefright/bqhelper/tests/FrameDropper_test.cpp
rename to media/module/bqhelper/tests/FrameDropper_test.cpp
diff --git a/media/bufferpool/1.0/Accessor.cpp b/media/module/bufferpool/1.0/Accessor.cpp
similarity index 100%
rename from media/bufferpool/1.0/Accessor.cpp
rename to media/module/bufferpool/1.0/Accessor.cpp
diff --git a/media/bufferpool/1.0/Accessor.h b/media/module/bufferpool/1.0/Accessor.h
similarity index 100%
rename from media/bufferpool/1.0/Accessor.h
rename to media/module/bufferpool/1.0/Accessor.h
diff --git a/media/bufferpool/1.0/AccessorImpl.cpp b/media/module/bufferpool/1.0/AccessorImpl.cpp
similarity index 100%
rename from media/bufferpool/1.0/AccessorImpl.cpp
rename to media/module/bufferpool/1.0/AccessorImpl.cpp
diff --git a/media/bufferpool/1.0/AccessorImpl.h b/media/module/bufferpool/1.0/AccessorImpl.h
similarity index 100%
rename from media/bufferpool/1.0/AccessorImpl.h
rename to media/module/bufferpool/1.0/AccessorImpl.h
diff --git a/media/bufferpool/1.0/Android.bp b/media/module/bufferpool/1.0/Android.bp
similarity index 100%
rename from media/bufferpool/1.0/Android.bp
rename to media/module/bufferpool/1.0/Android.bp
diff --git a/media/bufferpool/1.0/BufferPoolClient.cpp b/media/module/bufferpool/1.0/BufferPoolClient.cpp
similarity index 100%
rename from media/bufferpool/1.0/BufferPoolClient.cpp
rename to media/module/bufferpool/1.0/BufferPoolClient.cpp
diff --git a/media/bufferpool/1.0/BufferPoolClient.h b/media/module/bufferpool/1.0/BufferPoolClient.h
similarity index 100%
rename from media/bufferpool/1.0/BufferPoolClient.h
rename to media/module/bufferpool/1.0/BufferPoolClient.h
diff --git a/media/bufferpool/1.0/BufferStatus.cpp b/media/module/bufferpool/1.0/BufferStatus.cpp
similarity index 100%
rename from media/bufferpool/1.0/BufferStatus.cpp
rename to media/module/bufferpool/1.0/BufferStatus.cpp
diff --git a/media/bufferpool/1.0/BufferStatus.h b/media/module/bufferpool/1.0/BufferStatus.h
similarity index 100%
rename from media/bufferpool/1.0/BufferStatus.h
rename to media/module/bufferpool/1.0/BufferStatus.h
diff --git a/media/bufferpool/1.0/ClientManager.cpp b/media/module/bufferpool/1.0/ClientManager.cpp
similarity index 100%
rename from media/bufferpool/1.0/ClientManager.cpp
rename to media/module/bufferpool/1.0/ClientManager.cpp
diff --git a/media/bufferpool/1.0/Connection.cpp b/media/module/bufferpool/1.0/Connection.cpp
similarity index 100%
rename from media/bufferpool/1.0/Connection.cpp
rename to media/module/bufferpool/1.0/Connection.cpp
diff --git a/media/bufferpool/1.0/Connection.h b/media/module/bufferpool/1.0/Connection.h
similarity index 100%
rename from media/bufferpool/1.0/Connection.h
rename to media/module/bufferpool/1.0/Connection.h
diff --git a/media/bufferpool/1.0/TEST_MAPPING b/media/module/bufferpool/1.0/TEST_MAPPING
similarity index 100%
rename from media/bufferpool/1.0/TEST_MAPPING
rename to media/module/bufferpool/1.0/TEST_MAPPING
diff --git a/media/bufferpool/1.0/include/bufferpool/BufferPoolTypes.h b/media/module/bufferpool/1.0/include/bufferpool/BufferPoolTypes.h
similarity index 100%
rename from media/bufferpool/1.0/include/bufferpool/BufferPoolTypes.h
rename to media/module/bufferpool/1.0/include/bufferpool/BufferPoolTypes.h
diff --git a/media/bufferpool/1.0/include/bufferpool/ClientManager.h b/media/module/bufferpool/1.0/include/bufferpool/ClientManager.h
similarity index 100%
rename from media/bufferpool/1.0/include/bufferpool/ClientManager.h
rename to media/module/bufferpool/1.0/include/bufferpool/ClientManager.h
diff --git a/media/bufferpool/1.0/vts/Android.bp b/media/module/bufferpool/1.0/vts/Android.bp
similarity index 100%
rename from media/bufferpool/1.0/vts/Android.bp
rename to media/module/bufferpool/1.0/vts/Android.bp
diff --git a/media/bufferpool/1.0/vts/OWNERS b/media/module/bufferpool/1.0/vts/OWNERS
similarity index 100%
rename from media/bufferpool/1.0/vts/OWNERS
rename to media/module/bufferpool/1.0/vts/OWNERS
diff --git a/media/bufferpool/1.0/vts/allocator.cpp b/media/module/bufferpool/1.0/vts/allocator.cpp
similarity index 100%
rename from media/bufferpool/1.0/vts/allocator.cpp
rename to media/module/bufferpool/1.0/vts/allocator.cpp
diff --git a/media/bufferpool/1.0/vts/allocator.h b/media/module/bufferpool/1.0/vts/allocator.h
similarity index 100%
rename from media/bufferpool/1.0/vts/allocator.h
rename to media/module/bufferpool/1.0/vts/allocator.h
diff --git a/media/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
similarity index 100%
rename from media/bufferpool/1.0/vts/multi.cpp
rename to media/module/bufferpool/1.0/vts/multi.cpp
diff --git a/media/bufferpool/1.0/vts/single.cpp b/media/module/bufferpool/1.0/vts/single.cpp
similarity index 100%
rename from media/bufferpool/1.0/vts/single.cpp
rename to media/module/bufferpool/1.0/vts/single.cpp
diff --git a/media/bufferpool/2.0/Accessor.cpp b/media/module/bufferpool/2.0/Accessor.cpp
similarity index 100%
rename from media/bufferpool/2.0/Accessor.cpp
rename to media/module/bufferpool/2.0/Accessor.cpp
diff --git a/media/bufferpool/2.0/Accessor.h b/media/module/bufferpool/2.0/Accessor.h
similarity index 100%
rename from media/bufferpool/2.0/Accessor.h
rename to media/module/bufferpool/2.0/Accessor.h
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
similarity index 100%
rename from media/bufferpool/2.0/AccessorImpl.cpp
rename to media/module/bufferpool/2.0/AccessorImpl.cpp
diff --git a/media/bufferpool/2.0/AccessorImpl.h b/media/module/bufferpool/2.0/AccessorImpl.h
similarity index 100%
rename from media/bufferpool/2.0/AccessorImpl.h
rename to media/module/bufferpool/2.0/AccessorImpl.h
diff --git a/media/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
similarity index 100%
rename from media/bufferpool/2.0/Android.bp
rename to media/module/bufferpool/2.0/Android.bp
diff --git a/media/bufferpool/2.0/BufferPoolClient.cpp b/media/module/bufferpool/2.0/BufferPoolClient.cpp
similarity index 100%
rename from media/bufferpool/2.0/BufferPoolClient.cpp
rename to media/module/bufferpool/2.0/BufferPoolClient.cpp
diff --git a/media/bufferpool/2.0/BufferPoolClient.h b/media/module/bufferpool/2.0/BufferPoolClient.h
similarity index 100%
rename from media/bufferpool/2.0/BufferPoolClient.h
rename to media/module/bufferpool/2.0/BufferPoolClient.h
diff --git a/media/bufferpool/2.0/BufferStatus.cpp b/media/module/bufferpool/2.0/BufferStatus.cpp
similarity index 100%
rename from media/bufferpool/2.0/BufferStatus.cpp
rename to media/module/bufferpool/2.0/BufferStatus.cpp
diff --git a/media/bufferpool/2.0/BufferStatus.h b/media/module/bufferpool/2.0/BufferStatus.h
similarity index 100%
rename from media/bufferpool/2.0/BufferStatus.h
rename to media/module/bufferpool/2.0/BufferStatus.h
diff --git a/media/bufferpool/2.0/ClientManager.cpp b/media/module/bufferpool/2.0/ClientManager.cpp
similarity index 100%
rename from media/bufferpool/2.0/ClientManager.cpp
rename to media/module/bufferpool/2.0/ClientManager.cpp
diff --git a/media/bufferpool/2.0/Connection.cpp b/media/module/bufferpool/2.0/Connection.cpp
similarity index 100%
rename from media/bufferpool/2.0/Connection.cpp
rename to media/module/bufferpool/2.0/Connection.cpp
diff --git a/media/bufferpool/2.0/Connection.h b/media/module/bufferpool/2.0/Connection.h
similarity index 100%
rename from media/bufferpool/2.0/Connection.h
rename to media/module/bufferpool/2.0/Connection.h
diff --git a/media/bufferpool/2.0/Observer.cpp b/media/module/bufferpool/2.0/Observer.cpp
similarity index 100%
rename from media/bufferpool/2.0/Observer.cpp
rename to media/module/bufferpool/2.0/Observer.cpp
diff --git a/media/bufferpool/2.0/Observer.h b/media/module/bufferpool/2.0/Observer.h
similarity index 100%
rename from media/bufferpool/2.0/Observer.h
rename to media/module/bufferpool/2.0/Observer.h
diff --git a/media/bufferpool/2.0/TEST_MAPPING b/media/module/bufferpool/2.0/TEST_MAPPING
similarity index 100%
rename from media/bufferpool/2.0/TEST_MAPPING
rename to media/module/bufferpool/2.0/TEST_MAPPING
diff --git a/media/bufferpool/2.0/include/bufferpool/BufferPoolTypes.h b/media/module/bufferpool/2.0/include/bufferpool/BufferPoolTypes.h
similarity index 100%
rename from media/bufferpool/2.0/include/bufferpool/BufferPoolTypes.h
rename to media/module/bufferpool/2.0/include/bufferpool/BufferPoolTypes.h
diff --git a/media/bufferpool/2.0/include/bufferpool/ClientManager.h b/media/module/bufferpool/2.0/include/bufferpool/ClientManager.h
similarity index 100%
rename from media/bufferpool/2.0/include/bufferpool/ClientManager.h
rename to media/module/bufferpool/2.0/include/bufferpool/ClientManager.h
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/module/bufferpool/2.0/tests/Android.bp
similarity index 100%
rename from media/bufferpool/2.0/tests/Android.bp
rename to media/module/bufferpool/2.0/tests/Android.bp
diff --git a/media/bufferpool/2.0/tests/AndroidTest.xml b/media/module/bufferpool/2.0/tests/AndroidTest.xml
similarity index 100%
rename from media/bufferpool/2.0/tests/AndroidTest.xml
rename to media/module/bufferpool/2.0/tests/AndroidTest.xml
diff --git a/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp b/media/module/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
similarity index 100%
rename from media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
rename to media/module/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
diff --git a/media/bufferpool/2.0/tests/OWNERS b/media/module/bufferpool/2.0/tests/OWNERS
similarity index 100%
rename from media/bufferpool/2.0/tests/OWNERS
rename to media/module/bufferpool/2.0/tests/OWNERS
diff --git a/media/bufferpool/2.0/tests/README.md b/media/module/bufferpool/2.0/tests/README.md
similarity index 100%
rename from media/bufferpool/2.0/tests/README.md
rename to media/module/bufferpool/2.0/tests/README.md
diff --git a/media/bufferpool/2.0/tests/allocator.cpp b/media/module/bufferpool/2.0/tests/allocator.cpp
similarity index 100%
rename from media/bufferpool/2.0/tests/allocator.cpp
rename to media/module/bufferpool/2.0/tests/allocator.cpp
diff --git a/media/bufferpool/2.0/tests/allocator.h b/media/module/bufferpool/2.0/tests/allocator.h
similarity index 100%
rename from media/bufferpool/2.0/tests/allocator.h
rename to media/module/bufferpool/2.0/tests/allocator.h
diff --git a/media/bufferpool/2.0/tests/cond.cpp b/media/module/bufferpool/2.0/tests/cond.cpp
similarity index 100%
rename from media/bufferpool/2.0/tests/cond.cpp
rename to media/module/bufferpool/2.0/tests/cond.cpp
diff --git a/media/bufferpool/2.0/tests/multi.cpp b/media/module/bufferpool/2.0/tests/multi.cpp
similarity index 100%
rename from media/bufferpool/2.0/tests/multi.cpp
rename to media/module/bufferpool/2.0/tests/multi.cpp
diff --git a/media/bufferpool/2.0/tests/single.cpp b/media/module/bufferpool/2.0/tests/single.cpp
similarity index 100%
rename from media/bufferpool/2.0/tests/single.cpp
rename to media/module/bufferpool/2.0/tests/single.cpp
diff --git a/media/codecs/amrnb/TEST_MAPPING b/media/module/codecs/amrnb/TEST_MAPPING
similarity index 100%
rename from media/codecs/amrnb/TEST_MAPPING
rename to media/module/codecs/amrnb/TEST_MAPPING
diff --git a/media/codecs/amrnb/common/Android.bp b/media/module/codecs/amrnb/common/Android.bp
similarity index 100%
rename from media/codecs/amrnb/common/Android.bp
rename to media/module/codecs/amrnb/common/Android.bp
diff --git a/media/codecs/amrnb/common/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrnb/common/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrnb/common/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrnb/common/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrnb/common/NOTICE b/media/module/codecs/amrnb/common/NOTICE
similarity index 100%
rename from media/codecs/amrnb/common/NOTICE
rename to media/module/codecs/amrnb/common/NOTICE
diff --git a/media/codecs/amrnb/common/include/abs_s.h b/media/module/codecs/amrnb/common/include/abs_s.h
similarity index 100%
rename from media/codecs/amrnb/common/include/abs_s.h
rename to media/module/codecs/amrnb/common/include/abs_s.h
diff --git a/media/codecs/amrnb/common/include/add.h b/media/module/codecs/amrnb/common/include/add.h
similarity index 100%
rename from media/codecs/amrnb/common/include/add.h
rename to media/module/codecs/amrnb/common/include/add.h
diff --git a/media/codecs/amrnb/common/include/az_lsp.h b/media/module/codecs/amrnb/common/include/az_lsp.h
similarity index 100%
rename from media/codecs/amrnb/common/include/az_lsp.h
rename to media/module/codecs/amrnb/common/include/az_lsp.h
diff --git a/media/codecs/amrnb/common/include/basic_op.h b/media/module/codecs/amrnb/common/include/basic_op.h
similarity index 100%
rename from media/codecs/amrnb/common/include/basic_op.h
rename to media/module/codecs/amrnb/common/include/basic_op.h
diff --git a/media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h b/media/module/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
similarity index 100%
rename from media/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
rename to media/module/codecs/amrnb/common/include/basic_op_arm_gcc_v5.h
diff --git a/media/codecs/amrnb/common/include/basic_op_arm_v5.h b/media/module/codecs/amrnb/common/include/basic_op_arm_v5.h
similarity index 100%
rename from media/codecs/amrnb/common/include/basic_op_arm_v5.h
rename to media/module/codecs/amrnb/common/include/basic_op_arm_v5.h
diff --git a/media/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
similarity index 100%
rename from media/codecs/amrnb/common/include/basic_op_c_equivalent.h
rename to media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
diff --git a/media/codecs/amrnb/common/include/basicop_malloc.h b/media/module/codecs/amrnb/common/include/basicop_malloc.h
similarity index 100%
rename from media/codecs/amrnb/common/include/basicop_malloc.h
rename to media/module/codecs/amrnb/common/include/basicop_malloc.h
diff --git a/media/codecs/amrnb/common/include/bitno_tab.h b/media/module/codecs/amrnb/common/include/bitno_tab.h
similarity index 100%
rename from media/codecs/amrnb/common/include/bitno_tab.h
rename to media/module/codecs/amrnb/common/include/bitno_tab.h
diff --git a/media/codecs/amrnb/common/include/bitreorder_tab.h b/media/module/codecs/amrnb/common/include/bitreorder_tab.h
similarity index 100%
rename from media/codecs/amrnb/common/include/bitreorder_tab.h
rename to media/module/codecs/amrnb/common/include/bitreorder_tab.h
diff --git a/media/codecs/amrnb/common/include/bits2prm.h b/media/module/codecs/amrnb/common/include/bits2prm.h
similarity index 100%
rename from media/codecs/amrnb/common/include/bits2prm.h
rename to media/module/codecs/amrnb/common/include/bits2prm.h
diff --git a/media/codecs/amrnb/common/include/cnst.h b/media/module/codecs/amrnb/common/include/cnst.h
similarity index 100%
rename from media/codecs/amrnb/common/include/cnst.h
rename to media/module/codecs/amrnb/common/include/cnst.h
diff --git a/media/codecs/amrnb/common/include/cnst_vad.h b/media/module/codecs/amrnb/common/include/cnst_vad.h
similarity index 100%
rename from media/codecs/amrnb/common/include/cnst_vad.h
rename to media/module/codecs/amrnb/common/include/cnst_vad.h
diff --git a/media/codecs/amrnb/common/include/copy.h b/media/module/codecs/amrnb/common/include/copy.h
similarity index 100%
rename from media/codecs/amrnb/common/include/copy.h
rename to media/module/codecs/amrnb/common/include/copy.h
diff --git a/media/codecs/amrnb/common/include/d_gain_c.h b/media/module/codecs/amrnb/common/include/d_gain_c.h
similarity index 100%
rename from media/codecs/amrnb/common/include/d_gain_c.h
rename to media/module/codecs/amrnb/common/include/d_gain_c.h
diff --git a/media/codecs/amrnb/common/include/d_gain_p.h b/media/module/codecs/amrnb/common/include/d_gain_p.h
similarity index 100%
rename from media/codecs/amrnb/common/include/d_gain_p.h
rename to media/module/codecs/amrnb/common/include/d_gain_p.h
diff --git a/media/codecs/amrnb/common/include/d_plsf.h b/media/module/codecs/amrnb/common/include/d_plsf.h
similarity index 100%
rename from media/codecs/amrnb/common/include/d_plsf.h
rename to media/module/codecs/amrnb/common/include/d_plsf.h
diff --git a/media/codecs/amrnb/common/include/div_32.h b/media/module/codecs/amrnb/common/include/div_32.h
similarity index 100%
rename from media/codecs/amrnb/common/include/div_32.h
rename to media/module/codecs/amrnb/common/include/div_32.h
diff --git a/media/codecs/amrnb/common/include/div_s.h b/media/module/codecs/amrnb/common/include/div_s.h
similarity index 100%
rename from media/codecs/amrnb/common/include/div_s.h
rename to media/module/codecs/amrnb/common/include/div_s.h
diff --git a/media/codecs/amrnb/common/include/dtx_common_def.h b/media/module/codecs/amrnb/common/include/dtx_common_def.h
similarity index 100%
rename from media/codecs/amrnb/common/include/dtx_common_def.h
rename to media/module/codecs/amrnb/common/include/dtx_common_def.h
diff --git a/media/codecs/amrnb/common/include/extract_h.h b/media/module/codecs/amrnb/common/include/extract_h.h
similarity index 100%
rename from media/codecs/amrnb/common/include/extract_h.h
rename to media/module/codecs/amrnb/common/include/extract_h.h
diff --git a/media/codecs/amrnb/common/include/extract_l.h b/media/module/codecs/amrnb/common/include/extract_l.h
similarity index 100%
rename from media/codecs/amrnb/common/include/extract_l.h
rename to media/module/codecs/amrnb/common/include/extract_l.h
diff --git a/media/codecs/amrnb/common/include/frame.h b/media/module/codecs/amrnb/common/include/frame.h
similarity index 100%
rename from media/codecs/amrnb/common/include/frame.h
rename to media/module/codecs/amrnb/common/include/frame.h
diff --git a/media/codecs/amrnb/common/include/frame_type_3gpp.h b/media/module/codecs/amrnb/common/include/frame_type_3gpp.h
similarity index 100%
rename from media/codecs/amrnb/common/include/frame_type_3gpp.h
rename to media/module/codecs/amrnb/common/include/frame_type_3gpp.h
diff --git a/media/codecs/amrnb/common/include/gc_pred.h b/media/module/codecs/amrnb/common/include/gc_pred.h
similarity index 100%
rename from media/codecs/amrnb/common/include/gc_pred.h
rename to media/module/codecs/amrnb/common/include/gc_pred.h
diff --git a/media/codecs/amrnb/common/include/gmed_n.h b/media/module/codecs/amrnb/common/include/gmed_n.h
similarity index 100%
rename from media/codecs/amrnb/common/include/gmed_n.h
rename to media/module/codecs/amrnb/common/include/gmed_n.h
diff --git a/media/codecs/amrnb/common/include/gsm_amr_typedefs.h b/media/module/codecs/amrnb/common/include/gsm_amr_typedefs.h
similarity index 100%
rename from media/codecs/amrnb/common/include/gsm_amr_typedefs.h
rename to media/module/codecs/amrnb/common/include/gsm_amr_typedefs.h
diff --git a/media/codecs/amrnb/common/include/int_lpc.h b/media/module/codecs/amrnb/common/include/int_lpc.h
similarity index 100%
rename from media/codecs/amrnb/common/include/int_lpc.h
rename to media/module/codecs/amrnb/common/include/int_lpc.h
diff --git a/media/codecs/amrnb/common/include/int_lsf.h b/media/module/codecs/amrnb/common/include/int_lsf.h
similarity index 100%
rename from media/codecs/amrnb/common/include/int_lsf.h
rename to media/module/codecs/amrnb/common/include/int_lsf.h
diff --git a/media/codecs/amrnb/common/include/inv_sqrt.h b/media/module/codecs/amrnb/common/include/inv_sqrt.h
similarity index 100%
rename from media/codecs/amrnb/common/include/inv_sqrt.h
rename to media/module/codecs/amrnb/common/include/inv_sqrt.h
diff --git a/media/codecs/amrnb/common/include/l_abs.h b/media/module/codecs/amrnb/common/include/l_abs.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_abs.h
rename to media/module/codecs/amrnb/common/include/l_abs.h
diff --git a/media/codecs/amrnb/common/include/l_add.h b/media/module/codecs/amrnb/common/include/l_add.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_add.h
rename to media/module/codecs/amrnb/common/include/l_add.h
diff --git a/media/codecs/amrnb/common/include/l_add_c.h b/media/module/codecs/amrnb/common/include/l_add_c.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_add_c.h
rename to media/module/codecs/amrnb/common/include/l_add_c.h
diff --git a/media/codecs/amrnb/common/include/l_comp.h b/media/module/codecs/amrnb/common/include/l_comp.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_comp.h
rename to media/module/codecs/amrnb/common/include/l_comp.h
diff --git a/media/codecs/amrnb/common/include/l_deposit_h.h b/media/module/codecs/amrnb/common/include/l_deposit_h.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_deposit_h.h
rename to media/module/codecs/amrnb/common/include/l_deposit_h.h
diff --git a/media/codecs/amrnb/common/include/l_deposit_l.h b/media/module/codecs/amrnb/common/include/l_deposit_l.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_deposit_l.h
rename to media/module/codecs/amrnb/common/include/l_deposit_l.h
diff --git a/media/codecs/amrnb/common/include/l_extract.h b/media/module/codecs/amrnb/common/include/l_extract.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_extract.h
rename to media/module/codecs/amrnb/common/include/l_extract.h
diff --git a/media/codecs/amrnb/common/include/l_mac.h b/media/module/codecs/amrnb/common/include/l_mac.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_mac.h
rename to media/module/codecs/amrnb/common/include/l_mac.h
diff --git a/media/codecs/amrnb/common/include/l_msu.h b/media/module/codecs/amrnb/common/include/l_msu.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_msu.h
rename to media/module/codecs/amrnb/common/include/l_msu.h
diff --git a/media/codecs/amrnb/common/include/l_mult.h b/media/module/codecs/amrnb/common/include/l_mult.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_mult.h
rename to media/module/codecs/amrnb/common/include/l_mult.h
diff --git a/media/codecs/amrnb/common/include/l_negate.h b/media/module/codecs/amrnb/common/include/l_negate.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_negate.h
rename to media/module/codecs/amrnb/common/include/l_negate.h
diff --git a/media/codecs/amrnb/common/include/l_shl.h b/media/module/codecs/amrnb/common/include/l_shl.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_shl.h
rename to media/module/codecs/amrnb/common/include/l_shl.h
diff --git a/media/codecs/amrnb/common/include/l_shr.h b/media/module/codecs/amrnb/common/include/l_shr.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_shr.h
rename to media/module/codecs/amrnb/common/include/l_shr.h
diff --git a/media/codecs/amrnb/common/include/l_shr_r.h b/media/module/codecs/amrnb/common/include/l_shr_r.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_shr_r.h
rename to media/module/codecs/amrnb/common/include/l_shr_r.h
diff --git a/media/codecs/amrnb/common/include/l_sub.h b/media/module/codecs/amrnb/common/include/l_sub.h
similarity index 100%
rename from media/codecs/amrnb/common/include/l_sub.h
rename to media/module/codecs/amrnb/common/include/l_sub.h
diff --git a/media/codecs/amrnb/common/include/log2.h b/media/module/codecs/amrnb/common/include/log2.h
similarity index 100%
rename from media/codecs/amrnb/common/include/log2.h
rename to media/module/codecs/amrnb/common/include/log2.h
diff --git a/media/codecs/amrnb/common/include/log2_norm.h b/media/module/codecs/amrnb/common/include/log2_norm.h
similarity index 100%
rename from media/codecs/amrnb/common/include/log2_norm.h
rename to media/module/codecs/amrnb/common/include/log2_norm.h
diff --git a/media/codecs/amrnb/common/include/lsfwt.h b/media/module/codecs/amrnb/common/include/lsfwt.h
similarity index 100%
rename from media/codecs/amrnb/common/include/lsfwt.h
rename to media/module/codecs/amrnb/common/include/lsfwt.h
diff --git a/media/codecs/amrnb/common/include/lsp.h b/media/module/codecs/amrnb/common/include/lsp.h
similarity index 100%
rename from media/codecs/amrnb/common/include/lsp.h
rename to media/module/codecs/amrnb/common/include/lsp.h
diff --git a/media/codecs/amrnb/common/include/lsp_az.h b/media/module/codecs/amrnb/common/include/lsp_az.h
similarity index 100%
rename from media/codecs/amrnb/common/include/lsp_az.h
rename to media/module/codecs/amrnb/common/include/lsp_az.h
diff --git a/media/codecs/amrnb/common/include/lsp_lsf.h b/media/module/codecs/amrnb/common/include/lsp_lsf.h
similarity index 100%
rename from media/codecs/amrnb/common/include/lsp_lsf.h
rename to media/module/codecs/amrnb/common/include/lsp_lsf.h
diff --git a/media/codecs/amrnb/common/include/lsp_tab.h b/media/module/codecs/amrnb/common/include/lsp_tab.h
similarity index 100%
rename from media/codecs/amrnb/common/include/lsp_tab.h
rename to media/module/codecs/amrnb/common/include/lsp_tab.h
diff --git a/media/codecs/amrnb/common/include/mac_32.h b/media/module/codecs/amrnb/common/include/mac_32.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mac_32.h
rename to media/module/codecs/amrnb/common/include/mac_32.h
diff --git a/media/codecs/amrnb/common/include/mode.h b/media/module/codecs/amrnb/common/include/mode.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mode.h
rename to media/module/codecs/amrnb/common/include/mode.h
diff --git a/media/codecs/amrnb/common/include/mpy_32.h b/media/module/codecs/amrnb/common/include/mpy_32.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mpy_32.h
rename to media/module/codecs/amrnb/common/include/mpy_32.h
diff --git a/media/codecs/amrnb/common/include/mpy_32_16.h b/media/module/codecs/amrnb/common/include/mpy_32_16.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mpy_32_16.h
rename to media/module/codecs/amrnb/common/include/mpy_32_16.h
diff --git a/media/codecs/amrnb/common/include/mult.h b/media/module/codecs/amrnb/common/include/mult.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mult.h
rename to media/module/codecs/amrnb/common/include/mult.h
diff --git a/media/codecs/amrnb/common/include/mult_r.h b/media/module/codecs/amrnb/common/include/mult_r.h
similarity index 100%
rename from media/codecs/amrnb/common/include/mult_r.h
rename to media/module/codecs/amrnb/common/include/mult_r.h
diff --git a/media/codecs/amrnb/common/include/n_proc.h b/media/module/codecs/amrnb/common/include/n_proc.h
similarity index 100%
rename from media/codecs/amrnb/common/include/n_proc.h
rename to media/module/codecs/amrnb/common/include/n_proc.h
diff --git a/media/codecs/amrnb/common/include/negate.h b/media/module/codecs/amrnb/common/include/negate.h
similarity index 100%
rename from media/codecs/amrnb/common/include/negate.h
rename to media/module/codecs/amrnb/common/include/negate.h
diff --git a/media/codecs/amrnb/common/include/norm_l.h b/media/module/codecs/amrnb/common/include/norm_l.h
similarity index 100%
rename from media/codecs/amrnb/common/include/norm_l.h
rename to media/module/codecs/amrnb/common/include/norm_l.h
diff --git a/media/codecs/amrnb/common/include/norm_s.h b/media/module/codecs/amrnb/common/include/norm_s.h
similarity index 100%
rename from media/codecs/amrnb/common/include/norm_s.h
rename to media/module/codecs/amrnb/common/include/norm_s.h
diff --git a/media/codecs/amrnb/common/include/oper_32b.h b/media/module/codecs/amrnb/common/include/oper_32b.h
similarity index 100%
rename from media/codecs/amrnb/common/include/oper_32b.h
rename to media/module/codecs/amrnb/common/include/oper_32b.h
diff --git a/media/codecs/amrnb/common/include/p_ol_wgh.h b/media/module/codecs/amrnb/common/include/p_ol_wgh.h
similarity index 100%
rename from media/codecs/amrnb/common/include/p_ol_wgh.h
rename to media/module/codecs/amrnb/common/include/p_ol_wgh.h
diff --git a/media/codecs/amrnb/common/include/pow2.h b/media/module/codecs/amrnb/common/include/pow2.h
similarity index 100%
rename from media/codecs/amrnb/common/include/pow2.h
rename to media/module/codecs/amrnb/common/include/pow2.h
diff --git a/media/codecs/amrnb/common/include/pred_lt.h b/media/module/codecs/amrnb/common/include/pred_lt.h
similarity index 100%
rename from media/codecs/amrnb/common/include/pred_lt.h
rename to media/module/codecs/amrnb/common/include/pred_lt.h
diff --git a/media/codecs/amrnb/common/include/q_plsf.h b/media/module/codecs/amrnb/common/include/q_plsf.h
similarity index 100%
rename from media/codecs/amrnb/common/include/q_plsf.h
rename to media/module/codecs/amrnb/common/include/q_plsf.h
diff --git a/media/codecs/amrnb/common/include/q_plsf_3_tbl.h b/media/module/codecs/amrnb/common/include/q_plsf_3_tbl.h
similarity index 100%
rename from media/codecs/amrnb/common/include/q_plsf_3_tbl.h
rename to media/module/codecs/amrnb/common/include/q_plsf_3_tbl.h
diff --git a/media/codecs/amrnb/common/include/q_plsf_5_tbl.h b/media/module/codecs/amrnb/common/include/q_plsf_5_tbl.h
similarity index 100%
rename from media/codecs/amrnb/common/include/q_plsf_5_tbl.h
rename to media/module/codecs/amrnb/common/include/q_plsf_5_tbl.h
diff --git a/media/codecs/amrnb/common/include/qgain475_tab.h b/media/module/codecs/amrnb/common/include/qgain475_tab.h
similarity index 100%
rename from media/codecs/amrnb/common/include/qgain475_tab.h
rename to media/module/codecs/amrnb/common/include/qgain475_tab.h
diff --git a/media/codecs/amrnb/common/include/qua_gain.h b/media/module/codecs/amrnb/common/include/qua_gain.h
similarity index 100%
rename from media/codecs/amrnb/common/include/qua_gain.h
rename to media/module/codecs/amrnb/common/include/qua_gain.h
diff --git a/media/codecs/amrnb/common/include/qua_gain_tbl.h b/media/module/codecs/amrnb/common/include/qua_gain_tbl.h
similarity index 100%
rename from media/codecs/amrnb/common/include/qua_gain_tbl.h
rename to media/module/codecs/amrnb/common/include/qua_gain_tbl.h
diff --git a/media/codecs/amrnb/common/include/reorder.h b/media/module/codecs/amrnb/common/include/reorder.h
similarity index 100%
rename from media/codecs/amrnb/common/include/reorder.h
rename to media/module/codecs/amrnb/common/include/reorder.h
diff --git a/media/codecs/amrnb/common/include/residu.h b/media/module/codecs/amrnb/common/include/residu.h
similarity index 100%
rename from media/codecs/amrnb/common/include/residu.h
rename to media/module/codecs/amrnb/common/include/residu.h
diff --git a/media/codecs/amrnb/common/include/reverse_bits.h b/media/module/codecs/amrnb/common/include/reverse_bits.h
similarity index 100%
rename from media/codecs/amrnb/common/include/reverse_bits.h
rename to media/module/codecs/amrnb/common/include/reverse_bits.h
diff --git a/media/codecs/amrnb/common/include/round.h b/media/module/codecs/amrnb/common/include/round.h
similarity index 100%
rename from media/codecs/amrnb/common/include/round.h
rename to media/module/codecs/amrnb/common/include/round.h
diff --git a/media/codecs/amrnb/common/include/set_zero.h b/media/module/codecs/amrnb/common/include/set_zero.h
similarity index 100%
rename from media/codecs/amrnb/common/include/set_zero.h
rename to media/module/codecs/amrnb/common/include/set_zero.h
diff --git a/media/codecs/amrnb/common/include/shl.h b/media/module/codecs/amrnb/common/include/shl.h
similarity index 100%
rename from media/codecs/amrnb/common/include/shl.h
rename to media/module/codecs/amrnb/common/include/shl.h
diff --git a/media/codecs/amrnb/common/include/shr.h b/media/module/codecs/amrnb/common/include/shr.h
similarity index 100%
rename from media/codecs/amrnb/common/include/shr.h
rename to media/module/codecs/amrnb/common/include/shr.h
diff --git a/media/codecs/amrnb/common/include/shr_r.h b/media/module/codecs/amrnb/common/include/shr_r.h
similarity index 100%
rename from media/codecs/amrnb/common/include/shr_r.h
rename to media/module/codecs/amrnb/common/include/shr_r.h
diff --git a/media/codecs/amrnb/common/include/sqrt_l.h b/media/module/codecs/amrnb/common/include/sqrt_l.h
similarity index 100%
rename from media/codecs/amrnb/common/include/sqrt_l.h
rename to media/module/codecs/amrnb/common/include/sqrt_l.h
diff --git a/media/codecs/amrnb/common/include/sub.h b/media/module/codecs/amrnb/common/include/sub.h
similarity index 100%
rename from media/codecs/amrnb/common/include/sub.h
rename to media/module/codecs/amrnb/common/include/sub.h
diff --git a/media/codecs/amrnb/common/include/syn_filt.h b/media/module/codecs/amrnb/common/include/syn_filt.h
similarity index 100%
rename from media/codecs/amrnb/common/include/syn_filt.h
rename to media/module/codecs/amrnb/common/include/syn_filt.h
diff --git a/media/codecs/amrnb/common/include/typedef.h b/media/module/codecs/amrnb/common/include/typedef.h
similarity index 100%
rename from media/codecs/amrnb/common/include/typedef.h
rename to media/module/codecs/amrnb/common/include/typedef.h
diff --git a/media/codecs/amrnb/common/include/vad.h b/media/module/codecs/amrnb/common/include/vad.h
similarity index 100%
rename from media/codecs/amrnb/common/include/vad.h
rename to media/module/codecs/amrnb/common/include/vad.h
diff --git a/media/codecs/amrnb/common/include/vad1.h b/media/module/codecs/amrnb/common/include/vad1.h
similarity index 100%
rename from media/codecs/amrnb/common/include/vad1.h
rename to media/module/codecs/amrnb/common/include/vad1.h
diff --git a/media/codecs/amrnb/common/include/vad2.h b/media/module/codecs/amrnb/common/include/vad2.h
similarity index 100%
rename from media/codecs/amrnb/common/include/vad2.h
rename to media/module/codecs/amrnb/common/include/vad2.h
diff --git a/media/codecs/amrnb/common/include/weight_a.h b/media/module/codecs/amrnb/common/include/weight_a.h
similarity index 100%
rename from media/codecs/amrnb/common/include/weight_a.h
rename to media/module/codecs/amrnb/common/include/weight_a.h
diff --git a/media/codecs/amrnb/common/include/window_tab.h b/media/module/codecs/amrnb/common/include/window_tab.h
similarity index 100%
rename from media/codecs/amrnb/common/include/window_tab.h
rename to media/module/codecs/amrnb/common/include/window_tab.h
diff --git a/media/codecs/amrnb/common/include/wmf_to_ets.h b/media/module/codecs/amrnb/common/include/wmf_to_ets.h
similarity index 100%
rename from media/codecs/amrnb/common/include/wmf_to_ets.h
rename to media/module/codecs/amrnb/common/include/wmf_to_ets.h
diff --git a/media/codecs/amrnb/common/src/add.cpp b/media/module/codecs/amrnb/common/src/add.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/add.cpp
rename to media/module/codecs/amrnb/common/src/add.cpp
diff --git a/media/codecs/amrnb/common/src/az_lsp.cpp b/media/module/codecs/amrnb/common/src/az_lsp.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/az_lsp.cpp
rename to media/module/codecs/amrnb/common/src/az_lsp.cpp
diff --git a/media/codecs/amrnb/common/src/bitno_tab.cpp b/media/module/codecs/amrnb/common/src/bitno_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/bitno_tab.cpp
rename to media/module/codecs/amrnb/common/src/bitno_tab.cpp
diff --git a/media/codecs/amrnb/common/src/bitreorder_tab.cpp b/media/module/codecs/amrnb/common/src/bitreorder_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/bitreorder_tab.cpp
rename to media/module/codecs/amrnb/common/src/bitreorder_tab.cpp
diff --git a/media/codecs/amrnb/common/src/bits2prm.cpp b/media/module/codecs/amrnb/common/src/bits2prm.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/bits2prm.cpp
rename to media/module/codecs/amrnb/common/src/bits2prm.cpp
diff --git a/media/codecs/amrnb/common/src/c2_9pf_tab.cpp b/media/module/codecs/amrnb/common/src/c2_9pf_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/c2_9pf_tab.cpp
rename to media/module/codecs/amrnb/common/src/c2_9pf_tab.cpp
diff --git a/media/codecs/amrnb/common/src/copy.cpp b/media/module/codecs/amrnb/common/src/copy.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/copy.cpp
rename to media/module/codecs/amrnb/common/src/copy.cpp
diff --git a/media/codecs/amrnb/common/src/div_32.cpp b/media/module/codecs/amrnb/common/src/div_32.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/div_32.cpp
rename to media/module/codecs/amrnb/common/src/div_32.cpp
diff --git a/media/codecs/amrnb/common/src/div_s.cpp b/media/module/codecs/amrnb/common/src/div_s.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/div_s.cpp
rename to media/module/codecs/amrnb/common/src/div_s.cpp
diff --git a/media/codecs/amrnb/common/src/extract_h.cpp b/media/module/codecs/amrnb/common/src/extract_h.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/extract_h.cpp
rename to media/module/codecs/amrnb/common/src/extract_h.cpp
diff --git a/media/codecs/amrnb/common/src/extract_l.cpp b/media/module/codecs/amrnb/common/src/extract_l.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/extract_l.cpp
rename to media/module/codecs/amrnb/common/src/extract_l.cpp
diff --git a/media/codecs/amrnb/common/src/gains_tbl.cpp b/media/module/codecs/amrnb/common/src/gains_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/gains_tbl.cpp
rename to media/module/codecs/amrnb/common/src/gains_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/gc_pred.cpp b/media/module/codecs/amrnb/common/src/gc_pred.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/gc_pred.cpp
rename to media/module/codecs/amrnb/common/src/gc_pred.cpp
diff --git a/media/codecs/amrnb/common/src/gmed_n.cpp b/media/module/codecs/amrnb/common/src/gmed_n.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/gmed_n.cpp
rename to media/module/codecs/amrnb/common/src/gmed_n.cpp
diff --git a/media/codecs/amrnb/common/src/gray_tbl.cpp b/media/module/codecs/amrnb/common/src/gray_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/gray_tbl.cpp
rename to media/module/codecs/amrnb/common/src/gray_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/grid_tbl.cpp b/media/module/codecs/amrnb/common/src/grid_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/grid_tbl.cpp
rename to media/module/codecs/amrnb/common/src/grid_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/int_lpc.cpp b/media/module/codecs/amrnb/common/src/int_lpc.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/int_lpc.cpp
rename to media/module/codecs/amrnb/common/src/int_lpc.cpp
diff --git a/media/codecs/amrnb/common/src/inv_sqrt.cpp b/media/module/codecs/amrnb/common/src/inv_sqrt.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/inv_sqrt.cpp
rename to media/module/codecs/amrnb/common/src/inv_sqrt.cpp
diff --git a/media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp b/media/module/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
rename to media/module/codecs/amrnb/common/src/inv_sqrt_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/l_abs.cpp b/media/module/codecs/amrnb/common/src/l_abs.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/l_abs.cpp
rename to media/module/codecs/amrnb/common/src/l_abs.cpp
diff --git a/media/codecs/amrnb/common/src/l_deposit_h.cpp b/media/module/codecs/amrnb/common/src/l_deposit_h.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/l_deposit_h.cpp
rename to media/module/codecs/amrnb/common/src/l_deposit_h.cpp
diff --git a/media/codecs/amrnb/common/src/l_deposit_l.cpp b/media/module/codecs/amrnb/common/src/l_deposit_l.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/l_deposit_l.cpp
rename to media/module/codecs/amrnb/common/src/l_deposit_l.cpp
diff --git a/media/codecs/amrnb/common/src/l_shr_r.cpp b/media/module/codecs/amrnb/common/src/l_shr_r.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/l_shr_r.cpp
rename to media/module/codecs/amrnb/common/src/l_shr_r.cpp
diff --git a/media/codecs/amrnb/common/src/log2.cpp b/media/module/codecs/amrnb/common/src/log2.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/log2.cpp
rename to media/module/codecs/amrnb/common/src/log2.cpp
diff --git a/media/codecs/amrnb/common/src/log2_norm.cpp b/media/module/codecs/amrnb/common/src/log2_norm.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/log2_norm.cpp
rename to media/module/codecs/amrnb/common/src/log2_norm.cpp
diff --git a/media/codecs/amrnb/common/src/log2_tbl.cpp b/media/module/codecs/amrnb/common/src/log2_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/log2_tbl.cpp
rename to media/module/codecs/amrnb/common/src/log2_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/lsfwt.cpp b/media/module/codecs/amrnb/common/src/lsfwt.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsfwt.cpp
rename to media/module/codecs/amrnb/common/src/lsfwt.cpp
diff --git a/media/codecs/amrnb/common/src/lsp.cpp b/media/module/codecs/amrnb/common/src/lsp.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsp.cpp
rename to media/module/codecs/amrnb/common/src/lsp.cpp
diff --git a/media/codecs/amrnb/common/src/lsp_az.cpp b/media/module/codecs/amrnb/common/src/lsp_az.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsp_az.cpp
rename to media/module/codecs/amrnb/common/src/lsp_az.cpp
diff --git a/media/codecs/amrnb/common/src/lsp_lsf.cpp b/media/module/codecs/amrnb/common/src/lsp_lsf.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsp_lsf.cpp
rename to media/module/codecs/amrnb/common/src/lsp_lsf.cpp
diff --git a/media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp b/media/module/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
rename to media/module/codecs/amrnb/common/src/lsp_lsf_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/lsp_tab.cpp b/media/module/codecs/amrnb/common/src/lsp_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/lsp_tab.cpp
rename to media/module/codecs/amrnb/common/src/lsp_tab.cpp
diff --git a/media/codecs/amrnb/common/src/mult_r.cpp b/media/module/codecs/amrnb/common/src/mult_r.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/mult_r.cpp
rename to media/module/codecs/amrnb/common/src/mult_r.cpp
diff --git a/media/codecs/amrnb/common/src/negate.cpp b/media/module/codecs/amrnb/common/src/negate.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/negate.cpp
rename to media/module/codecs/amrnb/common/src/negate.cpp
diff --git a/media/codecs/amrnb/common/src/norm_l.cpp b/media/module/codecs/amrnb/common/src/norm_l.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/norm_l.cpp
rename to media/module/codecs/amrnb/common/src/norm_l.cpp
diff --git a/media/codecs/amrnb/common/src/norm_s.cpp b/media/module/codecs/amrnb/common/src/norm_s.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/norm_s.cpp
rename to media/module/codecs/amrnb/common/src/norm_s.cpp
diff --git a/media/codecs/amrnb/common/src/ph_disp_tab.cpp b/media/module/codecs/amrnb/common/src/ph_disp_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/ph_disp_tab.cpp
rename to media/module/codecs/amrnb/common/src/ph_disp_tab.cpp
diff --git a/media/codecs/amrnb/common/src/pow2.cpp b/media/module/codecs/amrnb/common/src/pow2.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/pow2.cpp
rename to media/module/codecs/amrnb/common/src/pow2.cpp
diff --git a/media/codecs/amrnb/common/src/pow2_tbl.cpp b/media/module/codecs/amrnb/common/src/pow2_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/pow2_tbl.cpp
rename to media/module/codecs/amrnb/common/src/pow2_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/pred_lt.cpp b/media/module/codecs/amrnb/common/src/pred_lt.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/pred_lt.cpp
rename to media/module/codecs/amrnb/common/src/pred_lt.cpp
diff --git a/media/codecs/amrnb/common/src/q_plsf.cpp b/media/module/codecs/amrnb/common/src/q_plsf.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/q_plsf.cpp
rename to media/module/codecs/amrnb/common/src/q_plsf.cpp
diff --git a/media/codecs/amrnb/common/src/q_plsf_3.cpp b/media/module/codecs/amrnb/common/src/q_plsf_3.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/q_plsf_3.cpp
rename to media/module/codecs/amrnb/common/src/q_plsf_3.cpp
diff --git a/media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp b/media/module/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
rename to media/module/codecs/amrnb/common/src/q_plsf_3_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/q_plsf_5.cpp b/media/module/codecs/amrnb/common/src/q_plsf_5.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/q_plsf_5.cpp
rename to media/module/codecs/amrnb/common/src/q_plsf_5.cpp
diff --git a/media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp b/media/module/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
rename to media/module/codecs/amrnb/common/src/q_plsf_5_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/qua_gain_tbl.cpp b/media/module/codecs/amrnb/common/src/qua_gain_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/qua_gain_tbl.cpp
rename to media/module/codecs/amrnb/common/src/qua_gain_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/reorder.cpp b/media/module/codecs/amrnb/common/src/reorder.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/reorder.cpp
rename to media/module/codecs/amrnb/common/src/reorder.cpp
diff --git a/media/codecs/amrnb/common/src/residu.cpp b/media/module/codecs/amrnb/common/src/residu.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/residu.cpp
rename to media/module/codecs/amrnb/common/src/residu.cpp
diff --git a/media/codecs/amrnb/common/src/round.cpp b/media/module/codecs/amrnb/common/src/round.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/round.cpp
rename to media/module/codecs/amrnb/common/src/round.cpp
diff --git a/media/codecs/amrnb/common/src/set_zero.cpp b/media/module/codecs/amrnb/common/src/set_zero.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/set_zero.cpp
rename to media/module/codecs/amrnb/common/src/set_zero.cpp
diff --git a/media/codecs/amrnb/common/src/shr.cpp b/media/module/codecs/amrnb/common/src/shr.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/shr.cpp
rename to media/module/codecs/amrnb/common/src/shr.cpp
diff --git a/media/codecs/amrnb/common/src/shr_r.cpp b/media/module/codecs/amrnb/common/src/shr_r.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/shr_r.cpp
rename to media/module/codecs/amrnb/common/src/shr_r.cpp
diff --git a/media/codecs/amrnb/common/src/sqrt_l.cpp b/media/module/codecs/amrnb/common/src/sqrt_l.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/sqrt_l.cpp
rename to media/module/codecs/amrnb/common/src/sqrt_l.cpp
diff --git a/media/codecs/amrnb/common/src/sqrt_l_tbl.cpp b/media/module/codecs/amrnb/common/src/sqrt_l_tbl.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/sqrt_l_tbl.cpp
rename to media/module/codecs/amrnb/common/src/sqrt_l_tbl.cpp
diff --git a/media/codecs/amrnb/common/src/sub.cpp b/media/module/codecs/amrnb/common/src/sub.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/sub.cpp
rename to media/module/codecs/amrnb/common/src/sub.cpp
diff --git a/media/codecs/amrnb/common/src/syn_filt.cpp b/media/module/codecs/amrnb/common/src/syn_filt.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/syn_filt.cpp
rename to media/module/codecs/amrnb/common/src/syn_filt.cpp
diff --git a/media/codecs/amrnb/common/src/vad1.cpp b/media/module/codecs/amrnb/common/src/vad1.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/vad1.cpp
rename to media/module/codecs/amrnb/common/src/vad1.cpp
diff --git a/media/codecs/amrnb/common/src/weight_a.cpp b/media/module/codecs/amrnb/common/src/weight_a.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/weight_a.cpp
rename to media/module/codecs/amrnb/common/src/weight_a.cpp
diff --git a/media/codecs/amrnb/common/src/window_tab.cpp b/media/module/codecs/amrnb/common/src/window_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/common/src/window_tab.cpp
rename to media/module/codecs/amrnb/common/src/window_tab.cpp
diff --git a/media/codecs/amrnb/dec/Android.bp b/media/module/codecs/amrnb/dec/Android.bp
similarity index 100%
rename from media/codecs/amrnb/dec/Android.bp
rename to media/module/codecs/amrnb/dec/Android.bp
diff --git a/media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrnb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrnb/dec/NOTICE b/media/module/codecs/amrnb/dec/NOTICE
similarity index 100%
rename from media/codecs/amrnb/dec/NOTICE
rename to media/module/codecs/amrnb/dec/NOTICE
diff --git a/media/codecs/amrnb/dec/src/a_refl.cpp b/media/module/codecs/amrnb/dec/src/a_refl.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/a_refl.cpp
rename to media/module/codecs/amrnb/dec/src/a_refl.cpp
diff --git a/media/codecs/amrnb/dec/src/a_refl.h b/media/module/codecs/amrnb/dec/src/a_refl.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/a_refl.h
rename to media/module/codecs/amrnb/dec/src/a_refl.h
diff --git a/media/codecs/amrnb/dec/src/agc.cpp b/media/module/codecs/amrnb/dec/src/agc.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/agc.cpp
rename to media/module/codecs/amrnb/dec/src/agc.cpp
diff --git a/media/codecs/amrnb/dec/src/agc.h b/media/module/codecs/amrnb/dec/src/agc.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/agc.h
rename to media/module/codecs/amrnb/dec/src/agc.h
diff --git a/media/codecs/amrnb/dec/src/amrdecode.cpp b/media/module/codecs/amrnb/dec/src/amrdecode.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/amrdecode.cpp
rename to media/module/codecs/amrnb/dec/src/amrdecode.cpp
diff --git a/media/codecs/amrnb/dec/src/amrdecode.h b/media/module/codecs/amrnb/dec/src/amrdecode.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/amrdecode.h
rename to media/module/codecs/amrnb/dec/src/amrdecode.h
diff --git a/media/codecs/amrnb/dec/src/b_cn_cod.cpp b/media/module/codecs/amrnb/dec/src/b_cn_cod.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/b_cn_cod.cpp
rename to media/module/codecs/amrnb/dec/src/b_cn_cod.cpp
diff --git a/media/codecs/amrnb/dec/src/b_cn_cod.h b/media/module/codecs/amrnb/dec/src/b_cn_cod.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/b_cn_cod.h
rename to media/module/codecs/amrnb/dec/src/b_cn_cod.h
diff --git a/media/codecs/amrnb/dec/src/bgnscd.cpp b/media/module/codecs/amrnb/dec/src/bgnscd.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/bgnscd.cpp
rename to media/module/codecs/amrnb/dec/src/bgnscd.cpp
diff --git a/media/codecs/amrnb/dec/src/bgnscd.h b/media/module/codecs/amrnb/dec/src/bgnscd.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/bgnscd.h
rename to media/module/codecs/amrnb/dec/src/bgnscd.h
diff --git a/media/codecs/amrnb/dec/src/c_g_aver.cpp b/media/module/codecs/amrnb/dec/src/c_g_aver.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/c_g_aver.cpp
rename to media/module/codecs/amrnb/dec/src/c_g_aver.cpp
diff --git a/media/codecs/amrnb/dec/src/c_g_aver.h b/media/module/codecs/amrnb/dec/src/c_g_aver.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/c_g_aver.h
rename to media/module/codecs/amrnb/dec/src/c_g_aver.h
diff --git a/media/codecs/amrnb/dec/src/d1035pf.cpp b/media/module/codecs/amrnb/dec/src/d1035pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d1035pf.cpp
rename to media/module/codecs/amrnb/dec/src/d1035pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d1035pf.h b/media/module/codecs/amrnb/dec/src/d1035pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d1035pf.h
rename to media/module/codecs/amrnb/dec/src/d1035pf.h
diff --git a/media/codecs/amrnb/dec/src/d2_11pf.cpp b/media/module/codecs/amrnb/dec/src/d2_11pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d2_11pf.cpp
rename to media/module/codecs/amrnb/dec/src/d2_11pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d2_11pf.h b/media/module/codecs/amrnb/dec/src/d2_11pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d2_11pf.h
rename to media/module/codecs/amrnb/dec/src/d2_11pf.h
diff --git a/media/codecs/amrnb/dec/src/d2_9pf.cpp b/media/module/codecs/amrnb/dec/src/d2_9pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d2_9pf.cpp
rename to media/module/codecs/amrnb/dec/src/d2_9pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d2_9pf.h b/media/module/codecs/amrnb/dec/src/d2_9pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d2_9pf.h
rename to media/module/codecs/amrnb/dec/src/d2_9pf.h
diff --git a/media/codecs/amrnb/dec/src/d3_14pf.cpp b/media/module/codecs/amrnb/dec/src/d3_14pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d3_14pf.cpp
rename to media/module/codecs/amrnb/dec/src/d3_14pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d3_14pf.h b/media/module/codecs/amrnb/dec/src/d3_14pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d3_14pf.h
rename to media/module/codecs/amrnb/dec/src/d3_14pf.h
diff --git a/media/codecs/amrnb/dec/src/d4_17pf.cpp b/media/module/codecs/amrnb/dec/src/d4_17pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d4_17pf.cpp
rename to media/module/codecs/amrnb/dec/src/d4_17pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d4_17pf.h b/media/module/codecs/amrnb/dec/src/d4_17pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d4_17pf.h
rename to media/module/codecs/amrnb/dec/src/d4_17pf.h
diff --git a/media/codecs/amrnb/dec/src/d8_31pf.cpp b/media/module/codecs/amrnb/dec/src/d8_31pf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d8_31pf.cpp
rename to media/module/codecs/amrnb/dec/src/d8_31pf.cpp
diff --git a/media/codecs/amrnb/dec/src/d8_31pf.h b/media/module/codecs/amrnb/dec/src/d8_31pf.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/d8_31pf.h
rename to media/module/codecs/amrnb/dec/src/d8_31pf.h
diff --git a/media/codecs/amrnb/dec/src/d_gain_c.cpp b/media/module/codecs/amrnb/dec/src/d_gain_c.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d_gain_c.cpp
rename to media/module/codecs/amrnb/dec/src/d_gain_c.cpp
diff --git a/media/codecs/amrnb/dec/src/d_gain_p.cpp b/media/module/codecs/amrnb/dec/src/d_gain_p.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d_gain_p.cpp
rename to media/module/codecs/amrnb/dec/src/d_gain_p.cpp
diff --git a/media/codecs/amrnb/dec/src/d_plsf.cpp b/media/module/codecs/amrnb/dec/src/d_plsf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d_plsf.cpp
rename to media/module/codecs/amrnb/dec/src/d_plsf.cpp
diff --git a/media/codecs/amrnb/dec/src/d_plsf_3.cpp b/media/module/codecs/amrnb/dec/src/d_plsf_3.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d_plsf_3.cpp
rename to media/module/codecs/amrnb/dec/src/d_plsf_3.cpp
diff --git a/media/codecs/amrnb/dec/src/d_plsf_5.cpp b/media/module/codecs/amrnb/dec/src/d_plsf_5.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/d_plsf_5.cpp
rename to media/module/codecs/amrnb/dec/src/d_plsf_5.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_amr.cpp b/media/module/codecs/amrnb/dec/src/dec_amr.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_amr.cpp
rename to media/module/codecs/amrnb/dec/src/dec_amr.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_amr.h b/media/module/codecs/amrnb/dec/src/dec_amr.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_amr.h
rename to media/module/codecs/amrnb/dec/src/dec_amr.h
diff --git a/media/codecs/amrnb/dec/src/dec_gain.cpp b/media/module/codecs/amrnb/dec/src/dec_gain.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_gain.cpp
rename to media/module/codecs/amrnb/dec/src/dec_gain.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_gain.h b/media/module/codecs/amrnb/dec/src/dec_gain.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_gain.h
rename to media/module/codecs/amrnb/dec/src/dec_gain.h
diff --git a/media/codecs/amrnb/dec/src/dec_input_format_tab.cpp b/media/module/codecs/amrnb/dec/src/dec_input_format_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_input_format_tab.cpp
rename to media/module/codecs/amrnb/dec/src/dec_input_format_tab.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_lag3.cpp b/media/module/codecs/amrnb/dec/src/dec_lag3.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_lag3.cpp
rename to media/module/codecs/amrnb/dec/src/dec_lag3.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_lag3.h b/media/module/codecs/amrnb/dec/src/dec_lag3.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_lag3.h
rename to media/module/codecs/amrnb/dec/src/dec_lag3.h
diff --git a/media/codecs/amrnb/dec/src/dec_lag6.cpp b/media/module/codecs/amrnb/dec/src/dec_lag6.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_lag6.cpp
rename to media/module/codecs/amrnb/dec/src/dec_lag6.cpp
diff --git a/media/codecs/amrnb/dec/src/dec_lag6.h b/media/module/codecs/amrnb/dec/src/dec_lag6.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/dec_lag6.h
rename to media/module/codecs/amrnb/dec/src/dec_lag6.h
diff --git a/media/codecs/amrnb/dec/src/dtx_dec.cpp b/media/module/codecs/amrnb/dec/src/dtx_dec.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/dtx_dec.cpp
rename to media/module/codecs/amrnb/dec/src/dtx_dec.cpp
diff --git a/media/codecs/amrnb/dec/src/dtx_dec.h b/media/module/codecs/amrnb/dec/src/dtx_dec.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/dtx_dec.h
rename to media/module/codecs/amrnb/dec/src/dtx_dec.h
diff --git a/media/codecs/amrnb/dec/src/ec_gains.cpp b/media/module/codecs/amrnb/dec/src/ec_gains.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/ec_gains.cpp
rename to media/module/codecs/amrnb/dec/src/ec_gains.cpp
diff --git a/media/codecs/amrnb/dec/src/ec_gains.h b/media/module/codecs/amrnb/dec/src/ec_gains.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/ec_gains.h
rename to media/module/codecs/amrnb/dec/src/ec_gains.h
diff --git a/media/codecs/amrnb/dec/src/ex_ctrl.cpp b/media/module/codecs/amrnb/dec/src/ex_ctrl.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/ex_ctrl.cpp
rename to media/module/codecs/amrnb/dec/src/ex_ctrl.cpp
diff --git a/media/codecs/amrnb/dec/src/ex_ctrl.h b/media/module/codecs/amrnb/dec/src/ex_ctrl.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/ex_ctrl.h
rename to media/module/codecs/amrnb/dec/src/ex_ctrl.h
diff --git a/media/codecs/amrnb/dec/src/gsmamr_dec.h b/media/module/codecs/amrnb/dec/src/gsmamr_dec.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/gsmamr_dec.h
rename to media/module/codecs/amrnb/dec/src/gsmamr_dec.h
diff --git a/media/codecs/amrnb/dec/src/if2_to_ets.cpp b/media/module/codecs/amrnb/dec/src/if2_to_ets.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/if2_to_ets.cpp
rename to media/module/codecs/amrnb/dec/src/if2_to_ets.cpp
diff --git a/media/codecs/amrnb/dec/src/if2_to_ets.h b/media/module/codecs/amrnb/dec/src/if2_to_ets.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/if2_to_ets.h
rename to media/module/codecs/amrnb/dec/src/if2_to_ets.h
diff --git a/media/codecs/amrnb/dec/src/int_lsf.cpp b/media/module/codecs/amrnb/dec/src/int_lsf.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/int_lsf.cpp
rename to media/module/codecs/amrnb/dec/src/int_lsf.cpp
diff --git a/media/codecs/amrnb/dec/src/lsp_avg.cpp b/media/module/codecs/amrnb/dec/src/lsp_avg.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/lsp_avg.cpp
rename to media/module/codecs/amrnb/dec/src/lsp_avg.cpp
diff --git a/media/codecs/amrnb/dec/src/lsp_avg.h b/media/module/codecs/amrnb/dec/src/lsp_avg.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/lsp_avg.h
rename to media/module/codecs/amrnb/dec/src/lsp_avg.h
diff --git a/media/codecs/amrnb/dec/src/ph_disp.cpp b/media/module/codecs/amrnb/dec/src/ph_disp.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/ph_disp.cpp
rename to media/module/codecs/amrnb/dec/src/ph_disp.cpp
diff --git a/media/codecs/amrnb/dec/src/ph_disp.h b/media/module/codecs/amrnb/dec/src/ph_disp.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/ph_disp.h
rename to media/module/codecs/amrnb/dec/src/ph_disp.h
diff --git a/media/codecs/amrnb/dec/src/post_pro.cpp b/media/module/codecs/amrnb/dec/src/post_pro.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/post_pro.cpp
rename to media/module/codecs/amrnb/dec/src/post_pro.cpp
diff --git a/media/codecs/amrnb/dec/src/post_pro.h b/media/module/codecs/amrnb/dec/src/post_pro.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/post_pro.h
rename to media/module/codecs/amrnb/dec/src/post_pro.h
diff --git a/media/codecs/amrnb/dec/src/preemph.cpp b/media/module/codecs/amrnb/dec/src/preemph.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/preemph.cpp
rename to media/module/codecs/amrnb/dec/src/preemph.cpp
diff --git a/media/codecs/amrnb/dec/src/preemph.h b/media/module/codecs/amrnb/dec/src/preemph.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/preemph.h
rename to media/module/codecs/amrnb/dec/src/preemph.h
diff --git a/media/codecs/amrnb/dec/src/pstfilt.cpp b/media/module/codecs/amrnb/dec/src/pstfilt.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/pstfilt.cpp
rename to media/module/codecs/amrnb/dec/src/pstfilt.cpp
diff --git a/media/codecs/amrnb/dec/src/pstfilt.h b/media/module/codecs/amrnb/dec/src/pstfilt.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/pstfilt.h
rename to media/module/codecs/amrnb/dec/src/pstfilt.h
diff --git a/media/codecs/amrnb/dec/src/qgain475_tab.cpp b/media/module/codecs/amrnb/dec/src/qgain475_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/qgain475_tab.cpp
rename to media/module/codecs/amrnb/dec/src/qgain475_tab.cpp
diff --git a/media/codecs/amrnb/dec/src/sp_dec.cpp b/media/module/codecs/amrnb/dec/src/sp_dec.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/sp_dec.cpp
rename to media/module/codecs/amrnb/dec/src/sp_dec.cpp
diff --git a/media/codecs/amrnb/dec/src/sp_dec.h b/media/module/codecs/amrnb/dec/src/sp_dec.h
similarity index 100%
rename from media/codecs/amrnb/dec/src/sp_dec.h
rename to media/module/codecs/amrnb/dec/src/sp_dec.h
diff --git a/media/codecs/amrnb/dec/src/wmf_to_ets.cpp b/media/module/codecs/amrnb/dec/src/wmf_to_ets.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/src/wmf_to_ets.cpp
rename to media/module/codecs/amrnb/dec/src/wmf_to_ets.cpp
diff --git a/media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h b/media/module/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
similarity index 100%
rename from media/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
rename to media/module/codecs/amrnb/dec/test/AmrnbDecTestEnvironment.h
diff --git a/media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
rename to media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
diff --git a/media/codecs/amrnb/dec/test/Android.bp b/media/module/codecs/amrnb/dec/test/Android.bp
similarity index 100%
rename from media/codecs/amrnb/dec/test/Android.bp
rename to media/module/codecs/amrnb/dec/test/Android.bp
diff --git a/media/codecs/amrnb/dec/test/AndroidTest.xml b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/amrnb/dec/test/AndroidTest.xml
rename to media/module/codecs/amrnb/dec/test/AndroidTest.xml
diff --git a/media/codecs/amrnb/dec/test/README.md b/media/module/codecs/amrnb/dec/test/README.md
similarity index 100%
rename from media/codecs/amrnb/dec/test/README.md
rename to media/module/codecs/amrnb/dec/test/README.md
diff --git a/media/codecs/amrnb/dec/test/amrnbdec_test.cpp b/media/module/codecs/amrnb/dec/test/amrnbdec_test.cpp
similarity index 100%
rename from media/codecs/amrnb/dec/test/amrnbdec_test.cpp
rename to media/module/codecs/amrnb/dec/test/amrnbdec_test.cpp
diff --git a/media/codecs/amrnb/enc/Android.bp b/media/module/codecs/amrnb/enc/Android.bp
similarity index 100%
rename from media/codecs/amrnb/enc/Android.bp
rename to media/module/codecs/amrnb/enc/Android.bp
diff --git a/media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrnb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrnb/enc/NOTICE b/media/module/codecs/amrnb/enc/NOTICE
similarity index 100%
rename from media/codecs/amrnb/enc/NOTICE
rename to media/module/codecs/amrnb/enc/NOTICE
diff --git a/media/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
similarity index 84%
rename from media/codecs/amrnb/enc/fuzzer/Android.bp
rename to media/module/codecs/amrnb/enc/fuzzer/Android.bp
index 2c041b7..bcbcee2 100644
--- a/media/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrnbenc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/codecs/amrnb/enc/fuzzer/README.md b/media/module/codecs/amrnb/enc/fuzzer/README.md
similarity index 100%
rename from media/codecs/amrnb/enc/fuzzer/README.md
rename to media/module/codecs/amrnb/enc/fuzzer/README.md
diff --git a/media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp b/media/module/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
rename to media/module/codecs/amrnb/enc/fuzzer/amrnb_enc_fuzzer.cpp
diff --git a/media/codecs/amrnb/enc/src/amrencode.cpp b/media/module/codecs/amrnb/enc/src/amrencode.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/amrencode.cpp
rename to media/module/codecs/amrnb/enc/src/amrencode.cpp
diff --git a/media/codecs/amrnb/enc/src/amrencode.h b/media/module/codecs/amrnb/enc/src/amrencode.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/amrencode.h
rename to media/module/codecs/amrnb/enc/src/amrencode.h
diff --git a/media/codecs/amrnb/enc/src/autocorr.cpp b/media/module/codecs/amrnb/enc/src/autocorr.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/autocorr.cpp
rename to media/module/codecs/amrnb/enc/src/autocorr.cpp
diff --git a/media/codecs/amrnb/enc/src/autocorr.h b/media/module/codecs/amrnb/enc/src/autocorr.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/autocorr.h
rename to media/module/codecs/amrnb/enc/src/autocorr.h
diff --git a/media/codecs/amrnb/enc/src/c1035pf.cpp b/media/module/codecs/amrnb/enc/src/c1035pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c1035pf.cpp
rename to media/module/codecs/amrnb/enc/src/c1035pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c1035pf.h b/media/module/codecs/amrnb/enc/src/c1035pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c1035pf.h
rename to media/module/codecs/amrnb/enc/src/c1035pf.h
diff --git a/media/codecs/amrnb/enc/src/c2_11pf.cpp b/media/module/codecs/amrnb/enc/src/c2_11pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c2_11pf.cpp
rename to media/module/codecs/amrnb/enc/src/c2_11pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c2_11pf.h b/media/module/codecs/amrnb/enc/src/c2_11pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c2_11pf.h
rename to media/module/codecs/amrnb/enc/src/c2_11pf.h
diff --git a/media/codecs/amrnb/enc/src/c2_9pf.cpp b/media/module/codecs/amrnb/enc/src/c2_9pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c2_9pf.cpp
rename to media/module/codecs/amrnb/enc/src/c2_9pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c2_9pf.h b/media/module/codecs/amrnb/enc/src/c2_9pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c2_9pf.h
rename to media/module/codecs/amrnb/enc/src/c2_9pf.h
diff --git a/media/codecs/amrnb/enc/src/c3_14pf.cpp b/media/module/codecs/amrnb/enc/src/c3_14pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c3_14pf.cpp
rename to media/module/codecs/amrnb/enc/src/c3_14pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c3_14pf.h b/media/module/codecs/amrnb/enc/src/c3_14pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c3_14pf.h
rename to media/module/codecs/amrnb/enc/src/c3_14pf.h
diff --git a/media/codecs/amrnb/enc/src/c4_17pf.cpp b/media/module/codecs/amrnb/enc/src/c4_17pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c4_17pf.cpp
rename to media/module/codecs/amrnb/enc/src/c4_17pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c4_17pf.h b/media/module/codecs/amrnb/enc/src/c4_17pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c4_17pf.h
rename to media/module/codecs/amrnb/enc/src/c4_17pf.h
diff --git a/media/codecs/amrnb/enc/src/c8_31pf.cpp b/media/module/codecs/amrnb/enc/src/c8_31pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/c8_31pf.cpp
rename to media/module/codecs/amrnb/enc/src/c8_31pf.cpp
diff --git a/media/codecs/amrnb/enc/src/c8_31pf.h b/media/module/codecs/amrnb/enc/src/c8_31pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/c8_31pf.h
rename to media/module/codecs/amrnb/enc/src/c8_31pf.h
diff --git a/media/codecs/amrnb/enc/src/calc_cor.cpp b/media/module/codecs/amrnb/enc/src/calc_cor.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/calc_cor.cpp
rename to media/module/codecs/amrnb/enc/src/calc_cor.cpp
diff --git a/media/codecs/amrnb/enc/src/calc_cor.h b/media/module/codecs/amrnb/enc/src/calc_cor.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/calc_cor.h
rename to media/module/codecs/amrnb/enc/src/calc_cor.h
diff --git a/media/codecs/amrnb/enc/src/calc_en.cpp b/media/module/codecs/amrnb/enc/src/calc_en.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/calc_en.cpp
rename to media/module/codecs/amrnb/enc/src/calc_en.cpp
diff --git a/media/codecs/amrnb/enc/src/calc_en.h b/media/module/codecs/amrnb/enc/src/calc_en.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/calc_en.h
rename to media/module/codecs/amrnb/enc/src/calc_en.h
diff --git a/media/codecs/amrnb/enc/src/cbsearch.cpp b/media/module/codecs/amrnb/enc/src/cbsearch.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cbsearch.cpp
rename to media/module/codecs/amrnb/enc/src/cbsearch.cpp
diff --git a/media/codecs/amrnb/enc/src/cbsearch.h b/media/module/codecs/amrnb/enc/src/cbsearch.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cbsearch.h
rename to media/module/codecs/amrnb/enc/src/cbsearch.h
diff --git a/media/codecs/amrnb/enc/src/cl_ltp.cpp b/media/module/codecs/amrnb/enc/src/cl_ltp.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cl_ltp.cpp
rename to media/module/codecs/amrnb/enc/src/cl_ltp.cpp
diff --git a/media/codecs/amrnb/enc/src/cl_ltp.h b/media/module/codecs/amrnb/enc/src/cl_ltp.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cl_ltp.h
rename to media/module/codecs/amrnb/enc/src/cl_ltp.h
diff --git a/media/codecs/amrnb/enc/src/cod_amr.cpp b/media/module/codecs/amrnb/enc/src/cod_amr.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cod_amr.cpp
rename to media/module/codecs/amrnb/enc/src/cod_amr.cpp
diff --git a/media/codecs/amrnb/enc/src/cod_amr.h b/media/module/codecs/amrnb/enc/src/cod_amr.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cod_amr.h
rename to media/module/codecs/amrnb/enc/src/cod_amr.h
diff --git a/media/codecs/amrnb/enc/src/convolve.cpp b/media/module/codecs/amrnb/enc/src/convolve.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/convolve.cpp
rename to media/module/codecs/amrnb/enc/src/convolve.cpp
diff --git a/media/codecs/amrnb/enc/src/convolve.h b/media/module/codecs/amrnb/enc/src/convolve.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/convolve.h
rename to media/module/codecs/amrnb/enc/src/convolve.h
diff --git a/media/codecs/amrnb/enc/src/cor_h.cpp b/media/module/codecs/amrnb/enc/src/cor_h.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h.cpp
rename to media/module/codecs/amrnb/enc/src/cor_h.cpp
diff --git a/media/codecs/amrnb/enc/src/cor_h.h b/media/module/codecs/amrnb/enc/src/cor_h.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h.h
rename to media/module/codecs/amrnb/enc/src/cor_h.h
diff --git a/media/codecs/amrnb/enc/src/cor_h_x.cpp b/media/module/codecs/amrnb/enc/src/cor_h_x.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h_x.cpp
rename to media/module/codecs/amrnb/enc/src/cor_h_x.cpp
diff --git a/media/codecs/amrnb/enc/src/cor_h_x.h b/media/module/codecs/amrnb/enc/src/cor_h_x.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h_x.h
rename to media/module/codecs/amrnb/enc/src/cor_h_x.h
diff --git a/media/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/module/codecs/amrnb/enc/src/cor_h_x2.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h_x2.cpp
rename to media/module/codecs/amrnb/enc/src/cor_h_x2.cpp
diff --git a/media/codecs/amrnb/enc/src/cor_h_x2.h b/media/module/codecs/amrnb/enc/src/cor_h_x2.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/cor_h_x2.h
rename to media/module/codecs/amrnb/enc/src/cor_h_x2.h
diff --git a/media/codecs/amrnb/enc/src/corrwght_tab.cpp b/media/module/codecs/amrnb/enc/src/corrwght_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/corrwght_tab.cpp
rename to media/module/codecs/amrnb/enc/src/corrwght_tab.cpp
diff --git a/media/codecs/amrnb/enc/src/dtx_enc.cpp b/media/module/codecs/amrnb/enc/src/dtx_enc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/dtx_enc.cpp
rename to media/module/codecs/amrnb/enc/src/dtx_enc.cpp
diff --git a/media/codecs/amrnb/enc/src/dtx_enc.h b/media/module/codecs/amrnb/enc/src/dtx_enc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/dtx_enc.h
rename to media/module/codecs/amrnb/enc/src/dtx_enc.h
diff --git a/media/codecs/amrnb/enc/src/enc_lag3.cpp b/media/module/codecs/amrnb/enc/src/enc_lag3.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/enc_lag3.cpp
rename to media/module/codecs/amrnb/enc/src/enc_lag3.cpp
diff --git a/media/codecs/amrnb/enc/src/enc_lag3.h b/media/module/codecs/amrnb/enc/src/enc_lag3.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/enc_lag3.h
rename to media/module/codecs/amrnb/enc/src/enc_lag3.h
diff --git a/media/codecs/amrnb/enc/src/enc_lag6.cpp b/media/module/codecs/amrnb/enc/src/enc_lag6.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/enc_lag6.cpp
rename to media/module/codecs/amrnb/enc/src/enc_lag6.cpp
diff --git a/media/codecs/amrnb/enc/src/enc_lag6.h b/media/module/codecs/amrnb/enc/src/enc_lag6.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/enc_lag6.h
rename to media/module/codecs/amrnb/enc/src/enc_lag6.h
diff --git a/media/codecs/amrnb/enc/src/enc_output_format_tab.cpp b/media/module/codecs/amrnb/enc/src/enc_output_format_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/enc_output_format_tab.cpp
rename to media/module/codecs/amrnb/enc/src/enc_output_format_tab.cpp
diff --git a/media/codecs/amrnb/enc/src/ets_to_if2.cpp b/media/module/codecs/amrnb/enc/src/ets_to_if2.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/ets_to_if2.cpp
rename to media/module/codecs/amrnb/enc/src/ets_to_if2.cpp
diff --git a/media/codecs/amrnb/enc/src/ets_to_if2.h b/media/module/codecs/amrnb/enc/src/ets_to_if2.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/ets_to_if2.h
rename to media/module/codecs/amrnb/enc/src/ets_to_if2.h
diff --git a/media/codecs/amrnb/enc/src/ets_to_wmf.cpp b/media/module/codecs/amrnb/enc/src/ets_to_wmf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/ets_to_wmf.cpp
rename to media/module/codecs/amrnb/enc/src/ets_to_wmf.cpp
diff --git a/media/codecs/amrnb/enc/src/ets_to_wmf.h b/media/module/codecs/amrnb/enc/src/ets_to_wmf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/ets_to_wmf.h
rename to media/module/codecs/amrnb/enc/src/ets_to_wmf.h
diff --git a/media/codecs/amrnb/enc/src/g_adapt.cpp b/media/module/codecs/amrnb/enc/src/g_adapt.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_adapt.cpp
rename to media/module/codecs/amrnb/enc/src/g_adapt.cpp
diff --git a/media/codecs/amrnb/enc/src/g_adapt.h b/media/module/codecs/amrnb/enc/src/g_adapt.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_adapt.h
rename to media/module/codecs/amrnb/enc/src/g_adapt.h
diff --git a/media/codecs/amrnb/enc/src/g_code.cpp b/media/module/codecs/amrnb/enc/src/g_code.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_code.cpp
rename to media/module/codecs/amrnb/enc/src/g_code.cpp
diff --git a/media/codecs/amrnb/enc/src/g_code.h b/media/module/codecs/amrnb/enc/src/g_code.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_code.h
rename to media/module/codecs/amrnb/enc/src/g_code.h
diff --git a/media/codecs/amrnb/enc/src/g_pitch.cpp b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_pitch.cpp
rename to media/module/codecs/amrnb/enc/src/g_pitch.cpp
diff --git a/media/codecs/amrnb/enc/src/g_pitch.h b/media/module/codecs/amrnb/enc/src/g_pitch.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/g_pitch.h
rename to media/module/codecs/amrnb/enc/src/g_pitch.h
diff --git a/media/codecs/amrnb/enc/src/gain_q.cpp b/media/module/codecs/amrnb/enc/src/gain_q.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/gain_q.cpp
rename to media/module/codecs/amrnb/enc/src/gain_q.cpp
diff --git a/media/codecs/amrnb/enc/src/gain_q.h b/media/module/codecs/amrnb/enc/src/gain_q.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/gain_q.h
rename to media/module/codecs/amrnb/enc/src/gain_q.h
diff --git a/media/codecs/amrnb/enc/src/gsmamr_enc.h b/media/module/codecs/amrnb/enc/src/gsmamr_enc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/gsmamr_enc.h
rename to media/module/codecs/amrnb/enc/src/gsmamr_enc.h
diff --git a/media/codecs/amrnb/enc/src/hp_max.cpp b/media/module/codecs/amrnb/enc/src/hp_max.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/hp_max.cpp
rename to media/module/codecs/amrnb/enc/src/hp_max.cpp
diff --git a/media/codecs/amrnb/enc/src/hp_max.h b/media/module/codecs/amrnb/enc/src/hp_max.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/hp_max.h
rename to media/module/codecs/amrnb/enc/src/hp_max.h
diff --git a/media/codecs/amrnb/enc/src/inter_36.cpp b/media/module/codecs/amrnb/enc/src/inter_36.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/inter_36.cpp
rename to media/module/codecs/amrnb/enc/src/inter_36.cpp
diff --git a/media/codecs/amrnb/enc/src/inter_36.h b/media/module/codecs/amrnb/enc/src/inter_36.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/inter_36.h
rename to media/module/codecs/amrnb/enc/src/inter_36.h
diff --git a/media/codecs/amrnb/enc/src/inter_36_tab.cpp b/media/module/codecs/amrnb/enc/src/inter_36_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/inter_36_tab.cpp
rename to media/module/codecs/amrnb/enc/src/inter_36_tab.cpp
diff --git a/media/codecs/amrnb/enc/src/inter_36_tab.h b/media/module/codecs/amrnb/enc/src/inter_36_tab.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/inter_36_tab.h
rename to media/module/codecs/amrnb/enc/src/inter_36_tab.h
diff --git a/media/codecs/amrnb/enc/src/l_comp.cpp b/media/module/codecs/amrnb/enc/src/l_comp.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/l_comp.cpp
rename to media/module/codecs/amrnb/enc/src/l_comp.cpp
diff --git a/media/codecs/amrnb/enc/src/l_extract.cpp b/media/module/codecs/amrnb/enc/src/l_extract.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/l_extract.cpp
rename to media/module/codecs/amrnb/enc/src/l_extract.cpp
diff --git a/media/codecs/amrnb/enc/src/l_negate.cpp b/media/module/codecs/amrnb/enc/src/l_negate.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/l_negate.cpp
rename to media/module/codecs/amrnb/enc/src/l_negate.cpp
diff --git a/media/codecs/amrnb/enc/src/lag_wind.cpp b/media/module/codecs/amrnb/enc/src/lag_wind.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/lag_wind.cpp
rename to media/module/codecs/amrnb/enc/src/lag_wind.cpp
diff --git a/media/codecs/amrnb/enc/src/lag_wind.h b/media/module/codecs/amrnb/enc/src/lag_wind.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/lag_wind.h
rename to media/module/codecs/amrnb/enc/src/lag_wind.h
diff --git a/media/codecs/amrnb/enc/src/lag_wind_tab.cpp b/media/module/codecs/amrnb/enc/src/lag_wind_tab.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/lag_wind_tab.cpp
rename to media/module/codecs/amrnb/enc/src/lag_wind_tab.cpp
diff --git a/media/codecs/amrnb/enc/src/lag_wind_tab.h b/media/module/codecs/amrnb/enc/src/lag_wind_tab.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/lag_wind_tab.h
rename to media/module/codecs/amrnb/enc/src/lag_wind_tab.h
diff --git a/media/codecs/amrnb/enc/src/levinson.cpp b/media/module/codecs/amrnb/enc/src/levinson.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/levinson.cpp
rename to media/module/codecs/amrnb/enc/src/levinson.cpp
diff --git a/media/codecs/amrnb/enc/src/levinson.h b/media/module/codecs/amrnb/enc/src/levinson.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/levinson.h
rename to media/module/codecs/amrnb/enc/src/levinson.h
diff --git a/media/codecs/amrnb/enc/src/lpc.cpp b/media/module/codecs/amrnb/enc/src/lpc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/lpc.cpp
rename to media/module/codecs/amrnb/enc/src/lpc.cpp
diff --git a/media/codecs/amrnb/enc/src/lpc.h b/media/module/codecs/amrnb/enc/src/lpc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/lpc.h
rename to media/module/codecs/amrnb/enc/src/lpc.h
diff --git a/media/codecs/amrnb/enc/src/ol_ltp.cpp b/media/module/codecs/amrnb/enc/src/ol_ltp.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/ol_ltp.cpp
rename to media/module/codecs/amrnb/enc/src/ol_ltp.cpp
diff --git a/media/codecs/amrnb/enc/src/ol_ltp.h b/media/module/codecs/amrnb/enc/src/ol_ltp.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/ol_ltp.h
rename to media/module/codecs/amrnb/enc/src/ol_ltp.h
diff --git a/media/codecs/amrnb/enc/src/p_ol_wgh.cpp b/media/module/codecs/amrnb/enc/src/p_ol_wgh.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/p_ol_wgh.cpp
rename to media/module/codecs/amrnb/enc/src/p_ol_wgh.cpp
diff --git a/media/codecs/amrnb/enc/src/pitch_fr.cpp b/media/module/codecs/amrnb/enc/src/pitch_fr.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/pitch_fr.cpp
rename to media/module/codecs/amrnb/enc/src/pitch_fr.cpp
diff --git a/media/codecs/amrnb/enc/src/pitch_fr.h b/media/module/codecs/amrnb/enc/src/pitch_fr.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/pitch_fr.h
rename to media/module/codecs/amrnb/enc/src/pitch_fr.h
diff --git a/media/codecs/amrnb/enc/src/pitch_ol.cpp b/media/module/codecs/amrnb/enc/src/pitch_ol.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/pitch_ol.cpp
rename to media/module/codecs/amrnb/enc/src/pitch_ol.cpp
diff --git a/media/codecs/amrnb/enc/src/pitch_ol.h b/media/module/codecs/amrnb/enc/src/pitch_ol.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/pitch_ol.h
rename to media/module/codecs/amrnb/enc/src/pitch_ol.h
diff --git a/media/codecs/amrnb/enc/src/pre_big.cpp b/media/module/codecs/amrnb/enc/src/pre_big.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/pre_big.cpp
rename to media/module/codecs/amrnb/enc/src/pre_big.cpp
diff --git a/media/codecs/amrnb/enc/src/pre_big.h b/media/module/codecs/amrnb/enc/src/pre_big.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/pre_big.h
rename to media/module/codecs/amrnb/enc/src/pre_big.h
diff --git a/media/codecs/amrnb/enc/src/pre_proc.cpp b/media/module/codecs/amrnb/enc/src/pre_proc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/pre_proc.cpp
rename to media/module/codecs/amrnb/enc/src/pre_proc.cpp
diff --git a/media/codecs/amrnb/enc/src/pre_proc.h b/media/module/codecs/amrnb/enc/src/pre_proc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/pre_proc.h
rename to media/module/codecs/amrnb/enc/src/pre_proc.h
diff --git a/media/codecs/amrnb/enc/src/prm2bits.cpp b/media/module/codecs/amrnb/enc/src/prm2bits.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/prm2bits.cpp
rename to media/module/codecs/amrnb/enc/src/prm2bits.cpp
diff --git a/media/codecs/amrnb/enc/src/prm2bits.h b/media/module/codecs/amrnb/enc/src/prm2bits.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/prm2bits.h
rename to media/module/codecs/amrnb/enc/src/prm2bits.h
diff --git a/media/codecs/amrnb/enc/src/q_gain_c.cpp b/media/module/codecs/amrnb/enc/src/q_gain_c.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/q_gain_c.cpp
rename to media/module/codecs/amrnb/enc/src/q_gain_c.cpp
diff --git a/media/codecs/amrnb/enc/src/q_gain_c.h b/media/module/codecs/amrnb/enc/src/q_gain_c.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/q_gain_c.h
rename to media/module/codecs/amrnb/enc/src/q_gain_c.h
diff --git a/media/codecs/amrnb/enc/src/q_gain_p.cpp b/media/module/codecs/amrnb/enc/src/q_gain_p.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/q_gain_p.cpp
rename to media/module/codecs/amrnb/enc/src/q_gain_p.cpp
diff --git a/media/codecs/amrnb/enc/src/q_gain_p.h b/media/module/codecs/amrnb/enc/src/q_gain_p.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/q_gain_p.h
rename to media/module/codecs/amrnb/enc/src/q_gain_p.h
diff --git a/media/codecs/amrnb/enc/src/qgain475.cpp b/media/module/codecs/amrnb/enc/src/qgain475.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/qgain475.cpp
rename to media/module/codecs/amrnb/enc/src/qgain475.cpp
diff --git a/media/codecs/amrnb/enc/src/qgain475.h b/media/module/codecs/amrnb/enc/src/qgain475.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/qgain475.h
rename to media/module/codecs/amrnb/enc/src/qgain475.h
diff --git a/media/codecs/amrnb/enc/src/qgain795.cpp b/media/module/codecs/amrnb/enc/src/qgain795.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/qgain795.cpp
rename to media/module/codecs/amrnb/enc/src/qgain795.cpp
diff --git a/media/codecs/amrnb/enc/src/qgain795.h b/media/module/codecs/amrnb/enc/src/qgain795.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/qgain795.h
rename to media/module/codecs/amrnb/enc/src/qgain795.h
diff --git a/media/codecs/amrnb/enc/src/qua_gain.cpp b/media/module/codecs/amrnb/enc/src/qua_gain.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/qua_gain.cpp
rename to media/module/codecs/amrnb/enc/src/qua_gain.cpp
diff --git a/media/codecs/amrnb/enc/src/s10_8pf.cpp b/media/module/codecs/amrnb/enc/src/s10_8pf.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/s10_8pf.cpp
rename to media/module/codecs/amrnb/enc/src/s10_8pf.cpp
diff --git a/media/codecs/amrnb/enc/src/s10_8pf.h b/media/module/codecs/amrnb/enc/src/s10_8pf.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/s10_8pf.h
rename to media/module/codecs/amrnb/enc/src/s10_8pf.h
diff --git a/media/codecs/amrnb/enc/src/set_sign.cpp b/media/module/codecs/amrnb/enc/src/set_sign.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/set_sign.cpp
rename to media/module/codecs/amrnb/enc/src/set_sign.cpp
diff --git a/media/codecs/amrnb/enc/src/set_sign.h b/media/module/codecs/amrnb/enc/src/set_sign.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/set_sign.h
rename to media/module/codecs/amrnb/enc/src/set_sign.h
diff --git a/media/codecs/amrnb/enc/src/sid_sync.cpp b/media/module/codecs/amrnb/enc/src/sid_sync.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/sid_sync.cpp
rename to media/module/codecs/amrnb/enc/src/sid_sync.cpp
diff --git a/media/codecs/amrnb/enc/src/sid_sync.h b/media/module/codecs/amrnb/enc/src/sid_sync.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/sid_sync.h
rename to media/module/codecs/amrnb/enc/src/sid_sync.h
diff --git a/media/codecs/amrnb/enc/src/sp_enc.cpp b/media/module/codecs/amrnb/enc/src/sp_enc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/sp_enc.cpp
rename to media/module/codecs/amrnb/enc/src/sp_enc.cpp
diff --git a/media/codecs/amrnb/enc/src/sp_enc.h b/media/module/codecs/amrnb/enc/src/sp_enc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/sp_enc.h
rename to media/module/codecs/amrnb/enc/src/sp_enc.h
diff --git a/media/codecs/amrnb/enc/src/spreproc.cpp b/media/module/codecs/amrnb/enc/src/spreproc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/spreproc.cpp
rename to media/module/codecs/amrnb/enc/src/spreproc.cpp
diff --git a/media/codecs/amrnb/enc/src/spreproc.h b/media/module/codecs/amrnb/enc/src/spreproc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/spreproc.h
rename to media/module/codecs/amrnb/enc/src/spreproc.h
diff --git a/media/codecs/amrnb/enc/src/spstproc.cpp b/media/module/codecs/amrnb/enc/src/spstproc.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/spstproc.cpp
rename to media/module/codecs/amrnb/enc/src/spstproc.cpp
diff --git a/media/codecs/amrnb/enc/src/spstproc.h b/media/module/codecs/amrnb/enc/src/spstproc.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/spstproc.h
rename to media/module/codecs/amrnb/enc/src/spstproc.h
diff --git a/media/codecs/amrnb/enc/src/ton_stab.cpp b/media/module/codecs/amrnb/enc/src/ton_stab.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/src/ton_stab.cpp
rename to media/module/codecs/amrnb/enc/src/ton_stab.cpp
diff --git a/media/codecs/amrnb/enc/src/ton_stab.h b/media/module/codecs/amrnb/enc/src/ton_stab.h
similarity index 100%
rename from media/codecs/amrnb/enc/src/ton_stab.h
rename to media/module/codecs/amrnb/enc/src/ton_stab.h
diff --git a/media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h b/media/module/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
similarity index 100%
rename from media/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
rename to media/module/codecs/amrnb/enc/test/AmrnbEncTestEnvironment.h
diff --git a/media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp b/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
rename to media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
diff --git a/media/codecs/amrnb/enc/test/Android.bp b/media/module/codecs/amrnb/enc/test/Android.bp
similarity index 100%
rename from media/codecs/amrnb/enc/test/Android.bp
rename to media/module/codecs/amrnb/enc/test/Android.bp
diff --git a/media/codecs/amrnb/enc/test/AndroidTest.xml b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/amrnb/enc/test/AndroidTest.xml
rename to media/module/codecs/amrnb/enc/test/AndroidTest.xml
diff --git a/media/codecs/amrnb/enc/test/README.md b/media/module/codecs/amrnb/enc/test/README.md
similarity index 100%
rename from media/codecs/amrnb/enc/test/README.md
rename to media/module/codecs/amrnb/enc/test/README.md
diff --git a/media/codecs/amrnb/enc/test/amrnb_enc_test.cpp b/media/module/codecs/amrnb/enc/test/amrnb_enc_test.cpp
similarity index 100%
rename from media/codecs/amrnb/enc/test/amrnb_enc_test.cpp
rename to media/module/codecs/amrnb/enc/test/amrnb_enc_test.cpp
diff --git a/media/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
similarity index 85%
rename from media/codecs/amrnb/fuzzer/Android.bp
rename to media/module/codecs/amrnb/fuzzer/Android.bp
index 833a7ba..3f29267 100644
--- a/media/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrnbdec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
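[editor's note] The hunk above extends the AMR-NB decoder fuzzer's fuzz_config block with reporting metadata (hotlist, attack vector, service privilege, user model, and whether the fuzzed code ships on devices); the same fields are added to the AMR-WB decoder and encoder fuzzers further below. For orientation only, a libFuzzer target such as amrnb_dec_fuzzer.cpp is driven through the standard entry point sketched here; the body is a placeholder and not the actual harness, which feeds the input to the AMR-NB decoder APIs.

    #include <cstddef>
    #include <cstdint>

    // Illustrative libFuzzer entry point only; the real amrnb_dec_fuzzer.cpp
    // passes `data` to the AMR-NB decoder instead of this placeholder body.
    extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
        if (data == nullptr || size == 0) {
            return 0;  // nothing to exercise for empty inputs
        }
        // ... decode `data` here ...
        return 0;
    }
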
diff --git a/media/codecs/amrnb/fuzzer/README.md b/media/module/codecs/amrnb/fuzzer/README.md
similarity index 100%
rename from media/codecs/amrnb/fuzzer/README.md
rename to media/module/codecs/amrnb/fuzzer/README.md
diff --git a/media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp b/media/module/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
similarity index 100%
rename from media/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
rename to media/module/codecs/amrnb/fuzzer/amrnb_dec_fuzzer.cpp
diff --git a/media/codecs/amrnb/patent_disclaimer.txt b/media/module/codecs/amrnb/patent_disclaimer.txt
similarity index 100%
rename from media/codecs/amrnb/patent_disclaimer.txt
rename to media/module/codecs/amrnb/patent_disclaimer.txt
diff --git a/media/codecs/amrwb/dec/Android.bp b/media/module/codecs/amrwb/dec/Android.bp
similarity index 100%
rename from media/codecs/amrwb/dec/Android.bp
rename to media/module/codecs/amrwb/dec/Android.bp
diff --git a/media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrwb/dec/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrwb/dec/NOTICE b/media/module/codecs/amrwb/dec/NOTICE
similarity index 100%
rename from media/codecs/amrwb/dec/NOTICE
rename to media/module/codecs/amrwb/dec/NOTICE
diff --git a/media/codecs/amrwb/dec/TEST_MAPPING b/media/module/codecs/amrwb/dec/TEST_MAPPING
similarity index 100%
rename from media/codecs/amrwb/dec/TEST_MAPPING
rename to media/module/codecs/amrwb/dec/TEST_MAPPING
diff --git a/media/codecs/amrwb/dec/fuzzer/Android.bp b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
similarity index 85%
rename from media/codecs/amrwb/dec/fuzzer/Android.bp
rename to media/module/codecs/amrwb/dec/fuzzer/Android.bp
index 16f08fa..31a20ff 100644
--- a/media/codecs/amrwb/dec/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrwbdec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/codecs/amrwb/dec/fuzzer/README.md b/media/module/codecs/amrwb/dec/fuzzer/README.md
similarity index 100%
rename from media/codecs/amrwb/dec/fuzzer/README.md
rename to media/module/codecs/amrwb/dec/fuzzer/README.md
diff --git a/media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp b/media/module/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
rename to media/module/codecs/amrwb/dec/fuzzer/amrwb_dec_fuzzer.cpp
diff --git a/media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h b/media/module/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
similarity index 100%
rename from media/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
rename to media/module/codecs/amrwb/dec/include/pvamrwbdecoder_api.h
diff --git a/media/codecs/amrwb/dec/patent_disclaimer.txt b/media/module/codecs/amrwb/dec/patent_disclaimer.txt
similarity index 100%
rename from media/codecs/amrwb/dec/patent_disclaimer.txt
rename to media/module/codecs/amrwb/dec/patent_disclaimer.txt
diff --git a/media/codecs/amrwb/dec/src/agc2_amr_wb.cpp b/media/module/codecs/amrwb/dec/src/agc2_amr_wb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/agc2_amr_wb.cpp
rename to media/module/codecs/amrwb/dec/src/agc2_amr_wb.cpp
diff --git a/media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp b/media/module/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
rename to media/module/codecs/amrwb/dec/src/band_pass_6k_7k.cpp
diff --git a/media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp b/media/module/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
rename to media/module/codecs/amrwb/dec/src/dec_acelp_2p_in_64.cpp
diff --git a/media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp b/media/module/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
rename to media/module/codecs/amrwb/dec/src/dec_acelp_4p_in_64.cpp
diff --git a/media/codecs/amrwb/dec/src/dec_alg_codebook.cpp b/media/module/codecs/amrwb/dec/src/dec_alg_codebook.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/dec_alg_codebook.cpp
rename to media/module/codecs/amrwb/dec/src/dec_alg_codebook.cpp
diff --git a/media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp b/media/module/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
rename to media/module/codecs/amrwb/dec/src/dec_gain2_amr_wb.cpp
diff --git a/media/codecs/amrwb/dec/src/deemphasis_32.cpp b/media/module/codecs/amrwb/dec/src/deemphasis_32.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/deemphasis_32.cpp
rename to media/module/codecs/amrwb/dec/src/deemphasis_32.cpp
diff --git a/media/codecs/amrwb/dec/src/dtx.h b/media/module/codecs/amrwb/dec/src/dtx.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/dtx.h
rename to media/module/codecs/amrwb/dec/src/dtx.h
diff --git a/media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp b/media/module/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
rename to media/module/codecs/amrwb/dec/src/dtx_decoder_amr_wb.cpp
diff --git a/media/codecs/amrwb/dec/src/e_pv_amrwbdec.h b/media/module/codecs/amrwb/dec/src/e_pv_amrwbdec.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/e_pv_amrwbdec.h
rename to media/module/codecs/amrwb/dec/src/e_pv_amrwbdec.h
diff --git a/media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp b/media/module/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
rename to media/module/codecs/amrwb/dec/src/get_amr_wb_bits.cpp
diff --git a/media/codecs/amrwb/dec/src/get_amr_wb_bits.h b/media/module/codecs/amrwb/dec/src/get_amr_wb_bits.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/get_amr_wb_bits.h
rename to media/module/codecs/amrwb/dec/src/get_amr_wb_bits.h
diff --git a/media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp b/media/module/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
rename to media/module/codecs/amrwb/dec/src/highpass_400hz_at_12k8.cpp
diff --git a/media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp b/media/module/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
rename to media/module/codecs/amrwb/dec/src/highpass_50hz_at_12k8.cpp
diff --git a/media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp b/media/module/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
rename to media/module/codecs/amrwb/dec/src/homing_amr_wb_dec.cpp
diff --git a/media/codecs/amrwb/dec/src/interpolate_isp.cpp b/media/module/codecs/amrwb/dec/src/interpolate_isp.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/interpolate_isp.cpp
rename to media/module/codecs/amrwb/dec/src/interpolate_isp.cpp
diff --git a/media/codecs/amrwb/dec/src/isf_extrapolation.cpp b/media/module/codecs/amrwb/dec/src/isf_extrapolation.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/isf_extrapolation.cpp
rename to media/module/codecs/amrwb/dec/src/isf_extrapolation.cpp
diff --git a/media/codecs/amrwb/dec/src/isp_az.cpp b/media/module/codecs/amrwb/dec/src/isp_az.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/isp_az.cpp
rename to media/module/codecs/amrwb/dec/src/isp_az.cpp
diff --git a/media/codecs/amrwb/dec/src/isp_isf.cpp b/media/module/codecs/amrwb/dec/src/isp_isf.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/isp_isf.cpp
rename to media/module/codecs/amrwb/dec/src/isp_isf.cpp
diff --git a/media/codecs/amrwb/dec/src/lagconceal.cpp b/media/module/codecs/amrwb/dec/src/lagconceal.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/lagconceal.cpp
rename to media/module/codecs/amrwb/dec/src/lagconceal.cpp
diff --git a/media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp b/media/module/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
rename to media/module/codecs/amrwb/dec/src/low_pass_filt_7k.cpp
diff --git a/media/codecs/amrwb/dec/src/median5.cpp b/media/module/codecs/amrwb/dec/src/median5.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/median5.cpp
rename to media/module/codecs/amrwb/dec/src/median5.cpp
diff --git a/media/codecs/amrwb/dec/src/mime_io.cpp b/media/module/codecs/amrwb/dec/src/mime_io.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/mime_io.cpp
rename to media/module/codecs/amrwb/dec/src/mime_io.cpp
diff --git a/media/codecs/amrwb/dec/src/mime_io.h b/media/module/codecs/amrwb/dec/src/mime_io.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/mime_io.h
rename to media/module/codecs/amrwb/dec/src/mime_io.h
diff --git a/media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp b/media/module/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
rename to media/module/codecs/amrwb/dec/src/noise_gen_amrwb.cpp
diff --git a/media/codecs/amrwb/dec/src/normalize_amr_wb.cpp b/media/module/codecs/amrwb/dec/src/normalize_amr_wb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/normalize_amr_wb.cpp
rename to media/module/codecs/amrwb/dec/src/normalize_amr_wb.cpp
diff --git a/media/codecs/amrwb/dec/src/normalize_amr_wb.h b/media/module/codecs/amrwb/dec/src/normalize_amr_wb.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/normalize_amr_wb.h
rename to media/module/codecs/amrwb/dec/src/normalize_amr_wb.h
diff --git a/media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp b/media/module/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
rename to media/module/codecs/amrwb/dec/src/oversamp_12k8_to_16k.cpp
diff --git a/media/codecs/amrwb/dec/src/phase_dispersion.cpp b/media/module/codecs/amrwb/dec/src/phase_dispersion.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/phase_dispersion.cpp
rename to media/module/codecs/amrwb/dec/src/phase_dispersion.cpp
diff --git a/media/codecs/amrwb/dec/src/pit_shrp.cpp b/media/module/codecs/amrwb/dec/src/pit_shrp.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/pit_shrp.cpp
rename to media/module/codecs/amrwb/dec/src/pit_shrp.cpp
diff --git a/media/codecs/amrwb/dec/src/pred_lt4.cpp b/media/module/codecs/amrwb/dec/src/pred_lt4.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/pred_lt4.cpp
rename to media/module/codecs/amrwb/dec/src/pred_lt4.cpp
diff --git a/media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp b/media/module/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
rename to media/module/codecs/amrwb/dec/src/preemph_amrwb_dec.cpp
diff --git a/media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h b/media/module/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
rename to media/module/codecs/amrwb/dec/src/pv_amr_wb_type_defs.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp b/media/module/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
rename to media/module/codecs/amrwb/dec/src/pvamrwb_math_op.cpp
diff --git a/media/codecs/amrwb/dec/src/pvamrwb_math_op.h b/media/module/codecs/amrwb/dec/src/pvamrwb_math_op.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwb_math_op.h
rename to media/module/codecs/amrwb/dec/src/pvamrwb_math_op.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder.cpp
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_acelp.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_armv5.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_cequivalent.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_basic_op_gcc_armv5.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_cnst.h
diff --git a/media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h b/media/module/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
rename to media/module/codecs/amrwb/dec/src/pvamrwbdecoder_mem_funcs.h
diff --git a/media/codecs/amrwb/dec/src/q_gain2_tab.cpp b/media/module/codecs/amrwb/dec/src/q_gain2_tab.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/q_gain2_tab.cpp
rename to media/module/codecs/amrwb/dec/src/q_gain2_tab.cpp
diff --git a/media/codecs/amrwb/dec/src/q_pulse.h b/media/module/codecs/amrwb/dec/src/q_pulse.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/q_pulse.h
rename to media/module/codecs/amrwb/dec/src/q_pulse.h
diff --git a/media/codecs/amrwb/dec/src/qisf_ns.cpp b/media/module/codecs/amrwb/dec/src/qisf_ns.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/qisf_ns.cpp
rename to media/module/codecs/amrwb/dec/src/qisf_ns.cpp
diff --git a/media/codecs/amrwb/dec/src/qisf_ns.h b/media/module/codecs/amrwb/dec/src/qisf_ns.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/qisf_ns.h
rename to media/module/codecs/amrwb/dec/src/qisf_ns.h
diff --git a/media/codecs/amrwb/dec/src/qisf_ns_tab.cpp b/media/module/codecs/amrwb/dec/src/qisf_ns_tab.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/qisf_ns_tab.cpp
rename to media/module/codecs/amrwb/dec/src/qisf_ns_tab.cpp
diff --git a/media/codecs/amrwb/dec/src/qpisf_2s.cpp b/media/module/codecs/amrwb/dec/src/qpisf_2s.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/qpisf_2s.cpp
rename to media/module/codecs/amrwb/dec/src/qpisf_2s.cpp
diff --git a/media/codecs/amrwb/dec/src/qpisf_2s.h b/media/module/codecs/amrwb/dec/src/qpisf_2s.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/qpisf_2s.h
rename to media/module/codecs/amrwb/dec/src/qpisf_2s.h
diff --git a/media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp b/media/module/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
rename to media/module/codecs/amrwb/dec/src/qpisf_2s_tab.cpp
diff --git a/media/codecs/amrwb/dec/src/scale_signal.cpp b/media/module/codecs/amrwb/dec/src/scale_signal.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/scale_signal.cpp
rename to media/module/codecs/amrwb/dec/src/scale_signal.cpp
diff --git a/media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp b/media/module/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
rename to media/module/codecs/amrwb/dec/src/synthesis_amr_wb.cpp
diff --git a/media/codecs/amrwb/dec/src/synthesis_amr_wb.h b/media/module/codecs/amrwb/dec/src/synthesis_amr_wb.h
similarity index 100%
rename from media/codecs/amrwb/dec/src/synthesis_amr_wb.h
rename to media/module/codecs/amrwb/dec/src/synthesis_amr_wb.h
diff --git a/media/codecs/amrwb/dec/src/voice_factor.cpp b/media/module/codecs/amrwb/dec/src/voice_factor.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/voice_factor.cpp
rename to media/module/codecs/amrwb/dec/src/voice_factor.cpp
diff --git a/media/codecs/amrwb/dec/src/wb_syn_filt.cpp b/media/module/codecs/amrwb/dec/src/wb_syn_filt.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/wb_syn_filt.cpp
rename to media/module/codecs/amrwb/dec/src/wb_syn_filt.cpp
diff --git a/media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp b/media/module/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
rename to media/module/codecs/amrwb/dec/src/weight_amrwb_lpc.cpp
diff --git a/media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h b/media/module/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
similarity index 100%
rename from media/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
rename to media/module/codecs/amrwb/dec/test/AmrwbDecTestEnvironment.h
diff --git a/media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
similarity index 89%
rename from media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
rename to media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
index 7221b92..2cc88ce 100644
--- a/media/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
+++ b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
@@ -21,6 +21,7 @@
 #include <utils/Log.h>
 
 #include <audio_utils/sndfile.h>
+#include <memory>
 #include <stdio.h>
 
 #include "pvamrwbdecoder.h"
@@ -121,7 +122,7 @@
 
 TEST_F(AmrwbDecoderTest, MultiCreateAmrwbDecoderTest) {
     uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
-    void *decoderBuf = malloc(memRequirements);
+    std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
 
@@ -129,25 +130,21 @@
     void *amrHandle = nullptr;
     int16_t *decoderCookie;
     for (int count = 0; count < kMaxCount; count++) {
-        pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+        pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
         ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
         ALOGV("Decoder created successfully");
     }
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
 }
 
 TEST_P(AmrwbDecoderTest, DecodeTest) {
     uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
-    void *decoderBuf = malloc(memRequirements);
+    std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
 
     void *amrHandle = nullptr;
     int16_t *decoderCookie;
-    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
     string inputFile = gEnv->getRes() + GetParam();
@@ -159,25 +156,21 @@
     SNDFILE *outFileHandle = openOutputFile(&sfInfo);
     ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
 
-    int32_t decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle);
+    int32_t decoderErr = DecodeFrames(decoderCookie, decoderBuf.get(), outFileHandle);
     ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
 
     sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
 }
 
 TEST_P(AmrwbDecoderTest, ResetDecoderTest) {
     uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
-    void *decoderBuf = malloc(memRequirements);
+    std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
 
     void *amrHandle = nullptr;
     int16_t *decoderCookie;
-    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf, &decoderCookie);
+    pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
     string inputFile = gEnv->getRes() + GetParam();
@@ -190,20 +183,18 @@
     ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
 
     // Decode 150 frames first
-    int32_t decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle, kNumFrameReset);
+    int32_t decoderErr =
+            DecodeFrames(decoderCookie, decoderBuf.get(), outFileHandle, kNumFrameReset);
     ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
 
     // Reset Decoder
-    pvDecoder_AmrWb_Reset(decoderBuf, 1);
+    pvDecoder_AmrWb_Reset(decoderBuf.get(), 1);
 
     // Start decoding again
-    decoderErr = DecodeFrames(decoderCookie, decoderBuf, outFileHandle);
+    decoderErr = DecodeFrames(decoderCookie, decoderBuf.get(), outFileHandle);
     ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
 
     sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-    }
 }
 
 INSTANTIATE_TEST_SUITE_P(AmrwbDecoderTestAll, AmrwbDecoderTest,
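[editor's note] The hunk above replaces the malloc()/free() pairs in AmrwbDecoderTest with a std::unique_ptr<char[]>, so the decoder scratch buffer is released on every exit path, including early ASSERT_* returns that previously leaked it. A minimal sketch of the same RAII pattern, using only the pvDecoder_AmrWb_* calls visible in the test and assuming the usual include of "pvamrwbdecoder.h" and link against the decoder library:

    #include <cstdint>
    #include <memory>

    #include "pvamrwbdecoder.h"  // pvDecoder_AmrWb_* declarations, as in the test

    void createAndResetDecoder() {
        // Size the scratch buffer exactly as the test does.
        uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();

        // unique_ptr owns the buffer; no explicit free() is needed on any path.
        std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);

        void *amrHandle = nullptr;
        int16_t *decoderCookie = nullptr;
        pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);

        // ... decode frames via decoderCookie / decoderBuf.get() ...

        pvDecoder_AmrWb_Reset(decoderBuf.get(), 1);
    }  // decoderBuf is released here automatically, even if an assertion bails out earlier
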
diff --git a/media/codecs/amrwb/dec/test/Android.bp b/media/module/codecs/amrwb/dec/test/Android.bp
similarity index 100%
rename from media/codecs/amrwb/dec/test/Android.bp
rename to media/module/codecs/amrwb/dec/test/Android.bp
diff --git a/media/codecs/amrwb/dec/test/AndroidTest.xml b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/amrwb/dec/test/AndroidTest.xml
rename to media/module/codecs/amrwb/dec/test/AndroidTest.xml
diff --git a/media/codecs/amrwb/dec/test/README.md b/media/module/codecs/amrwb/dec/test/README.md
similarity index 100%
rename from media/codecs/amrwb/dec/test/README.md
rename to media/module/codecs/amrwb/dec/test/README.md
diff --git a/media/codecs/amrwb/dec/test/amrwbdec_test.cpp b/media/module/codecs/amrwb/dec/test/amrwbdec_test.cpp
similarity index 100%
rename from media/codecs/amrwb/dec/test/amrwbdec_test.cpp
rename to media/module/codecs/amrwb/dec/test/amrwbdec_test.cpp
diff --git a/media/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
similarity index 100%
rename from media/codecs/amrwb/enc/Android.bp
rename to media/module/codecs/amrwb/enc/Android.bp
diff --git a/media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrwb/enc/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrwb/enc/NOTICE b/media/module/codecs/amrwb/enc/NOTICE
similarity index 100%
rename from media/codecs/amrwb/enc/NOTICE
rename to media/module/codecs/amrwb/enc/NOTICE
diff --git a/media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c b/media/module/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
similarity index 100%
rename from media/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
rename to media/module/codecs/amrwb/enc/SampleCode/AMRWB_E_SAMPLE.c
diff --git a/media/codecs/amrwb/enc/SampleCode/Android.bp b/media/module/codecs/amrwb/enc/SampleCode/Android.bp
similarity index 100%
rename from media/codecs/amrwb/enc/SampleCode/Android.bp
rename to media/module/codecs/amrwb/enc/SampleCode/Android.bp
diff --git a/media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2 b/media/module/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
rename to media/module/codecs/amrwb/enc/SampleCode/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/amrwb/enc/SampleCode/NOTICE b/media/module/codecs/amrwb/enc/SampleCode/NOTICE
similarity index 100%
rename from media/codecs/amrwb/enc/SampleCode/NOTICE
rename to media/module/codecs/amrwb/enc/SampleCode/NOTICE
diff --git a/media/codecs/amrwb/enc/TEST_MAPPING b/media/module/codecs/amrwb/enc/TEST_MAPPING
similarity index 100%
rename from media/codecs/amrwb/enc/TEST_MAPPING
rename to media/module/codecs/amrwb/enc/TEST_MAPPING
diff --git a/media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf b/media/module/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
similarity index 100%
rename from media/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
rename to media/module/codecs/amrwb/enc/doc/voAMRWBEncoderSDK.pdf
Binary files differ
diff --git a/media/codecs/amrwb/enc/fuzzer/Android.bp b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
similarity index 84%
rename from media/codecs/amrwb/enc/fuzzer/Android.bp
rename to media/module/codecs/amrwb/enc/fuzzer/Android.bp
index f74fa4f..c2c13e1 100644
--- a/media/codecs/amrwb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_amrwbenc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/codecs/amrwb/enc/fuzzer/README.md b/media/module/codecs/amrwb/enc/fuzzer/README.md
similarity index 100%
rename from media/codecs/amrwb/enc/fuzzer/README.md
rename to media/module/codecs/amrwb/enc/fuzzer/README.md
diff --git a/media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp b/media/module/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
similarity index 100%
rename from media/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
rename to media/module/codecs/amrwb/enc/fuzzer/amrwb_enc_fuzzer.cpp
diff --git a/media/codecs/amrwb/enc/inc/acelp.h b/media/module/codecs/amrwb/enc/inc/acelp.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/acelp.h
rename to media/module/codecs/amrwb/enc/inc/acelp.h
diff --git a/media/codecs/amrwb/enc/inc/basic_op.h b/media/module/codecs/amrwb/enc/inc/basic_op.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/basic_op.h
rename to media/module/codecs/amrwb/enc/inc/basic_op.h
diff --git a/media/codecs/amrwb/enc/inc/bits.h b/media/module/codecs/amrwb/enc/inc/bits.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/bits.h
rename to media/module/codecs/amrwb/enc/inc/bits.h
diff --git a/media/codecs/amrwb/enc/inc/cnst.h b/media/module/codecs/amrwb/enc/inc/cnst.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/cnst.h
rename to media/module/codecs/amrwb/enc/inc/cnst.h
diff --git a/media/codecs/amrwb/enc/inc/cod_main.h b/media/module/codecs/amrwb/enc/inc/cod_main.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/cod_main.h
rename to media/module/codecs/amrwb/enc/inc/cod_main.h
diff --git a/media/codecs/amrwb/enc/inc/dtx.h b/media/module/codecs/amrwb/enc/inc/dtx.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/dtx.h
rename to media/module/codecs/amrwb/enc/inc/dtx.h
diff --git a/media/codecs/amrwb/enc/inc/grid100.tab b/media/module/codecs/amrwb/enc/inc/grid100.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/grid100.tab
rename to media/module/codecs/amrwb/enc/inc/grid100.tab
diff --git a/media/codecs/amrwb/enc/inc/ham_wind.tab b/media/module/codecs/amrwb/enc/inc/ham_wind.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/ham_wind.tab
rename to media/module/codecs/amrwb/enc/inc/ham_wind.tab
diff --git a/media/codecs/amrwb/enc/inc/homing.tab b/media/module/codecs/amrwb/enc/inc/homing.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/homing.tab
rename to media/module/codecs/amrwb/enc/inc/homing.tab
diff --git a/media/codecs/amrwb/enc/inc/isp_isf.tab b/media/module/codecs/amrwb/enc/inc/isp_isf.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/isp_isf.tab
rename to media/module/codecs/amrwb/enc/inc/isp_isf.tab
diff --git a/media/codecs/amrwb/enc/inc/lag_wind.tab b/media/module/codecs/amrwb/enc/inc/lag_wind.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/lag_wind.tab
rename to media/module/codecs/amrwb/enc/inc/lag_wind.tab
diff --git a/media/codecs/amrwb/enc/inc/log2.h b/media/module/codecs/amrwb/enc/inc/log2.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/log2.h
rename to media/module/codecs/amrwb/enc/inc/log2.h
diff --git a/media/codecs/amrwb/enc/inc/log2_tab.h b/media/module/codecs/amrwb/enc/inc/log2_tab.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/log2_tab.h
rename to media/module/codecs/amrwb/enc/inc/log2_tab.h
diff --git a/media/codecs/amrwb/enc/inc/main.h b/media/module/codecs/amrwb/enc/inc/main.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/main.h
rename to media/module/codecs/amrwb/enc/inc/main.h
diff --git a/media/codecs/amrwb/enc/inc/math_op.h b/media/module/codecs/amrwb/enc/inc/math_op.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/math_op.h
rename to media/module/codecs/amrwb/enc/inc/math_op.h
diff --git a/media/codecs/amrwb/enc/inc/mem_align.h b/media/module/codecs/amrwb/enc/inc/mem_align.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/mem_align.h
rename to media/module/codecs/amrwb/enc/inc/mem_align.h
diff --git a/media/codecs/amrwb/enc/inc/mime_io.tab b/media/module/codecs/amrwb/enc/inc/mime_io.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/mime_io.tab
rename to media/module/codecs/amrwb/enc/inc/mime_io.tab
diff --git a/media/codecs/amrwb/enc/inc/oper_32b.h b/media/module/codecs/amrwb/enc/inc/oper_32b.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/oper_32b.h
rename to media/module/codecs/amrwb/enc/inc/oper_32b.h
diff --git a/media/codecs/amrwb/enc/inc/p_med_o.h b/media/module/codecs/amrwb/enc/inc/p_med_o.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/p_med_o.h
rename to media/module/codecs/amrwb/enc/inc/p_med_o.h
diff --git a/media/codecs/amrwb/enc/inc/p_med_ol.tab b/media/module/codecs/amrwb/enc/inc/p_med_ol.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/p_med_ol.tab
rename to media/module/codecs/amrwb/enc/inc/p_med_ol.tab
diff --git a/media/codecs/amrwb/enc/inc/q_gain2.tab b/media/module/codecs/amrwb/enc/inc/q_gain2.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/q_gain2.tab
rename to media/module/codecs/amrwb/enc/inc/q_gain2.tab
diff --git a/media/codecs/amrwb/enc/inc/q_pulse.h b/media/module/codecs/amrwb/enc/inc/q_pulse.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/q_pulse.h
rename to media/module/codecs/amrwb/enc/inc/q_pulse.h
diff --git a/media/codecs/amrwb/enc/inc/qisf_ns.tab b/media/module/codecs/amrwb/enc/inc/qisf_ns.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/qisf_ns.tab
rename to media/module/codecs/amrwb/enc/inc/qisf_ns.tab
diff --git a/media/codecs/amrwb/enc/inc/qpisf_2s.tab b/media/module/codecs/amrwb/enc/inc/qpisf_2s.tab
similarity index 100%
rename from media/codecs/amrwb/enc/inc/qpisf_2s.tab
rename to media/module/codecs/amrwb/enc/inc/qpisf_2s.tab
diff --git a/media/codecs/amrwb/enc/inc/stream.h b/media/module/codecs/amrwb/enc/inc/stream.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/stream.h
rename to media/module/codecs/amrwb/enc/inc/stream.h
diff --git a/media/codecs/amrwb/enc/inc/typedef.h b/media/module/codecs/amrwb/enc/inc/typedef.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/typedef.h
rename to media/module/codecs/amrwb/enc/inc/typedef.h
diff --git a/media/codecs/amrwb/enc/inc/typedefs.h b/media/module/codecs/amrwb/enc/inc/typedefs.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/typedefs.h
rename to media/module/codecs/amrwb/enc/inc/typedefs.h
diff --git a/media/codecs/amrwb/enc/inc/wb_vad.h b/media/module/codecs/amrwb/enc/inc/wb_vad.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/wb_vad.h
rename to media/module/codecs/amrwb/enc/inc/wb_vad.h
diff --git a/media/codecs/amrwb/enc/inc/wb_vad_c.h b/media/module/codecs/amrwb/enc/inc/wb_vad_c.h
similarity index 100%
rename from media/codecs/amrwb/enc/inc/wb_vad_c.h
rename to media/module/codecs/amrwb/enc/inc/wb_vad_c.h
diff --git a/media/codecs/amrwb/enc/patent_disclaimer.txt b/media/module/codecs/amrwb/enc/patent_disclaimer.txt
similarity index 100%
rename from media/codecs/amrwb/enc/patent_disclaimer.txt
rename to media/module/codecs/amrwb/enc/patent_disclaimer.txt
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/Deemph_32_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/Dot_p_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/Filt_6k_7k_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/Norm_Corr_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/Syn_filt_32_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/convolve_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/cor_h_vec_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/pred_lt4_1_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/residu_asm_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/scale_sig_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s b/media/module/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV5E/syn_filt_opt.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/Deemph_32_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/Dot_p_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/Filt_6k_7k_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/Norm_Corr_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/Syn_filt_32_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/convolve_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/cor_h_vec_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/pred_lt4_1_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/residu_asm_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/scale_sig_neon.s
diff --git a/media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s b/media/module/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
similarity index 100%
rename from media/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
rename to media/module/codecs/amrwb/enc/src/asm/ARMV7/syn_filt_neon.s
diff --git a/media/codecs/amrwb/enc/src/autocorr.c b/media/module/codecs/amrwb/enc/src/autocorr.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/autocorr.c
rename to media/module/codecs/amrwb/enc/src/autocorr.c
diff --git a/media/codecs/amrwb/enc/src/az_isp.c b/media/module/codecs/amrwb/enc/src/az_isp.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/az_isp.c
rename to media/module/codecs/amrwb/enc/src/az_isp.c
diff --git a/media/codecs/amrwb/enc/src/bits.c b/media/module/codecs/amrwb/enc/src/bits.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/bits.c
rename to media/module/codecs/amrwb/enc/src/bits.c
diff --git a/media/codecs/amrwb/enc/src/c2t64fx.c b/media/module/codecs/amrwb/enc/src/c2t64fx.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/c2t64fx.c
rename to media/module/codecs/amrwb/enc/src/c2t64fx.c
diff --git a/media/codecs/amrwb/enc/src/c4t64fx.c b/media/module/codecs/amrwb/enc/src/c4t64fx.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/c4t64fx.c
rename to media/module/codecs/amrwb/enc/src/c4t64fx.c
diff --git a/media/codecs/amrwb/enc/src/convolve.c b/media/module/codecs/amrwb/enc/src/convolve.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/convolve.c
rename to media/module/codecs/amrwb/enc/src/convolve.c
diff --git a/media/codecs/amrwb/enc/src/cor_h_x.c b/media/module/codecs/amrwb/enc/src/cor_h_x.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/cor_h_x.c
rename to media/module/codecs/amrwb/enc/src/cor_h_x.c
diff --git a/media/codecs/amrwb/enc/src/decim54.c b/media/module/codecs/amrwb/enc/src/decim54.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/decim54.c
rename to media/module/codecs/amrwb/enc/src/decim54.c
diff --git a/media/codecs/amrwb/enc/src/deemph.c b/media/module/codecs/amrwb/enc/src/deemph.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/deemph.c
rename to media/module/codecs/amrwb/enc/src/deemph.c
diff --git a/media/codecs/amrwb/enc/src/dtx.c b/media/module/codecs/amrwb/enc/src/dtx.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/dtx.c
rename to media/module/codecs/amrwb/enc/src/dtx.c
diff --git a/media/codecs/amrwb/enc/src/g_pitch.c b/media/module/codecs/amrwb/enc/src/g_pitch.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/g_pitch.c
rename to media/module/codecs/amrwb/enc/src/g_pitch.c
diff --git a/media/codecs/amrwb/enc/src/gpclip.c b/media/module/codecs/amrwb/enc/src/gpclip.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/gpclip.c
rename to media/module/codecs/amrwb/enc/src/gpclip.c
diff --git a/media/codecs/amrwb/enc/src/homing.c b/media/module/codecs/amrwb/enc/src/homing.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/homing.c
rename to media/module/codecs/amrwb/enc/src/homing.c
diff --git a/media/codecs/amrwb/enc/src/hp400.c b/media/module/codecs/amrwb/enc/src/hp400.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/hp400.c
rename to media/module/codecs/amrwb/enc/src/hp400.c
diff --git a/media/codecs/amrwb/enc/src/hp50.c b/media/module/codecs/amrwb/enc/src/hp50.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/hp50.c
rename to media/module/codecs/amrwb/enc/src/hp50.c
diff --git a/media/codecs/amrwb/enc/src/hp6k.c b/media/module/codecs/amrwb/enc/src/hp6k.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/hp6k.c
rename to media/module/codecs/amrwb/enc/src/hp6k.c
diff --git a/media/codecs/amrwb/enc/src/hp_wsp.c b/media/module/codecs/amrwb/enc/src/hp_wsp.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/hp_wsp.c
rename to media/module/codecs/amrwb/enc/src/hp_wsp.c
diff --git a/media/codecs/amrwb/enc/src/int_lpc.c b/media/module/codecs/amrwb/enc/src/int_lpc.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/int_lpc.c
rename to media/module/codecs/amrwb/enc/src/int_lpc.c
diff --git a/media/codecs/amrwb/enc/src/isp_az.c b/media/module/codecs/amrwb/enc/src/isp_az.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/isp_az.c
rename to media/module/codecs/amrwb/enc/src/isp_az.c
diff --git a/media/codecs/amrwb/enc/src/isp_isf.c b/media/module/codecs/amrwb/enc/src/isp_isf.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/isp_isf.c
rename to media/module/codecs/amrwb/enc/src/isp_isf.c
diff --git a/media/codecs/amrwb/enc/src/lag_wind.c b/media/module/codecs/amrwb/enc/src/lag_wind.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/lag_wind.c
rename to media/module/codecs/amrwb/enc/src/lag_wind.c
diff --git a/media/codecs/amrwb/enc/src/levinson.c b/media/module/codecs/amrwb/enc/src/levinson.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/levinson.c
rename to media/module/codecs/amrwb/enc/src/levinson.c
diff --git a/media/codecs/amrwb/enc/src/log2.c b/media/module/codecs/amrwb/enc/src/log2.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/log2.c
rename to media/module/codecs/amrwb/enc/src/log2.c
diff --git a/media/codecs/amrwb/enc/src/lp_dec2.c b/media/module/codecs/amrwb/enc/src/lp_dec2.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/lp_dec2.c
rename to media/module/codecs/amrwb/enc/src/lp_dec2.c
diff --git a/media/codecs/amrwb/enc/src/math_op.c b/media/module/codecs/amrwb/enc/src/math_op.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/math_op.c
rename to media/module/codecs/amrwb/enc/src/math_op.c
diff --git a/media/codecs/amrwb/enc/src/mem_align.c b/media/module/codecs/amrwb/enc/src/mem_align.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/mem_align.c
rename to media/module/codecs/amrwb/enc/src/mem_align.c
diff --git a/media/codecs/amrwb/enc/src/oper_32b.c b/media/module/codecs/amrwb/enc/src/oper_32b.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/oper_32b.c
rename to media/module/codecs/amrwb/enc/src/oper_32b.c
diff --git a/media/codecs/amrwb/enc/src/p_med_ol.c b/media/module/codecs/amrwb/enc/src/p_med_ol.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/p_med_ol.c
rename to media/module/codecs/amrwb/enc/src/p_med_ol.c
diff --git a/media/codecs/amrwb/enc/src/pit_shrp.c b/media/module/codecs/amrwb/enc/src/pit_shrp.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/pit_shrp.c
rename to media/module/codecs/amrwb/enc/src/pit_shrp.c
diff --git a/media/codecs/amrwb/enc/src/pitch_f4.c b/media/module/codecs/amrwb/enc/src/pitch_f4.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/pitch_f4.c
rename to media/module/codecs/amrwb/enc/src/pitch_f4.c
diff --git a/media/codecs/amrwb/enc/src/pred_lt4.c b/media/module/codecs/amrwb/enc/src/pred_lt4.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/pred_lt4.c
rename to media/module/codecs/amrwb/enc/src/pred_lt4.c
diff --git a/media/codecs/amrwb/enc/src/preemph.c b/media/module/codecs/amrwb/enc/src/preemph.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/preemph.c
rename to media/module/codecs/amrwb/enc/src/preemph.c
diff --git a/media/codecs/amrwb/enc/src/q_gain2.c b/media/module/codecs/amrwb/enc/src/q_gain2.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/q_gain2.c
rename to media/module/codecs/amrwb/enc/src/q_gain2.c
diff --git a/media/codecs/amrwb/enc/src/q_pulse.c b/media/module/codecs/amrwb/enc/src/q_pulse.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/q_pulse.c
rename to media/module/codecs/amrwb/enc/src/q_pulse.c
diff --git a/media/codecs/amrwb/enc/src/qisf_ns.c b/media/module/codecs/amrwb/enc/src/qisf_ns.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/qisf_ns.c
rename to media/module/codecs/amrwb/enc/src/qisf_ns.c
diff --git a/media/codecs/amrwb/enc/src/qpisf_2s.c b/media/module/codecs/amrwb/enc/src/qpisf_2s.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/qpisf_2s.c
rename to media/module/codecs/amrwb/enc/src/qpisf_2s.c
diff --git a/media/codecs/amrwb/enc/src/random.c b/media/module/codecs/amrwb/enc/src/random.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/random.c
rename to media/module/codecs/amrwb/enc/src/random.c
diff --git a/media/codecs/amrwb/enc/src/residu.c b/media/module/codecs/amrwb/enc/src/residu.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/residu.c
rename to media/module/codecs/amrwb/enc/src/residu.c
diff --git a/media/codecs/amrwb/enc/src/scale.c b/media/module/codecs/amrwb/enc/src/scale.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/scale.c
rename to media/module/codecs/amrwb/enc/src/scale.c
diff --git a/media/codecs/amrwb/enc/src/stream.c b/media/module/codecs/amrwb/enc/src/stream.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/stream.c
rename to media/module/codecs/amrwb/enc/src/stream.c
diff --git a/media/codecs/amrwb/enc/src/syn_filt.c b/media/module/codecs/amrwb/enc/src/syn_filt.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/syn_filt.c
rename to media/module/codecs/amrwb/enc/src/syn_filt.c
diff --git a/media/codecs/amrwb/enc/src/updt_tar.c b/media/module/codecs/amrwb/enc/src/updt_tar.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/updt_tar.c
rename to media/module/codecs/amrwb/enc/src/updt_tar.c
diff --git a/media/codecs/amrwb/enc/src/util.c b/media/module/codecs/amrwb/enc/src/util.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/util.c
rename to media/module/codecs/amrwb/enc/src/util.c
diff --git a/media/codecs/amrwb/enc/src/voAMRWBEnc.c b/media/module/codecs/amrwb/enc/src/voAMRWBEnc.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/voAMRWBEnc.c
rename to media/module/codecs/amrwb/enc/src/voAMRWBEnc.c
diff --git a/media/codecs/amrwb/enc/src/voicefac.c b/media/module/codecs/amrwb/enc/src/voicefac.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/voicefac.c
rename to media/module/codecs/amrwb/enc/src/voicefac.c
diff --git a/media/codecs/amrwb/enc/src/wb_vad.c b/media/module/codecs/amrwb/enc/src/wb_vad.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/wb_vad.c
rename to media/module/codecs/amrwb/enc/src/wb_vad.c
diff --git a/media/codecs/amrwb/enc/src/weight_a.c b/media/module/codecs/amrwb/enc/src/weight_a.c
similarity index 100%
rename from media/codecs/amrwb/enc/src/weight_a.c
rename to media/module/codecs/amrwb/enc/src/weight_a.c
diff --git a/media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h b/media/module/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
similarity index 100%
rename from media/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
rename to media/module/codecs/amrwb/enc/test/AmrwbEncTestEnvironment.h
diff --git a/media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp b/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
similarity index 100%
rename from media/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
rename to media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
diff --git a/media/codecs/amrwb/enc/test/Android.bp b/media/module/codecs/amrwb/enc/test/Android.bp
similarity index 100%
rename from media/codecs/amrwb/enc/test/Android.bp
rename to media/module/codecs/amrwb/enc/test/Android.bp
diff --git a/media/codecs/amrwb/enc/test/AndroidTest.xml b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/amrwb/enc/test/AndroidTest.xml
rename to media/module/codecs/amrwb/enc/test/AndroidTest.xml
diff --git a/media/codecs/amrwb/enc/test/README.md b/media/module/codecs/amrwb/enc/test/README.md
similarity index 100%
rename from media/codecs/amrwb/enc/test/README.md
rename to media/module/codecs/amrwb/enc/test/README.md
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/module/codecs/common/Android.bp
similarity index 100%
rename from media/libstagefright/codecs/common/Android.bp
rename to media/module/codecs/common/Android.bp
diff --git a/media/libstagefright/codecs/common/MODULE_LICENSE_APACHE2 b/media/module/codecs/common/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/codecs/common/MODULE_LICENSE_APACHE2
rename to media/module/codecs/common/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/common/NOTICE b/media/module/codecs/common/NOTICE
similarity index 100%
rename from media/libstagefright/codecs/common/NOTICE
rename to media/module/codecs/common/NOTICE
diff --git a/media/libstagefright/codecs/common/cmnMemory.c b/media/module/codecs/common/cmnMemory.c
similarity index 100%
rename from media/libstagefright/codecs/common/cmnMemory.c
rename to media/module/codecs/common/cmnMemory.c
diff --git a/media/libstagefright/codecs/common/include/cmnMemory.h b/media/module/codecs/common/include/cmnMemory.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/cmnMemory.h
rename to media/module/codecs/common/include/cmnMemory.h
diff --git a/media/libstagefright/codecs/common/include/voAAC.h b/media/module/codecs/common/include/voAAC.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voAAC.h
rename to media/module/codecs/common/include/voAAC.h
diff --git a/media/libstagefright/codecs/common/include/voAMRWB.h b/media/module/codecs/common/include/voAMRWB.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voAMRWB.h
rename to media/module/codecs/common/include/voAMRWB.h
diff --git a/media/libstagefright/codecs/common/include/voAudio.h b/media/module/codecs/common/include/voAudio.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voAudio.h
rename to media/module/codecs/common/include/voAudio.h
diff --git a/media/libstagefright/codecs/common/include/voIndex.h b/media/module/codecs/common/include/voIndex.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voIndex.h
rename to media/module/codecs/common/include/voIndex.h
diff --git a/media/libstagefright/codecs/common/include/voMem.h b/media/module/codecs/common/include/voMem.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voMem.h
rename to media/module/codecs/common/include/voMem.h
diff --git a/media/libstagefright/codecs/common/include/voType.h b/media/module/codecs/common/include/voType.h
similarity index 100%
rename from media/libstagefright/codecs/common/include/voType.h
rename to media/module/codecs/common/include/voType.h
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/module/codecs/flac/dec/Android.bp
similarity index 100%
rename from media/libstagefright/flac/dec/Android.bp
rename to media/module/codecs/flac/dec/Android.bp
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/module/codecs/flac/dec/FLACDecoder.cpp
similarity index 100%
rename from media/libstagefright/flac/dec/FLACDecoder.cpp
rename to media/module/codecs/flac/dec/FLACDecoder.cpp
diff --git a/media/libstagefright/flac/dec/FLACDecoder.h b/media/module/codecs/flac/dec/FLACDecoder.h
similarity index 100%
rename from media/libstagefright/flac/dec/FLACDecoder.h
rename to media/module/codecs/flac/dec/FLACDecoder.h
diff --git a/media/libstagefright/flac/dec/MODULE_LICENSE_APACHE2 b/media/module/codecs/flac/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/flac/dec/MODULE_LICENSE_APACHE2
rename to media/module/codecs/flac/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/flac/dec/NOTICE b/media/module/codecs/flac/dec/NOTICE
similarity index 100%
rename from media/libstagefright/flac/dec/NOTICE
rename to media/module/codecs/flac/dec/NOTICE
diff --git a/media/libstagefright/flac/dec/test/Android.bp b/media/module/codecs/flac/dec/test/Android.bp
similarity index 100%
rename from media/libstagefright/flac/dec/test/Android.bp
rename to media/module/codecs/flac/dec/test/Android.bp
diff --git a/media/libstagefright/flac/dec/test/AndroidTest.xml b/media/module/codecs/flac/dec/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/flac/dec/test/AndroidTest.xml
rename to media/module/codecs/flac/dec/test/AndroidTest.xml
diff --git a/media/libstagefright/flac/dec/test/FlacDecoderTest.cpp b/media/module/codecs/flac/dec/test/FlacDecoderTest.cpp
similarity index 100%
rename from media/libstagefright/flac/dec/test/FlacDecoderTest.cpp
rename to media/module/codecs/flac/dec/test/FlacDecoderTest.cpp
diff --git a/media/libstagefright/flac/dec/test/FlacDecoderTestEnvironment.h b/media/module/codecs/flac/dec/test/FlacDecoderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/flac/dec/test/FlacDecoderTestEnvironment.h
rename to media/module/codecs/flac/dec/test/FlacDecoderTestEnvironment.h
diff --git a/media/libstagefright/flac/dec/test/README.md b/media/module/codecs/flac/dec/test/README.md
similarity index 100%
rename from media/libstagefright/flac/dec/test/README.md
rename to media/module/codecs/flac/dec/test/README.md
diff --git a/media/codecs/g711/decoder/Android.bp b/media/module/codecs/g711/decoder/Android.bp
similarity index 100%
rename from media/codecs/g711/decoder/Android.bp
rename to media/module/codecs/g711/decoder/Android.bp
diff --git a/media/codecs/g711/decoder/g711Dec.h b/media/module/codecs/g711/decoder/g711Dec.h
similarity index 100%
rename from media/codecs/g711/decoder/g711Dec.h
rename to media/module/codecs/g711/decoder/g711Dec.h
diff --git a/media/codecs/g711/decoder/g711DecAlaw.cpp b/media/module/codecs/g711/decoder/g711DecAlaw.cpp
similarity index 100%
rename from media/codecs/g711/decoder/g711DecAlaw.cpp
rename to media/module/codecs/g711/decoder/g711DecAlaw.cpp
diff --git a/media/codecs/g711/decoder/g711DecMlaw.cpp b/media/module/codecs/g711/decoder/g711DecMlaw.cpp
similarity index 100%
rename from media/codecs/g711/decoder/g711DecMlaw.cpp
rename to media/module/codecs/g711/decoder/g711DecMlaw.cpp
diff --git a/media/codecs/g711/fuzzer/Android.bp b/media/module/codecs/g711/fuzzer/Android.bp
similarity index 75%
rename from media/codecs/g711/fuzzer/Android.bp
rename to media/module/codecs/g711/fuzzer/Android.bp
index 376cce7..397fb9a 100644
--- a/media/codecs/g711/fuzzer/Android.bp
+++ b/media/module/codecs/g711/fuzzer/Android.bp
@@ -44,6 +44,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Alaw APIs",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -61,5 +69,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Mlaw APIs",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
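Note: the fuzz_config additions above (and the matching blocks added to the m4v_h263 and mp3dec fuzzer Android.bp files later in this patch) only attach triage metadata — hotlist, attack vector, service privilege, user model, and code-usage status — to existing cc_fuzz targets; they do not change the harnesses themselves. For orientation, a minimal libFuzzer harness of the shape these targets build is sketched below; `decode_bytes` is a hypothetical stand-in for the real Alaw/Mlaw decode calls in g711_dec_fuzzer.cpp.

```cpp
#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for the Alaw/Mlaw decode calls exercised by
// g711_dec_fuzzer.cpp; present only to make the sketch self-contained.
static void decode_bytes(const uint8_t* data, size_t size) {
    (void)data;
    (void)size;
}

// Standard libFuzzer entry point built by the cc_fuzz targets configured
// above; the metadata in fuzz_config describes how reports from this target
// are routed and triaged, not anything the harness itself does.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    decode_bytes(data, size);
    return 0;
}
```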
diff --git a/media/codecs/g711/fuzzer/README.md b/media/module/codecs/g711/fuzzer/README.md
similarity index 100%
rename from media/codecs/g711/fuzzer/README.md
rename to media/module/codecs/g711/fuzzer/README.md
diff --git a/media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp b/media/module/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
similarity index 100%
rename from media/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
rename to media/module/codecs/g711/fuzzer/g711_dec_fuzzer.cpp
diff --git a/media/codecs/m4v_h263/TEST_MAPPING b/media/module/codecs/m4v_h263/TEST_MAPPING
similarity index 100%
rename from media/codecs/m4v_h263/TEST_MAPPING
rename to media/module/codecs/m4v_h263/TEST_MAPPING
diff --git a/media/codecs/m4v_h263/dec/Android.bp b/media/module/codecs/m4v_h263/dec/Android.bp
similarity index 100%
rename from media/codecs/m4v_h263/dec/Android.bp
rename to media/module/codecs/m4v_h263/dec/Android.bp
diff --git a/media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2 b/media/module/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
rename to media/module/codecs/m4v_h263/dec/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/m4v_h263/dec/NOTICE b/media/module/codecs/m4v_h263/dec/NOTICE
similarity index 100%
rename from media/codecs/m4v_h263/dec/NOTICE
rename to media/module/codecs/m4v_h263/dec/NOTICE
diff --git a/media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h b/media/module/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
rename to media/module/codecs/m4v_h263/dec/include/m4vh263_decoder_pv_types.h
diff --git a/media/codecs/m4v_h263/dec/include/mp4dec_api.h b/media/module/codecs/m4v_h263/dec/include/mp4dec_api.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/include/mp4dec_api.h
rename to media/module/codecs/m4v_h263/dec/include/mp4dec_api.h
diff --git a/media/codecs/m4v_h263/dec/include/visual_header.h b/media/module/codecs/m4v_h263/dec/include/visual_header.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/include/visual_header.h
rename to media/module/codecs/m4v_h263/dec/include/visual_header.h
diff --git a/media/codecs/m4v_h263/dec/src/bitstream.cpp b/media/module/codecs/m4v_h263/dec/src/bitstream.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/bitstream.cpp
rename to media/module/codecs/m4v_h263/dec/src/bitstream.cpp
diff --git a/media/codecs/m4v_h263/dec/src/bitstream.h b/media/module/codecs/m4v_h263/dec/src/bitstream.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/bitstream.h
rename to media/module/codecs/m4v_h263/dec/src/bitstream.h
diff --git a/media/codecs/m4v_h263/dec/src/block_idct.cpp b/media/module/codecs/m4v_h263/dec/src/block_idct.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/block_idct.cpp
rename to media/module/codecs/m4v_h263/dec/src/block_idct.cpp
diff --git a/media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp b/media/module/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
rename to media/module/codecs/m4v_h263/dec/src/cal_dc_scaler.cpp
diff --git a/media/codecs/m4v_h263/dec/src/combined_decode.cpp b/media/module/codecs/m4v_h263/dec/src/combined_decode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/combined_decode.cpp
rename to media/module/codecs/m4v_h263/dec/src/combined_decode.cpp
diff --git a/media/codecs/m4v_h263/dec/src/conceal.cpp b/media/module/codecs/m4v_h263/dec/src/conceal.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/conceal.cpp
rename to media/module/codecs/m4v_h263/dec/src/conceal.cpp
diff --git a/media/codecs/m4v_h263/dec/src/datapart_decode.cpp b/media/module/codecs/m4v_h263/dec/src/datapart_decode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/datapart_decode.cpp
rename to media/module/codecs/m4v_h263/dec/src/datapart_decode.cpp
diff --git a/media/codecs/m4v_h263/dec/src/dcac_prediction.cpp b/media/module/codecs/m4v_h263/dec/src/dcac_prediction.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/dcac_prediction.cpp
rename to media/module/codecs/m4v_h263/dec/src/dcac_prediction.cpp
diff --git a/media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp b/media/module/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
rename to media/module/codecs/m4v_h263/dec/src/dec_pred_intra_dc.cpp
diff --git a/media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp b/media/module/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
rename to media/module/codecs/m4v_h263/dec/src/get_pred_adv_b_add.cpp
diff --git a/media/codecs/m4v_h263/dec/src/get_pred_outside.cpp b/media/module/codecs/m4v_h263/dec/src/get_pred_outside.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/get_pred_outside.cpp
rename to media/module/codecs/m4v_h263/dec/src/get_pred_outside.cpp
diff --git a/media/codecs/m4v_h263/dec/src/idct.cpp b/media/module/codecs/m4v_h263/dec/src/idct.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/idct.cpp
rename to media/module/codecs/m4v_h263/dec/src/idct.cpp
diff --git a/media/codecs/m4v_h263/dec/src/idct.h b/media/module/codecs/m4v_h263/dec/src/idct.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/idct.h
rename to media/module/codecs/m4v_h263/dec/src/idct.h
diff --git a/media/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/module/codecs/m4v_h263/dec/src/idct_vca.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/idct_vca.cpp
rename to media/module/codecs/m4v_h263/dec/src/idct_vca.cpp
diff --git a/media/codecs/m4v_h263/dec/src/max_level.h b/media/module/codecs/m4v_h263/dec/src/max_level.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/max_level.h
rename to media/module/codecs/m4v_h263/dec/src/max_level.h
diff --git a/media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp b/media/module/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
rename to media/module/codecs/m4v_h263/dec/src/mb_motion_comp.cpp
diff --git a/media/codecs/m4v_h263/dec/src/mb_utils.cpp b/media/module/codecs/m4v_h263/dec/src/mb_utils.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mb_utils.cpp
rename to media/module/codecs/m4v_h263/dec/src/mb_utils.cpp
diff --git a/media/codecs/m4v_h263/dec/src/mbtype_mode.h b/media/module/codecs/m4v_h263/dec/src/mbtype_mode.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mbtype_mode.h
rename to media/module/codecs/m4v_h263/dec/src/mbtype_mode.h
diff --git a/media/codecs/m4v_h263/dec/src/motion_comp.h b/media/module/codecs/m4v_h263/dec/src/motion_comp.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/motion_comp.h
rename to media/module/codecs/m4v_h263/dec/src/motion_comp.h
diff --git a/media/codecs/m4v_h263/dec/src/mp4dec_lib.h b/media/module/codecs/m4v_h263/dec/src/mp4dec_lib.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mp4dec_lib.h
rename to media/module/codecs/m4v_h263/dec/src/mp4dec_lib.h
diff --git a/media/codecs/m4v_h263/dec/src/mp4def.h b/media/module/codecs/m4v_h263/dec/src/mp4def.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mp4def.h
rename to media/module/codecs/m4v_h263/dec/src/mp4def.h
diff --git a/media/codecs/m4v_h263/dec/src/mp4lib_int.h b/media/module/codecs/m4v_h263/dec/src/mp4lib_int.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/mp4lib_int.h
rename to media/module/codecs/m4v_h263/dec/src/mp4lib_int.h
diff --git a/media/codecs/m4v_h263/dec/src/packet_util.cpp b/media/module/codecs/m4v_h263/dec/src/packet_util.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/packet_util.cpp
rename to media/module/codecs/m4v_h263/dec/src/packet_util.cpp
diff --git a/media/codecs/m4v_h263/dec/src/post_filter.cpp b/media/module/codecs/m4v_h263/dec/src/post_filter.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/post_filter.cpp
rename to media/module/codecs/m4v_h263/dec/src/post_filter.cpp
diff --git a/media/codecs/m4v_h263/dec/src/post_proc.h b/media/module/codecs/m4v_h263/dec/src/post_proc.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/post_proc.h
rename to media/module/codecs/m4v_h263/dec/src/post_proc.h
diff --git a/media/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/module/codecs/m4v_h263/dec/src/pvdec_api.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/pvdec_api.cpp
rename to media/module/codecs/m4v_h263/dec/src/pvdec_api.cpp
diff --git a/media/codecs/m4v_h263/dec/src/scaling.h b/media/module/codecs/m4v_h263/dec/src/scaling.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/scaling.h
rename to media/module/codecs/m4v_h263/dec/src/scaling.h
diff --git a/media/codecs/m4v_h263/dec/src/scaling_tab.cpp b/media/module/codecs/m4v_h263/dec/src/scaling_tab.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/scaling_tab.cpp
rename to media/module/codecs/m4v_h263/dec/src/scaling_tab.cpp
diff --git a/media/codecs/m4v_h263/dec/src/vlc_dec_tab.h b/media/module/codecs/m4v_h263/dec/src/vlc_dec_tab.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vlc_dec_tab.h
rename to media/module/codecs/m4v_h263/dec/src/vlc_dec_tab.h
diff --git a/media/codecs/m4v_h263/dec/src/vlc_decode.cpp b/media/module/codecs/m4v_h263/dec/src/vlc_decode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vlc_decode.cpp
rename to media/module/codecs/m4v_h263/dec/src/vlc_decode.cpp
diff --git a/media/codecs/m4v_h263/dec/src/vlc_decode.h b/media/module/codecs/m4v_h263/dec/src/vlc_decode.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vlc_decode.h
rename to media/module/codecs/m4v_h263/dec/src/vlc_decode.h
diff --git a/media/codecs/m4v_h263/dec/src/vlc_dequant.cpp b/media/module/codecs/m4v_h263/dec/src/vlc_dequant.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vlc_dequant.cpp
rename to media/module/codecs/m4v_h263/dec/src/vlc_dequant.cpp
diff --git a/media/codecs/m4v_h263/dec/src/vlc_tab.cpp b/media/module/codecs/m4v_h263/dec/src/vlc_tab.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vlc_tab.cpp
rename to media/module/codecs/m4v_h263/dec/src/vlc_tab.cpp
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/module/codecs/m4v_h263/dec/src/vop.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/vop.cpp
rename to media/module/codecs/m4v_h263/dec/src/vop.cpp
diff --git a/media/codecs/m4v_h263/dec/src/zigzag.h b/media/module/codecs/m4v_h263/dec/src/zigzag.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/zigzag.h
rename to media/module/codecs/m4v_h263/dec/src/zigzag.h
diff --git a/media/codecs/m4v_h263/dec/src/zigzag_tab.cpp b/media/module/codecs/m4v_h263/dec/src/zigzag_tab.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/src/zigzag_tab.cpp
rename to media/module/codecs/m4v_h263/dec/src/zigzag_tab.cpp
diff --git a/media/codecs/m4v_h263/dec/test/Android.bp b/media/module/codecs/m4v_h263/dec/test/Android.bp
similarity index 100%
rename from media/codecs/m4v_h263/dec/test/Android.bp
rename to media/module/codecs/m4v_h263/dec/test/Android.bp
diff --git a/media/codecs/m4v_h263/dec/test/AndroidTest.xml b/media/module/codecs/m4v_h263/dec/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/m4v_h263/dec/test/AndroidTest.xml
rename to media/module/codecs/m4v_h263/dec/test/AndroidTest.xml
diff --git a/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp b/media/module/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
similarity index 100%
rename from media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
rename to media/module/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTest.cpp
diff --git a/media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h b/media/module/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
similarity index 100%
rename from media/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
rename to media/module/codecs/m4v_h263/dec/test/Mpeg4H263DecoderTestEnvironment.h
diff --git a/media/codecs/m4v_h263/dec/test/README.md b/media/module/codecs/m4v_h263/dec/test/README.md
similarity index 100%
rename from media/codecs/m4v_h263/dec/test/README.md
rename to media/module/codecs/m4v_h263/dec/test/README.md
diff --git a/media/codecs/m4v_h263/enc/Android.bp b/media/module/codecs/m4v_h263/enc/Android.bp
similarity index 100%
rename from media/codecs/m4v_h263/enc/Android.bp
rename to media/module/codecs/m4v_h263/enc/Android.bp
diff --git a/media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2 b/media/module/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
rename to media/module/codecs/m4v_h263/enc/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/m4v_h263/enc/NOTICE b/media/module/codecs/m4v_h263/enc/NOTICE
similarity index 100%
rename from media/codecs/m4v_h263/enc/NOTICE
rename to media/module/codecs/m4v_h263/enc/NOTICE
diff --git a/media/codecs/m4v_h263/enc/include/cvei.h b/media/module/codecs/m4v_h263/enc/include/cvei.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/include/cvei.h
rename to media/module/codecs/m4v_h263/enc/include/cvei.h
diff --git a/media/codecs/m4v_h263/enc/include/mp4enc_api.h b/media/module/codecs/m4v_h263/enc/include/mp4enc_api.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/include/mp4enc_api.h
rename to media/module/codecs/m4v_h263/enc/include/mp4enc_api.h
diff --git a/media/codecs/m4v_h263/enc/src/bitstream_io.cpp b/media/module/codecs/m4v_h263/enc/src/bitstream_io.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/bitstream_io.cpp
rename to media/module/codecs/m4v_h263/enc/src/bitstream_io.cpp
diff --git a/media/codecs/m4v_h263/enc/src/bitstream_io.h b/media/module/codecs/m4v_h263/enc/src/bitstream_io.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/bitstream_io.h
rename to media/module/codecs/m4v_h263/enc/src/bitstream_io.h
diff --git a/media/codecs/m4v_h263/enc/src/combined_encode.cpp b/media/module/codecs/m4v_h263/enc/src/combined_encode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/combined_encode.cpp
rename to media/module/codecs/m4v_h263/enc/src/combined_encode.cpp
diff --git a/media/codecs/m4v_h263/enc/src/datapart_encode.cpp b/media/module/codecs/m4v_h263/enc/src/datapart_encode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/datapart_encode.cpp
rename to media/module/codecs/m4v_h263/enc/src/datapart_encode.cpp
diff --git a/media/codecs/m4v_h263/enc/src/dct.cpp b/media/module/codecs/m4v_h263/enc/src/dct.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/dct.cpp
rename to media/module/codecs/m4v_h263/enc/src/dct.cpp
diff --git a/media/codecs/m4v_h263/enc/src/dct.h b/media/module/codecs/m4v_h263/enc/src/dct.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/dct.h
rename to media/module/codecs/m4v_h263/enc/src/dct.h
diff --git a/media/codecs/m4v_h263/enc/src/dct_inline.h b/media/module/codecs/m4v_h263/enc/src/dct_inline.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/dct_inline.h
rename to media/module/codecs/m4v_h263/enc/src/dct_inline.h
diff --git a/media/codecs/m4v_h263/enc/src/fastcodemb.cpp b/media/module/codecs/m4v_h263/enc/src/fastcodemb.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/fastcodemb.cpp
rename to media/module/codecs/m4v_h263/enc/src/fastcodemb.cpp
diff --git a/media/codecs/m4v_h263/enc/src/fastcodemb.h b/media/module/codecs/m4v_h263/enc/src/fastcodemb.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/fastcodemb.h
rename to media/module/codecs/m4v_h263/enc/src/fastcodemb.h
diff --git a/media/codecs/m4v_h263/enc/src/fastidct.cpp b/media/module/codecs/m4v_h263/enc/src/fastidct.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/fastidct.cpp
rename to media/module/codecs/m4v_h263/enc/src/fastidct.cpp
diff --git a/media/codecs/m4v_h263/enc/src/fastquant.cpp b/media/module/codecs/m4v_h263/enc/src/fastquant.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/fastquant.cpp
rename to media/module/codecs/m4v_h263/enc/src/fastquant.cpp
diff --git a/media/codecs/m4v_h263/enc/src/fastquant_inline.h b/media/module/codecs/m4v_h263/enc/src/fastquant_inline.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/fastquant_inline.h
rename to media/module/codecs/m4v_h263/enc/src/fastquant_inline.h
diff --git a/media/codecs/m4v_h263/enc/src/findhalfpel.cpp b/media/module/codecs/m4v_h263/enc/src/findhalfpel.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/findhalfpel.cpp
rename to media/module/codecs/m4v_h263/enc/src/findhalfpel.cpp
diff --git a/media/codecs/m4v_h263/enc/src/m4venc_oscl.h b/media/module/codecs/m4v_h263/enc/src/m4venc_oscl.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/m4venc_oscl.h
rename to media/module/codecs/m4v_h263/enc/src/m4venc_oscl.h
diff --git a/media/codecs/m4v_h263/enc/src/me_utils.cpp b/media/module/codecs/m4v_h263/enc/src/me_utils.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/me_utils.cpp
rename to media/module/codecs/m4v_h263/enc/src/me_utils.cpp
diff --git a/media/codecs/m4v_h263/enc/src/motion_comp.cpp b/media/module/codecs/m4v_h263/enc/src/motion_comp.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/motion_comp.cpp
rename to media/module/codecs/m4v_h263/enc/src/motion_comp.cpp
diff --git a/media/codecs/m4v_h263/enc/src/motion_est.cpp b/media/module/codecs/m4v_h263/enc/src/motion_est.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/motion_est.cpp
rename to media/module/codecs/m4v_h263/enc/src/motion_est.cpp
diff --git a/media/codecs/m4v_h263/enc/src/mp4def.h b/media/module/codecs/m4v_h263/enc/src/mp4def.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/mp4def.h
rename to media/module/codecs/m4v_h263/enc/src/mp4def.h
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/module/codecs/m4v_h263/enc/src/mp4enc_api.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
rename to media/module/codecs/m4v_h263/enc/src/mp4enc_api.cpp
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_lib.h b/media/module/codecs/m4v_h263/enc/src/mp4enc_lib.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/mp4enc_lib.h
rename to media/module/codecs/m4v_h263/enc/src/mp4enc_lib.h
diff --git a/media/codecs/m4v_h263/enc/src/mp4lib_int.h b/media/module/codecs/m4v_h263/enc/src/mp4lib_int.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/mp4lib_int.h
rename to media/module/codecs/m4v_h263/enc/src/mp4lib_int.h
diff --git a/media/codecs/m4v_h263/enc/src/rate_control.cpp b/media/module/codecs/m4v_h263/enc/src/rate_control.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/rate_control.cpp
rename to media/module/codecs/m4v_h263/enc/src/rate_control.cpp
diff --git a/media/codecs/m4v_h263/enc/src/rate_control.h b/media/module/codecs/m4v_h263/enc/src/rate_control.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/rate_control.h
rename to media/module/codecs/m4v_h263/enc/src/rate_control.h
diff --git a/media/codecs/m4v_h263/enc/src/sad.cpp b/media/module/codecs/m4v_h263/enc/src/sad.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/sad.cpp
rename to media/module/codecs/m4v_h263/enc/src/sad.cpp
diff --git a/media/codecs/m4v_h263/enc/src/sad_halfpel.cpp b/media/module/codecs/m4v_h263/enc/src/sad_halfpel.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/sad_halfpel.cpp
rename to media/module/codecs/m4v_h263/enc/src/sad_halfpel.cpp
diff --git a/media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h b/media/module/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
rename to media/module/codecs/m4v_h263/enc/src/sad_halfpel_inline.h
diff --git a/media/codecs/m4v_h263/enc/src/sad_inline.h b/media/module/codecs/m4v_h263/enc/src/sad_inline.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/sad_inline.h
rename to media/module/codecs/m4v_h263/enc/src/sad_inline.h
diff --git a/media/codecs/m4v_h263/enc/src/sad_mb_offset.h b/media/module/codecs/m4v_h263/enc/src/sad_mb_offset.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/sad_mb_offset.h
rename to media/module/codecs/m4v_h263/enc/src/sad_mb_offset.h
diff --git a/media/codecs/m4v_h263/enc/src/vlc_enc_tab.h b/media/module/codecs/m4v_h263/enc/src/vlc_enc_tab.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/vlc_enc_tab.h
rename to media/module/codecs/m4v_h263/enc/src/vlc_enc_tab.h
diff --git a/media/codecs/m4v_h263/enc/src/vlc_encode.cpp b/media/module/codecs/m4v_h263/enc/src/vlc_encode.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/vlc_encode.cpp
rename to media/module/codecs/m4v_h263/enc/src/vlc_encode.cpp
diff --git a/media/codecs/m4v_h263/enc/src/vlc_encode.h b/media/module/codecs/m4v_h263/enc/src/vlc_encode.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/vlc_encode.h
rename to media/module/codecs/m4v_h263/enc/src/vlc_encode.h
diff --git a/media/codecs/m4v_h263/enc/src/vlc_encode_inline.h b/media/module/codecs/m4v_h263/enc/src/vlc_encode_inline.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/vlc_encode_inline.h
rename to media/module/codecs/m4v_h263/enc/src/vlc_encode_inline.h
diff --git a/media/codecs/m4v_h263/enc/src/vop.cpp b/media/module/codecs/m4v_h263/enc/src/vop.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/src/vop.cpp
rename to media/module/codecs/m4v_h263/enc/src/vop.cpp
diff --git a/media/codecs/m4v_h263/enc/test/Android.bp b/media/module/codecs/m4v_h263/enc/test/Android.bp
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/Android.bp
rename to media/module/codecs/m4v_h263/enc/test/Android.bp
diff --git a/media/codecs/m4v_h263/enc/test/AndroidTest.xml b/media/module/codecs/m4v_h263/enc/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/AndroidTest.xml
rename to media/module/codecs/m4v_h263/enc/test/AndroidTest.xml
diff --git a/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp b/media/module/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
rename to media/module/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTest.cpp
diff --git a/media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h b/media/module/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
rename to media/module/codecs/m4v_h263/enc/test/Mpeg4H263EncoderTestEnvironment.h
diff --git a/media/codecs/m4v_h263/enc/test/README.md b/media/module/codecs/m4v_h263/enc/test/README.md
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/README.md
rename to media/module/codecs/m4v_h263/enc/test/README.md
diff --git a/media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp b/media/module/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
similarity index 100%
rename from media/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
rename to media/module/codecs/m4v_h263/enc/test/m4v_h263_enc_test.cpp
diff --git a/media/codecs/m4v_h263/fuzzer/Android.bp b/media/module/codecs/m4v_h263/fuzzer/Android.bp
similarity index 81%
rename from media/codecs/m4v_h263/fuzzer/Android.bp
rename to media/module/codecs/m4v_h263/fuzzer/Android.bp
index a052c11..4d0ed18 100644
--- a/media/codecs/m4v_h263/fuzzer/Android.bp
+++ b/media/module/codecs/m4v_h263/fuzzer/Android.bp
@@ -50,6 +50,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of libstagefright_m4vh263dec library",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -98,6 +106,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of libstagefright_m4vh263enc library",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
diff --git a/media/codecs/m4v_h263/fuzzer/README.md b/media/module/codecs/m4v_h263/fuzzer/README.md
similarity index 100%
rename from media/codecs/m4v_h263/fuzzer/README.md
rename to media/module/codecs/m4v_h263/fuzzer/README.md
diff --git a/media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict b/media/module/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
similarity index 100%
rename from media/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
rename to media/module/codecs/m4v_h263/fuzzer/h263_dec_fuzzer.dict
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict b/media/module/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
similarity index 100%
rename from media/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
rename to media/module/codecs/m4v_h263/fuzzer/mpeg4_dec_fuzzer.dict
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/module/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
similarity index 100%
rename from media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
rename to media/module/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp b/media/module/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
similarity index 100%
rename from media/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
rename to media/module/codecs/m4v_h263/fuzzer/mpeg4_h263_enc_fuzzer.cpp
diff --git a/media/codecs/m4v_h263/patent_disclaimer.txt b/media/module/codecs/m4v_h263/patent_disclaimer.txt
similarity index 100%
rename from media/codecs/m4v_h263/patent_disclaimer.txt
rename to media/module/codecs/m4v_h263/patent_disclaimer.txt
diff --git a/media/codecs/mp3dec/Android.bp b/media/module/codecs/mp3dec/Android.bp
similarity index 100%
rename from media/codecs/mp3dec/Android.bp
rename to media/module/codecs/mp3dec/Android.bp
diff --git a/media/codecs/mp3dec/MODULE_LICENSE_APACHE2 b/media/module/codecs/mp3dec/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/codecs/mp3dec/MODULE_LICENSE_APACHE2
rename to media/module/codecs/mp3dec/MODULE_LICENSE_APACHE2
diff --git a/media/codecs/mp3dec/NOTICE b/media/module/codecs/mp3dec/NOTICE
similarity index 100%
rename from media/codecs/mp3dec/NOTICE
rename to media/module/codecs/mp3dec/NOTICE
diff --git a/media/codecs/mp3dec/TEST_MAPPING b/media/module/codecs/mp3dec/TEST_MAPPING
similarity index 100%
rename from media/codecs/mp3dec/TEST_MAPPING
rename to media/module/codecs/mp3dec/TEST_MAPPING
diff --git a/media/codecs/mp3dec/fuzzer/Android.bp b/media/module/codecs/mp3dec/fuzzer/Android.bp
similarity index 85%
rename from media/codecs/mp3dec/fuzzer/Android.bp
rename to media/module/codecs/mp3dec/fuzzer/Android.bp
index 514a8a8..c5e0b1f 100644
--- a/media/codecs/mp3dec/fuzzer/Android.bp
+++ b/media/module/codecs/mp3dec/fuzzer/Android.bp
@@ -44,5 +44,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libstagefright_mp3dec",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/codecs/mp3dec/fuzzer/README.md b/media/module/codecs/mp3dec/fuzzer/README.md
similarity index 100%
rename from media/codecs/mp3dec/fuzzer/README.md
rename to media/module/codecs/mp3dec/fuzzer/README.md
diff --git a/media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp b/media/module/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
similarity index 100%
rename from media/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
rename to media/module/codecs/mp3dec/fuzzer/mp3_dec_fuzzer.cpp
diff --git a/media/codecs/mp3dec/include/mp3_decoder_selection.h b/media/module/codecs/mp3dec/include/mp3_decoder_selection.h
similarity index 100%
rename from media/codecs/mp3dec/include/mp3_decoder_selection.h
rename to media/module/codecs/mp3dec/include/mp3_decoder_selection.h
diff --git a/media/codecs/mp3dec/include/pvmp3_audio_type_defs.h b/media/module/codecs/mp3dec/include/pvmp3_audio_type_defs.h
similarity index 100%
rename from media/codecs/mp3dec/include/pvmp3_audio_type_defs.h
rename to media/module/codecs/mp3dec/include/pvmp3_audio_type_defs.h
diff --git a/media/codecs/mp3dec/include/pvmp3decoder_api.h b/media/module/codecs/mp3dec/include/pvmp3decoder_api.h
similarity index 100%
rename from media/codecs/mp3dec/include/pvmp3decoder_api.h
rename to media/module/codecs/mp3dec/include/pvmp3decoder_api.h
diff --git a/media/codecs/mp3dec/patent_disclaimer.txt b/media/module/codecs/mp3dec/patent_disclaimer.txt
similarity index 100%
rename from media/codecs/mp3dec/patent_disclaimer.txt
rename to media/module/codecs/mp3dec/patent_disclaimer.txt
diff --git a/media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s b/media/module/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
similarity index 100%
rename from media/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
rename to media/module/codecs/mp3dec/src/asm/pvmp3_dct_16_gcc.s
diff --git a/media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s b/media/module/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
similarity index 100%
rename from media/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
rename to media/module/codecs/mp3dec/src/asm/pvmp3_dct_9_gcc.s
diff --git a/media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s b/media/module/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
similarity index 100%
rename from media/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
rename to media/module/codecs/mp3dec/src/asm/pvmp3_mdct_18_gcc.s
diff --git a/media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s b/media/module/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
similarity index 100%
rename from media/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
rename to media/module/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
diff --git a/media/codecs/mp3dec/src/mp3_mem_funcs.h b/media/module/codecs/mp3dec/src/mp3_mem_funcs.h
similarity index 100%
rename from media/codecs/mp3dec/src/mp3_mem_funcs.h
rename to media/module/codecs/mp3dec/src/mp3_mem_funcs.h
diff --git a/media/codecs/mp3dec/src/pv_mp3_huffman.h b/media/module/codecs/mp3dec/src/pv_mp3_huffman.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3_huffman.h
rename to media/module/codecs/mp3dec/src/pv_mp3_huffman.h
diff --git a/media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h b/media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
rename to media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op.h
diff --git a/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h b/media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
rename to media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm.h
diff --git a/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h b/media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
rename to media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_arm_gcc.h
diff --git a/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h b/media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
rename to media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_c_equivalent.h
diff --git a/media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h b/media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
similarity index 100%
rename from media/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
rename to media/module/codecs/mp3dec/src/pv_mp3dec_fxd_op_msc_evc.h
diff --git a/media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp b/media/module/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_alias_reduction.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_alias_reduction.h b/media/module/codecs/mp3dec/src/pvmp3_alias_reduction.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_alias_reduction.h
rename to media/module/codecs/mp3dec/src/pvmp3_alias_reduction.h
diff --git a/media/codecs/mp3dec/src/pvmp3_crc.cpp b/media/module/codecs/mp3dec/src/pvmp3_crc.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_crc.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_crc.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_crc.h b/media/module/codecs/mp3dec/src/pvmp3_crc.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_crc.h
rename to media/module/codecs/mp3dec/src/pvmp3_crc.h
diff --git a/media/codecs/mp3dec/src/pvmp3_dct_16.cpp b/media/module/codecs/mp3dec/src/pvmp3_dct_16.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dct_16.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_dct_16.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_dct_16.h b/media/module/codecs/mp3dec/src/pvmp3_dct_16.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dct_16.h
rename to media/module/codecs/mp3dec/src/pvmp3_dct_16.h
diff --git a/media/codecs/mp3dec/src/pvmp3_dct_6.cpp b/media/module/codecs/mp3dec/src/pvmp3_dct_6.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dct_6.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_dct_6.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_dct_9.cpp b/media/module/codecs/mp3dec/src/pvmp3_dct_9.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dct_9.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_dct_9.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_dec_defs.h b/media/module/codecs/mp3dec/src/pvmp3_dec_defs.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dec_defs.h
rename to media/module/codecs/mp3dec/src/pvmp3_dec_defs.h
diff --git a/media/codecs/mp3dec/src/pvmp3_decode_header.cpp b/media/module/codecs/mp3dec/src/pvmp3_decode_header.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_decode_header.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_decode_header.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_decode_header.h b/media/module/codecs/mp3dec/src/pvmp3_decode_header.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_decode_header.h
rename to media/module/codecs/mp3dec/src/pvmp3_decode_header.h
diff --git a/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp b/media/module/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_decode_huff_cw.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h b/media/module/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
rename to media/module/codecs/mp3dec/src/pvmp3_decode_huff_cw.h
diff --git a/media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp b/media/module/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_dequantize_sample.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_dequantize_sample.h b/media/module/codecs/mp3dec/src/pvmp3_dequantize_sample.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_dequantize_sample.h
rename to media/module/codecs/mp3dec/src/pvmp3_dequantize_sample.h
diff --git a/media/codecs/mp3dec/src/pvmp3_equalizer.cpp b/media/module/codecs/mp3dec/src/pvmp3_equalizer.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_equalizer.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_equalizer.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_equalizer.h b/media/module/codecs/mp3dec/src/pvmp3_equalizer.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_equalizer.h
rename to media/module/codecs/mp3dec/src/pvmp3_equalizer.h
diff --git a/media/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_framedecoder.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_framedecoder.h b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_framedecoder.h
rename to media/module/codecs/mp3dec/src/pvmp3_framedecoder.h
diff --git a/media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp b/media/module/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_get_main_data_size.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_get_main_data_size.h b/media/module/codecs/mp3dec/src/pvmp3_get_main_data_size.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_main_data_size.h
rename to media/module/codecs/mp3dec/src/pvmp3_get_main_data_size.h
diff --git a/media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp b/media/module/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_get_scale_factors.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_get_scale_factors.h b/media/module/codecs/mp3dec/src/pvmp3_get_scale_factors.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_scale_factors.h
rename to media/module/codecs/mp3dec/src/pvmp3_get_scale_factors.h
diff --git a/media/codecs/mp3dec/src/pvmp3_get_side_info.cpp b/media/module/codecs/mp3dec/src/pvmp3_get_side_info.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_side_info.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_get_side_info.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_get_side_info.h b/media/module/codecs/mp3dec/src/pvmp3_get_side_info.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_get_side_info.h
rename to media/module/codecs/mp3dec/src/pvmp3_get_side_info.h
diff --git a/media/codecs/mp3dec/src/pvmp3_getbits.cpp b/media/module/codecs/mp3dec/src/pvmp3_getbits.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_getbits.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_getbits.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_getbits.h b/media/module/codecs/mp3dec/src/pvmp3_getbits.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_getbits.h
rename to media/module/codecs/mp3dec/src/pvmp3_getbits.h
diff --git a/media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp b/media/module/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_huffman_decoding.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp b/media/module/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_huffman_parsing.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp b/media/module/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_imdct_synth.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_imdct_synth.h b/media/module/codecs/mp3dec/src/pvmp3_imdct_synth.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_imdct_synth.h
rename to media/module/codecs/mp3dec/src/pvmp3_imdct_synth.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mdct_18.cpp b/media/module/codecs/mp3dec/src/pvmp3_mdct_18.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mdct_18.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_mdct_18.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_mdct_18.h b/media/module/codecs/mp3dec/src/pvmp3_mdct_18.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mdct_18.h
rename to media/module/codecs/mp3dec/src/pvmp3_mdct_18.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mdct_6.cpp b/media/module/codecs/mp3dec/src/pvmp3_mdct_6.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mdct_6.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_mdct_6.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_mdct_6.h b/media/module/codecs/mp3dec/src/pvmp3_mdct_6.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mdct_6.h
rename to media/module/codecs/mp3dec/src/pvmp3_mdct_6.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_factors.h
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h b/media/module/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
rename to media/module/codecs/mp3dec/src/pvmp3_mpeg2_stereo_proc.h
diff --git a/media/codecs/mp3dec/src/pvmp3_normalize.cpp b/media/module/codecs/mp3dec/src/pvmp3_normalize.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_normalize.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_normalize.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_normalize.h b/media/module/codecs/mp3dec/src/pvmp3_normalize.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_normalize.h
rename to media/module/codecs/mp3dec/src/pvmp3_normalize.h
diff --git a/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp b/media/module/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h b/media/module/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
rename to media/module/codecs/mp3dec/src/pvmp3_poly_phase_synthesis.h
diff --git a/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp b/media/module/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_polyphase_filter_window.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h b/media/module/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
rename to media/module/codecs/mp3dec/src/pvmp3_polyphase_filter_window.h
diff --git a/media/codecs/mp3dec/src/pvmp3_reorder.cpp b/media/module/codecs/mp3dec/src/pvmp3_reorder.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_reorder.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_reorder.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_reorder.h b/media/module/codecs/mp3dec/src/pvmp3_reorder.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_reorder.h
rename to media/module/codecs/mp3dec/src/pvmp3_reorder.h
diff --git a/media/codecs/mp3dec/src/pvmp3_seek_synch.cpp b/media/module/codecs/mp3dec/src/pvmp3_seek_synch.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_seek_synch.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_seek_synch.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_seek_synch.h b/media/module/codecs/mp3dec/src/pvmp3_seek_synch.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_seek_synch.h
rename to media/module/codecs/mp3dec/src/pvmp3_seek_synch.h
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/module/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.h b/media/module/codecs/mp3dec/src/pvmp3_stereo_proc.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_stereo_proc.h
rename to media/module/codecs/mp3dec/src/pvmp3_stereo_proc.h
diff --git a/media/codecs/mp3dec/src/pvmp3_tables.cpp b/media/module/codecs/mp3dec/src/pvmp3_tables.cpp
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_tables.cpp
rename to media/module/codecs/mp3dec/src/pvmp3_tables.cpp
diff --git a/media/codecs/mp3dec/src/pvmp3_tables.h b/media/module/codecs/mp3dec/src/pvmp3_tables.h
similarity index 100%
rename from media/codecs/mp3dec/src/pvmp3_tables.h
rename to media/module/codecs/mp3dec/src/pvmp3_tables.h
diff --git a/media/codecs/mp3dec/src/s_huffcodetab.h b/media/module/codecs/mp3dec/src/s_huffcodetab.h
similarity index 100%
rename from media/codecs/mp3dec/src/s_huffcodetab.h
rename to media/module/codecs/mp3dec/src/s_huffcodetab.h
diff --git a/media/codecs/mp3dec/src/s_mp3bits.h b/media/module/codecs/mp3dec/src/s_mp3bits.h
similarity index 100%
rename from media/codecs/mp3dec/src/s_mp3bits.h
rename to media/module/codecs/mp3dec/src/s_mp3bits.h
diff --git a/media/codecs/mp3dec/src/s_tmp3dec_chan.h b/media/module/codecs/mp3dec/src/s_tmp3dec_chan.h
similarity index 100%
rename from media/codecs/mp3dec/src/s_tmp3dec_chan.h
rename to media/module/codecs/mp3dec/src/s_tmp3dec_chan.h
diff --git a/media/codecs/mp3dec/src/s_tmp3dec_file.h b/media/module/codecs/mp3dec/src/s_tmp3dec_file.h
similarity index 100%
rename from media/codecs/mp3dec/src/s_tmp3dec_file.h
rename to media/module/codecs/mp3dec/src/s_tmp3dec_file.h
diff --git a/media/codecs/mp3dec/test/Android.bp b/media/module/codecs/mp3dec/test/Android.bp
similarity index 100%
rename from media/codecs/mp3dec/test/Android.bp
rename to media/module/codecs/mp3dec/test/Android.bp
diff --git a/media/codecs/mp3dec/test/AndroidTest.xml b/media/module/codecs/mp3dec/test/AndroidTest.xml
similarity index 100%
rename from media/codecs/mp3dec/test/AndroidTest.xml
rename to media/module/codecs/mp3dec/test/AndroidTest.xml
diff --git a/media/codecs/mp3dec/test/Mp3DecoderTest.cpp b/media/module/codecs/mp3dec/test/Mp3DecoderTest.cpp
similarity index 89%
rename from media/codecs/mp3dec/test/Mp3DecoderTest.cpp
rename to media/module/codecs/mp3dec/test/Mp3DecoderTest.cpp
index 91326a8..88e9eae 100644
--- a/media/codecs/mp3dec/test/Mp3DecoderTest.cpp
+++ b/media/module/codecs/mp3dec/test/Mp3DecoderTest.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <audio_utils/sndfile.h>
+#include <memory>
 #include <stdio.h>
 
 #include "mp3reader.h"
@@ -99,27 +100,23 @@
 TEST_F(Mp3DecoderTest, MultiCreateMp3DecoderTest) {
     size_t memRequirements = pvmp3_decoderMemRequirements();
     ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
+    unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
     for (int count = 0; count < kMaxCount; count++) {
-        pvmp3_InitDecoder(mConfig, decoderBuf);
+        pvmp3_InitDecoder(mConfig, (void*)decoderBuf.get());
         ALOGV("Decoder created successfully");
     }
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
 }
 
 TEST_P(Mp3DecoderTest, DecodeTest) {
     size_t memRequirements = pvmp3_decoderMemRequirements();
     ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
+    unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
 
-    pvmp3_InitDecoder(mConfig, decoderBuf);
+    pvmp3_InitDecoder(mConfig, (void*)decoderBuf.get());
     ALOGV("Decoder created successfully");
     string inputFile = gEnv->getRes() + GetParam();
     bool status = mMp3Reader.init(inputFile.c_str());
@@ -130,27 +127,23 @@
     SNDFILE *outFileHandle = openOutputFile(&sfInfo);
     ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
 
-    ERROR_CODE decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+    ERROR_CODE decoderErr = DecodeFrames((void*)decoderBuf.get(), outFileHandle, sfInfo);
     ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
     ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
     ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
 
     mMp3Reader.close();
     sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
 }
 
 TEST_P(Mp3DecoderTest, ResetDecoderTest) {
     size_t memRequirements = pvmp3_decoderMemRequirements();
     ASSERT_NE(memRequirements, 0) << "Failed to get the memory requirement size";
-    void *decoderBuf = malloc(memRequirements);
+    unique_ptr<char[]> decoderBuf(new char[memRequirements]);
     ASSERT_NE(decoderBuf, nullptr)
             << "Failed to allocate decoder memory of size " << memRequirements;
 
-    pvmp3_InitDecoder(mConfig, decoderBuf);
+    pvmp3_InitDecoder(mConfig, (void*)decoderBuf.get());
     ALOGV("Decoder created successfully.");
     string inputFile = gEnv->getRes() + GetParam();
     bool status = mMp3Reader.init(inputFile.c_str());
@@ -162,24 +155,20 @@
     ASSERT_NE(outFileHandle, nullptr) << "Error opening output file for writing decoded output";
 
     ERROR_CODE decoderErr;
-    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo, kNumFrameReset);
+    decoderErr = DecodeFrames((void*)decoderBuf.get(), outFileHandle, sfInfo, kNumFrameReset);
     ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
     ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
     ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
 
-    pvmp3_resetDecoder(decoderBuf);
+    pvmp3_resetDecoder((void*)decoderBuf.get());
     // Decode the same file.
-    decoderErr = DecodeFrames(decoderBuf, outFileHandle, sfInfo);
+    decoderErr = DecodeFrames((void*)decoderBuf.get(), outFileHandle, sfInfo);
     ASSERT_EQ(decoderErr, NO_DECODING_ERROR) << "Failed to decode the frames";
     ASSERT_EQ(sfInfo.channels, mConfig->num_channels) << "Number of channels does not match";
     ASSERT_EQ(sfInfo.samplerate, mConfig->samplingRate) << "Sample rate does not match";
 
     mMp3Reader.close();
     sf_close(outFileHandle);
-    if (decoderBuf) {
-        free(decoderBuf);
-        decoderBuf = nullptr;
-    }
 }
 
 INSTANTIATE_TEST_SUITE_P(Mp3DecoderTestAll, Mp3DecoderTest,
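The Mp3DecoderTest hunks above swap the raw malloc/free of the decoder scratch buffer for std::unique_ptr<char[]>, so the memory is released on every early ASSERT_* exit. A minimal standalone sketch of the same RAII pattern, assuming a hypothetical C-style decoder API (decoder_mem_size and decoder_init are illustrative stand-ins, not the real pvmp3_* entry points):

    #include <cstddef>
    #include <memory>

    // Hypothetical C-style decoder API, for illustration only.
    static size_t decoder_mem_size() { return 1024; }
    static void decoder_init(void* /*scratch*/) {}

    int main() {
        // RAII scratch buffer: released automatically on every return path,
        // mirroring the unique_ptr<char[]> change in Mp3DecoderTest.cpp.
        std::unique_ptr<char[]> scratch(new char[decoder_mem_size()]);
        decoder_init(static_cast<void*>(scratch.get()));
        return 0;   // no explicit free() needed
    }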
diff --git a/media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h b/media/module/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
similarity index 100%
rename from media/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
rename to media/module/codecs/mp3dec/test/Mp3DecoderTestEnvironment.h
diff --git a/media/codecs/mp3dec/test/README.md b/media/module/codecs/mp3dec/test/README.md
similarity index 100%
rename from media/codecs/mp3dec/test/README.md
rename to media/module/codecs/mp3dec/test/README.md
diff --git a/media/codecs/mp3dec/test/mp3dec_test.cpp b/media/module/codecs/mp3dec/test/mp3dec_test.cpp
similarity index 100%
rename from media/codecs/mp3dec/test/mp3dec_test.cpp
rename to media/module/codecs/mp3dec/test/mp3dec_test.cpp
diff --git a/media/codecs/mp3dec/test/mp3reader.cpp b/media/module/codecs/mp3dec/test/mp3reader.cpp
similarity index 100%
rename from media/codecs/mp3dec/test/mp3reader.cpp
rename to media/module/codecs/mp3dec/test/mp3reader.cpp
diff --git a/media/codecs/mp3dec/test/mp3reader.h b/media/module/codecs/mp3dec/test/mp3reader.h
similarity index 100%
rename from media/codecs/mp3dec/test/mp3reader.h
rename to media/module/codecs/mp3dec/test/mp3reader.h
diff --git a/services/mediacodec/registrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
similarity index 95%
rename from services/mediacodec/registrant/Android.bp
rename to media/module/codecserviceregistrant/Android.bp
index 12cc32a..f3a1723 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -4,7 +4,6 @@
     // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
 }
 
 cc_library {
@@ -62,6 +61,7 @@
         "libcodec2_soft_vp9dec",
         // "libcodec2_soft_av1dec_aom",  // replaced by the gav1 implementation
         "libcodec2_soft_av1dec_gav1",
+        "libcodec2_soft_av1enc",
         "libcodec2_soft_vp8enc",
         "libcodec2_soft_vp9enc",
         "libcodec2_soft_rawdec",
diff --git a/services/mediacodec/registrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
similarity index 100%
rename from services/mediacodec/registrant/CodecServiceRegistrant.cpp
rename to media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
similarity index 81%
rename from services/mediacodec/registrant/fuzzer/Android.bp
rename to media/module/codecserviceregistrant/fuzzer/Android.bp
index 43afbf1..1cb8c2b 100644
--- a/services/mediacodec/registrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -20,7 +20,6 @@
     // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
 }
 
 cc_fuzz {
@@ -42,5 +41,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmedia_codecserviceregistrant",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
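For context, a cc_fuzz module such as this one builds a libFuzzer binary around the standard entry point sketched below; this is only a skeleton under that assumption, not the actual codecServiceRegistrant_fuzzer.cpp, which exercises the registrant APIs.

    #include <cstddef>
    #include <cstdint>

    // Standard libFuzzer entry point that a cc_fuzz binary is linked around.
    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        if (size == 0) {
            return 0;
        }
        (void)data;   // feed data/size into the API under test here
        return 0;     // return 0 for inputs that do not crash
    }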
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/media/module/codecserviceregistrant/fuzzer/README.md
similarity index 100%
rename from services/mediacodec/registrant/fuzzer/README.md
rename to media/module/codecserviceregistrant/fuzzer/README.md
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
similarity index 100%
rename from services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
rename to media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
diff --git a/media/module/esds/Android.bp b/media/module/esds/Android.bp
new file mode 100644
index 0000000..272d4d7
--- /dev/null
+++ b/media/module/esds/Android.bp
@@ -0,0 +1,44 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_static {
+    name: "libstagefright_esds",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+
+    export_include_dirs: ["include"],
+
+    local_include_dirs: ["include"],
+
+    srcs: ["ESDS.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+    shared_libs: [
+        "libstagefright_foundation",
+        "libutils"
+    ],
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
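With this library in place, consumers pick up the header from the exported include path. A minimal usage sketch, assuming the ESDS(const void*, size_t) constructor and InitCheck() exercised by the ESDSTest diff further below, and that the caller links against libstagefright_esds:

    #include <cstddef>
    #include <media/esds/ESDS.h>

    // Returns true when the buffer parses as a valid ES descriptor.
    static bool isValidEsds(const void* data, size_t size) {
        android::ESDS esds(data, size);
        return esds.InitCheck() == android::OK;
    }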
diff --git a/media/libstagefright/ESDS.cpp b/media/module/esds/ESDS.cpp
similarity index 99%
rename from media/libstagefright/ESDS.cpp
rename to media/module/esds/ESDS.cpp
index ea059e8..906250b 100644
--- a/media/libstagefright/ESDS.cpp
+++ b/media/module/esds/ESDS.cpp
@@ -20,7 +20,7 @@
 
 #include <media/stagefright/foundation/ByteUtils.h>
 
-#include "include/ESDS.h"
+#include <media/esds/ESDS.h>
 
 #include <string.h>
 
diff --git a/media/module/esds/TEST_MAPPING b/media/module/esds/TEST_MAPPING
new file mode 100644
index 0000000..9368b6d
--- /dev/null
+++ b/media/module/esds/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/module/esds
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "ESDSTest" }
+  ]
+}
diff --git a/media/libstagefright/include/ESDS.h b/media/module/esds/include/media/esds/ESDS.h
similarity index 100%
rename from media/libstagefright/include/ESDS.h
rename to media/module/esds/include/media/esds/ESDS.h
diff --git a/media/libstagefright/tests/ESDS/Android.bp b/media/module/esds/tests/Android.bp
similarity index 93%
rename from media/libstagefright/tests/ESDS/Android.bp
rename to media/module/esds/tests/Android.bp
index 04e9b29..aea611e 100644
--- a/media/libstagefright/tests/ESDS/Android.bp
+++ b/media/module/esds/tests/Android.bp
@@ -20,9 +20,6 @@
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_tests_license",
-    ],
 }
 
 cc_test {
diff --git a/media/libstagefright/tests/ESDS/AndroidTest.xml b/media/module/esds/tests/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/tests/ESDS/AndroidTest.xml
rename to media/module/esds/tests/AndroidTest.xml
diff --git a/media/libstagefright/tests/ESDS/ESDSTest.cpp b/media/module/esds/tests/ESDSTest.cpp
similarity index 96%
rename from media/libstagefright/tests/ESDS/ESDSTest.cpp
rename to media/module/esds/tests/ESDSTest.cpp
index 101e00c..33bdcac 100644
--- a/media/libstagefright/tests/ESDS/ESDSTest.cpp
+++ b/media/module/esds/tests/ESDSTest.cpp
@@ -21,8 +21,9 @@
 #include <stdio.h>
 #include <string.h>
 #include <fstream>
+#include <memory>
 
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
 #include <binder/ProcessState.h>
 #include <datasource/FileSource.h>
 #include <media/stagefright/MediaExtractorFactory.h>
@@ -121,18 +122,16 @@
 };
 
 TEST_P(ESDSUnitTest, InvalidDataTest) {
-    void *invalidData = calloc(mESDSSize, 1);
+    std::unique_ptr<char[]> invalidData(new char[mESDSSize]());
     ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
-    ESDS esds(invalidData, mESDSSize);
-    free(invalidData);
+    ESDS esds((void*)invalidData.get(), mESDSSize);
     ASSERT_NE(esds.InitCheck(), OK) << "invalid ESDS data accepted";
 }
 
 TEST(ESDSSanityUnitTest, ConstructorSanityTest) {
-    void *invalidData = malloc(1);
+    std::unique_ptr<char[]> invalidData(new char[1]());
     ASSERT_NE(invalidData, nullptr) << "Unable to allocate memory";
-    ESDS esds_zero(invalidData, 0);
-    free(invalidData);
+    ESDS esds_zero((void*)invalidData.get(), 0);
     ASSERT_NE(esds_zero.InitCheck(), OK) << "invalid ESDS data accepted";
 
     ESDS esds_null(NULL, 0);
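The calloc-to-unique_ptr change above relies on the parenthesized new char[n]() form, which value-initializes the array, so the buffer comes back zero-filled like calloc(n, 1) and is released automatically when the test returns. A small self-contained sketch of that behaviour:

    #include <cstddef>
    #include <cstring>
    #include <memory>

    int main() {
        const size_t n = 64;
        // Parenthesized array-new value-initializes: every byte is zero,
        // matching the calloc(n, 1) semantics the old test code depended on.
        std::unique_ptr<char[]> buf(new char[n]());
        char zeros[n] = {};
        return std::memcmp(buf.get(), zeros, n) == 0 ? 0 : 1;
    }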
diff --git a/media/libstagefright/tests/ESDS/ESDSTestEnvironment.h b/media/module/esds/tests/ESDSTestEnvironment.h
similarity index 100%
rename from media/libstagefright/tests/ESDS/ESDSTestEnvironment.h
rename to media/module/esds/tests/ESDSTestEnvironment.h
diff --git a/media/libstagefright/tests/ESDS/README.md b/media/module/esds/tests/README.md
similarity index 100%
rename from media/libstagefright/tests/ESDS/README.md
rename to media/module/esds/tests/README.md
diff --git a/media/extractors/Android.bp b/media/module/extractors/Android.bp
similarity index 100%
rename from media/extractors/Android.bp
rename to media/module/extractors/Android.bp
diff --git a/media/extractors/TEST_MAPPING b/media/module/extractors/TEST_MAPPING
similarity index 100%
rename from media/extractors/TEST_MAPPING
rename to media/module/extractors/TEST_MAPPING
diff --git a/media/extractors/aac/AACExtractor.cpp b/media/module/extractors/aac/AACExtractor.cpp
similarity index 98%
rename from media/extractors/aac/AACExtractor.cpp
rename to media/module/extractors/aac/AACExtractor.cpp
index 2fc4584..a44fb61 100644
--- a/media/extractors/aac/AACExtractor.cpp
+++ b/media/module/extractors/aac/AACExtractor.cpp
@@ -310,9 +310,9 @@
         return AMEDIA_ERROR_END_OF_STREAM;
     }
 
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     status_t err = mBufferGroup->acquire_buffer(&buffer);
-    if (err != OK) {
+    if (err != OK || buffer == nullptr) {
         return AMEDIA_ERROR_UNKNOWN;
     }
 
diff --git a/media/extractors/aac/Android.bp b/media/module/extractors/aac/Android.bp
similarity index 100%
rename from media/extractors/aac/Android.bp
rename to media/module/extractors/aac/Android.bp
diff --git a/media/extractors/aac/MODULE_LICENSE_APACHE2 b/media/module/extractors/aac/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/aac/MODULE_LICENSE_APACHE2
rename to media/module/extractors/aac/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/aac/NOTICE b/media/module/extractors/aac/NOTICE
similarity index 100%
rename from media/extractors/aac/NOTICE
rename to media/module/extractors/aac/NOTICE
diff --git a/media/extractors/aac/exports.lds b/media/module/extractors/aac/exports.lds
similarity index 100%
rename from media/extractors/aac/exports.lds
rename to media/module/extractors/aac/exports.lds
diff --git a/media/extractors/aac/include/AACExtractor.h b/media/module/extractors/aac/include/AACExtractor.h
similarity index 100%
rename from media/extractors/aac/include/AACExtractor.h
rename to media/module/extractors/aac/include/AACExtractor.h
diff --git a/media/extractors/amr/AMRExtractor.cpp b/media/module/extractors/amr/AMRExtractor.cpp
similarity index 98%
rename from media/extractors/amr/AMRExtractor.cpp
rename to media/module/extractors/amr/AMRExtractor.cpp
index e26ff0a..b0f69ce 100644
--- a/media/extractors/amr/AMRExtractor.cpp
+++ b/media/module/extractors/amr/AMRExtractor.cpp
@@ -341,9 +341,9 @@
         return AMEDIA_ERROR_MALFORMED;
     }
 
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     status_t err = mBufferGroup->acquire_buffer(&buffer);
-    if (err != OK) {
+    if (err != OK || buffer == nullptr) {
         return AMEDIA_ERROR_UNKNOWN;
     }
 
diff --git a/media/extractors/amr/Android.bp b/media/module/extractors/amr/Android.bp
similarity index 100%
rename from media/extractors/amr/Android.bp
rename to media/module/extractors/amr/Android.bp
diff --git a/media/extractors/amr/MODULE_LICENSE_APACHE2 b/media/module/extractors/amr/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/amr/MODULE_LICENSE_APACHE2
rename to media/module/extractors/amr/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/amr/NOTICE b/media/module/extractors/amr/NOTICE
similarity index 100%
rename from media/extractors/amr/NOTICE
rename to media/module/extractors/amr/NOTICE
diff --git a/media/extractors/amr/exports.lds b/media/module/extractors/amr/exports.lds
similarity index 100%
rename from media/extractors/amr/exports.lds
rename to media/module/extractors/amr/exports.lds
diff --git a/media/extractors/amr/include/AMRExtractor.h b/media/module/extractors/amr/include/AMRExtractor.h
similarity index 100%
rename from media/extractors/amr/include/AMRExtractor.h
rename to media/module/extractors/amr/include/AMRExtractor.h
diff --git a/media/extractors/flac/Android.bp b/media/module/extractors/flac/Android.bp
similarity index 100%
rename from media/extractors/flac/Android.bp
rename to media/module/extractors/flac/Android.bp
diff --git a/media/extractors/flac/FLACExtractor.cpp b/media/module/extractors/flac/FLACExtractor.cpp
similarity index 99%
rename from media/extractors/flac/FLACExtractor.cpp
rename to media/module/extractors/flac/FLACExtractor.cpp
index ec7cb24..2434e41 100644
--- a/media/extractors/flac/FLACExtractor.cpp
+++ b/media/module/extractors/flac/FLACExtractor.cpp
@@ -614,9 +614,9 @@
     }
     // acquire a media buffer
     CHECK(mGroup != NULL);
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     status_t err = mGroup->acquire_buffer(&buffer);
-    if (err != OK) {
+    if (err != OK || buffer == nullptr) {
         return NULL;
     }
     const size_t bufferSize = blocksize * getChannels() * getOutputSampleSize();
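The AAC, AMR, and FLAC hunks above, like the MIDI, MP3, MP4, MPEG-2, and WAV ones later in this change, apply the same hardening: initialize the MediaBufferHelper pointer and treat a null buffer as a failure even when acquire_buffer() returns OK. A minimal sketch of the pattern, with get_buffer() as an illustrative stand-in for the real MediaBufferGroup API:

    #include <cstdio>

    struct Buffer { /* stand-in for MediaBufferHelper */ };

    // Illustrative stand-in for acquire_buffer(); 0 means OK.
    static int get_buffer(Buffer** out) { *out = nullptr; return 0; }

    static int readOne() {
        Buffer* buffer = nullptr;              // never left uninitialized
        int err = get_buffer(&buffer);
        if (err != 0 || buffer == nullptr) {   // check the status AND the pointer
            std::fprintf(stderr, "no buffer\n");
            return -1;                         // AMEDIA_ERROR_UNKNOWN analogue
        }
        // ... fill and hand back the buffer ...
        return 0;
    }

    int main() { readOne(); return 0; }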
diff --git a/media/extractors/flac/MODULE_LICENSE_APACHE2 b/media/module/extractors/flac/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/flac/MODULE_LICENSE_APACHE2
rename to media/module/extractors/flac/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/flac/NOTICE b/media/module/extractors/flac/NOTICE
similarity index 100%
rename from media/extractors/flac/NOTICE
rename to media/module/extractors/flac/NOTICE
diff --git a/media/extractors/flac/exports.lds b/media/module/extractors/flac/exports.lds
similarity index 100%
rename from media/extractors/flac/exports.lds
rename to media/module/extractors/flac/exports.lds
diff --git a/media/extractors/flac/include/FLACExtractor.h b/media/module/extractors/flac/include/FLACExtractor.h
similarity index 100%
rename from media/extractors/flac/include/FLACExtractor.h
rename to media/module/extractors/flac/include/FLACExtractor.h
diff --git a/media/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
similarity index 95%
rename from media/extractors/fuzzers/Android.bp
rename to media/module/extractors/fuzzers/Android.bp
index 490e195..91ca7b1 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -72,6 +72,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzers target the APIs of all the various extractors",
+        vector: "remote",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -173,7 +181,7 @@
     ],
 
     static_libs: [
-        "libwebm",
+        "libwebm_mkvparser",
         "libstagefright_flacdec",
         "libstagefright_metadatautils",
         "libmkvextractor",
diff --git a/media/extractors/fuzzers/ExtractorFuzzerBase.cpp b/media/module/extractors/fuzzers/ExtractorFuzzerBase.cpp
similarity index 100%
rename from media/extractors/fuzzers/ExtractorFuzzerBase.cpp
rename to media/module/extractors/fuzzers/ExtractorFuzzerBase.cpp
diff --git a/media/extractors/fuzzers/README.md b/media/module/extractors/fuzzers/README.md
similarity index 100%
rename from media/extractors/fuzzers/README.md
rename to media/module/extractors/fuzzers/README.md
diff --git a/media/extractors/fuzzers/aac_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/aac_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/aac_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/aac_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/amr_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/amr_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/amr_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/amr_extractor_fuzzer.dict b/media/module/extractors/fuzzers/amr_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/amr_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/amr_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/flac_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/flac_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/flac_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/flac_extractor_fuzzer.dict b/media/module/extractors/fuzzers/flac_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/flac_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/flac_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/include/ExtractorFuzzerBase.h b/media/module/extractors/fuzzers/include/ExtractorFuzzerBase.h
similarity index 100%
rename from media/extractors/fuzzers/include/ExtractorFuzzerBase.h
rename to media/module/extractors/fuzzers/include/ExtractorFuzzerBase.h
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/midi_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/midi_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/midi_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/midi_extractor_fuzzer.dict b/media/module/extractors/fuzzers/midi_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/midi_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/midi_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/mkv_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/mkv_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/mkv_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/mkv_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mkv_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/mkv_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/mkv_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/mp3_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/mp3_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/mp3_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/mp3_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/mp4_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/mp4_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/mp4_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/mpeg2_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/mpeg2ps_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/mpeg2ts_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/ogg_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/ogg_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/ogg_extractor_fuzzer.cpp
diff --git a/media/extractors/fuzzers/ogg_extractor_fuzzer.dict b/media/module/extractors/fuzzers/ogg_extractor_fuzzer.dict
similarity index 100%
rename from media/extractors/fuzzers/ogg_extractor_fuzzer.dict
rename to media/module/extractors/fuzzers/ogg_extractor_fuzzer.dict
diff --git a/media/extractors/fuzzers/wav_extractor_fuzzer.cpp b/media/module/extractors/fuzzers/wav_extractor_fuzzer.cpp
similarity index 100%
rename from media/extractors/fuzzers/wav_extractor_fuzzer.cpp
rename to media/module/extractors/fuzzers/wav_extractor_fuzzer.cpp
diff --git a/media/extractors/midi/Android.bp b/media/module/extractors/midi/Android.bp
similarity index 100%
rename from media/extractors/midi/Android.bp
rename to media/module/extractors/midi/Android.bp
diff --git a/media/extractors/midi/MODULE_LICENSE_APACHE2 b/media/module/extractors/midi/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/midi/MODULE_LICENSE_APACHE2
rename to media/module/extractors/midi/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/midi/MidiExtractor.cpp b/media/module/extractors/midi/MidiExtractor.cpp
similarity index 98%
rename from media/extractors/midi/MidiExtractor.cpp
rename to media/module/extractors/midi/MidiExtractor.cpp
index d0efb2f..167cc40 100644
--- a/media/extractors/midi/MidiExtractor.cpp
+++ b/media/module/extractors/midi/MidiExtractor.cpp
@@ -240,9 +240,9 @@
     if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
         return NULL;
     }
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     status_t err = mGroup->acquire_buffer(&buffer);
-    if (err != OK) {
+    if (err != OK || buffer == nullptr) {
         ALOGE("readBuffer: no buffer");
         return NULL;
     }
diff --git a/media/extractors/midi/NOTICE b/media/module/extractors/midi/NOTICE
similarity index 100%
rename from media/extractors/midi/NOTICE
rename to media/module/extractors/midi/NOTICE
diff --git a/media/extractors/midi/exports.lds b/media/module/extractors/midi/exports.lds
similarity index 100%
rename from media/extractors/midi/exports.lds
rename to media/module/extractors/midi/exports.lds
diff --git a/media/extractors/midi/include/MidiExtractor.h b/media/module/extractors/midi/include/MidiExtractor.h
similarity index 100%
rename from media/extractors/midi/include/MidiExtractor.h
rename to media/module/extractors/midi/include/MidiExtractor.h
diff --git a/media/extractors/mkv/Android.bp b/media/module/extractors/mkv/Android.bp
similarity index 96%
rename from media/extractors/mkv/Android.bp
rename to media/module/extractors/mkv/Android.bp
index 98ce305..c4b67eb 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/module/extractors/mkv/Android.bp
@@ -33,7 +33,7 @@
         "libstagefright_foundation_colorutils_ndk",   // for mainline-safe ColorUtils
         "libstagefright_foundation",
         "libstagefright_metadatautils",
-        "libwebm",
+        "libwebm_mkvparser",
         "libutils",
     ],
 
diff --git a/media/extractors/mkv/MODULE_LICENSE_APACHE2 b/media/module/extractors/mkv/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/mkv/MODULE_LICENSE_APACHE2
rename to media/module/extractors/mkv/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
similarity index 99%
rename from media/extractors/mkv/MatroskaExtractor.cpp
rename to media/module/extractors/mkv/MatroskaExtractor.cpp
index 443e26c..2b72387 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -790,6 +790,7 @@
     int64_t timeUs = mBlockIter.blockTimeUs();
 
     for (int i = 0; i < block->GetFrameCount(); ++i) {
+        status_t err;
         MatroskaExtractor::TrackInfo *trackInfo = &mExtractor->mTracks.editItemAt(mTrackIndex);
         const mkvparser::Block::Frame &frame = block->GetFrame(i);
         size_t len = frame.len;
@@ -798,8 +799,13 @@
         }
 
         len += trackInfo->mHeaderLen;
-        MediaBufferHelper *mbuf;
-        mBufferGroup->acquire_buffer(&mbuf, false /* nonblocking */, len /* requested size */);
+        MediaBufferHelper *mbuf = nullptr;
+        err = mBufferGroup->acquire_buffer(&mbuf, false /* nonblocking */,
+                                           len /* requested size */);
+        if (err != OK || mbuf == nullptr) {
+            ALOGE("readBlock: no buffer");
+            return AMEDIA_ERROR_UNKNOWN;
+        }
         mbuf->set_range(0, len);
         uint8_t *data = static_cast<uint8_t *>(mbuf->data());
         if (trackInfo->mHeader) {
@@ -832,7 +838,7 @@
             }
         }
 
-        status_t err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
+        err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
         if (err == OK
                 && mExtractor->mIsWebm
                 && trackInfo->mEncrypted) {
diff --git a/media/extractors/mkv/NOTICE b/media/module/extractors/mkv/NOTICE
similarity index 100%
rename from media/extractors/mkv/NOTICE
rename to media/module/extractors/mkv/NOTICE
diff --git a/media/extractors/mkv/exports.lds b/media/module/extractors/mkv/exports.lds
similarity index 100%
rename from media/extractors/mkv/exports.lds
rename to media/module/extractors/mkv/exports.lds
diff --git a/media/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
similarity index 100%
rename from media/extractors/mkv/include/MatroskaExtractor.h
rename to media/module/extractors/mkv/include/MatroskaExtractor.h
diff --git a/media/extractors/mp3/Android.bp b/media/module/extractors/mp3/Android.bp
similarity index 100%
rename from media/extractors/mp3/Android.bp
rename to media/module/extractors/mp3/Android.bp
diff --git a/media/extractors/mp3/MP3Extractor.cpp b/media/module/extractors/mp3/MP3Extractor.cpp
similarity index 99%
rename from media/extractors/mp3/MP3Extractor.cpp
rename to media/module/extractors/mp3/MP3Extractor.cpp
index 248a39c..328b790 100644
--- a/media/extractors/mp3/MP3Extractor.cpp
+++ b/media/module/extractors/mp3/MP3Extractor.cpp
@@ -521,9 +521,9 @@
         mSamplesRead = 0;
     }
 
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     status_t err = mBufferGroup->acquire_buffer(&buffer);
-    if (err != OK) {
+    if (err != OK || buffer == nullptr) {
         return AMEDIA_ERROR_UNKNOWN;
     }
 
diff --git a/media/extractors/mp3/VBRISeeker.cpp b/media/module/extractors/mp3/VBRISeeker.cpp
similarity index 100%
rename from media/extractors/mp3/VBRISeeker.cpp
rename to media/module/extractors/mp3/VBRISeeker.cpp
diff --git a/media/extractors/mp3/XINGSeeker.cpp b/media/module/extractors/mp3/XINGSeeker.cpp
similarity index 100%
rename from media/extractors/mp3/XINGSeeker.cpp
rename to media/module/extractors/mp3/XINGSeeker.cpp
diff --git a/media/extractors/mp3/exports.lds b/media/module/extractors/mp3/exports.lds
similarity index 100%
rename from media/extractors/mp3/exports.lds
rename to media/module/extractors/mp3/exports.lds
diff --git a/media/extractors/mp3/include/MP3Extractor.h b/media/module/extractors/mp3/include/MP3Extractor.h
similarity index 100%
rename from media/extractors/mp3/include/MP3Extractor.h
rename to media/module/extractors/mp3/include/MP3Extractor.h
diff --git a/media/extractors/mp3/include/MP3Seeker.h b/media/module/extractors/mp3/include/MP3Seeker.h
similarity index 100%
rename from media/extractors/mp3/include/MP3Seeker.h
rename to media/module/extractors/mp3/include/MP3Seeker.h
diff --git a/media/extractors/mp3/include/VBRISeeker.h b/media/module/extractors/mp3/include/VBRISeeker.h
similarity index 100%
rename from media/extractors/mp3/include/VBRISeeker.h
rename to media/module/extractors/mp3/include/VBRISeeker.h
diff --git a/media/extractors/mp3/include/XINGSeeker.h b/media/module/extractors/mp3/include/XINGSeeker.h
similarity index 100%
rename from media/extractors/mp3/include/XINGSeeker.h
rename to media/module/extractors/mp3/include/XINGSeeker.h
diff --git a/media/extractors/mp4/AC4Parser.cpp b/media/module/extractors/mp4/AC4Parser.cpp
similarity index 100%
rename from media/extractors/mp4/AC4Parser.cpp
rename to media/module/extractors/mp4/AC4Parser.cpp
diff --git a/media/extractors/mp4/Android.bp b/media/module/extractors/mp4/Android.bp
similarity index 100%
rename from media/extractors/mp4/Android.bp
rename to media/module/extractors/mp4/Android.bp
diff --git a/media/extractors/mp4/ItemTable.cpp b/media/module/extractors/mp4/ItemTable.cpp
similarity index 100%
rename from media/extractors/mp4/ItemTable.cpp
rename to media/module/extractors/mp4/ItemTable.cpp
diff --git a/media/extractors/mp4/MODULE_LICENSE_APACHE2 b/media/module/extractors/mp4/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/mp4/MODULE_LICENSE_APACHE2
rename to media/module/extractors/mp4/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
similarity index 98%
rename from media/extractors/mp4/MPEG4Extractor.cpp
rename to media/module/extractors/mp4/MPEG4Extractor.cpp
index 7f97ddc..1d88785 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -34,7 +34,7 @@
 #include "SampleTable.h"
 #include "ItemTable.h"
 
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
 #include <ID3.h>
 #include <media/stagefright/DataSourceBase.h>
 #include <media/ExtractorUtils.h>
@@ -394,6 +394,15 @@
             return MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
         case FOURCC("mhm1"):
             return MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
+        case FOURCC("dtsc"):
+            return MEDIA_MIMETYPE_AUDIO_DTS;
+        case FOURCC("dtse"):
+        case FOURCC("dtsh"):
+            return MEDIA_MIMETYPE_AUDIO_DTS_HD;
+        case FOURCC("dtsl"):
+            return MEDIA_MIMETYPE_AUDIO_DTS_HD_MA;
+        case FOURCC("dtsx"):
+            return MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2;
         default:
             ALOGW("Unknown fourcc: %c%c%c%c",
                    (fourcc >> 24) & 0xff,
@@ -1804,6 +1813,11 @@
         case 0x6D730055: // "ms U" mp3 audio
         case FOURCC("mha1"):
         case FOURCC("mhm1"):
+        case FOURCC("dtsc"):
+        case FOURCC("dtse"):
+        case FOURCC("dtsh"):
+        case FOURCC("dtsl"):
+        case FOURCC("dtsx"):
         {
             if (mIsQT && depth >= 1 && mPath[depth - 1] == FOURCC("wave")) {
 
@@ -1969,26 +1983,8 @@
             }
 
             if (chunk_type == FOURCC("fLaC")) {
-
-                // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
-                // 4 for mime, 4 for blockType and BlockLen, 34 for metadata
-                uint8_t flacInfo[4 + 4 + 34];
-                // skipping dFla, version
-                data_offset += sizeof(buffer) + 12;
-                size_t flacOffset = 4;
-                // Add flaC header mime type to CSD
-                strncpy((char *)flacInfo, "fLaC", 4);
-                if (mDataSource->readAt(
-                        data_offset, flacInfo + flacOffset, sizeof(flacInfo) - flacOffset) <
-                        (ssize_t)sizeof(flacInfo) - flacOffset) {
-                    return ERROR_IO;
-                }
-                data_offset += sizeof(flacInfo) - flacOffset;
-
-                AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
-                                       sizeof(flacInfo));
+                data_offset += sizeof(buffer);
                 *offset = data_offset;
-                CHECK_EQ(*offset, stop_offset);
             }
 
             while (*offset < stop_offset) {
@@ -2521,6 +2517,35 @@
             break;
         }
 
+        case FOURCC("dfLa"):
+        {
+            *offset += chunk_size;
+
+            // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
+            // 4 for mediaType, 4 for blockType and BlockLen, 34 for metadata
+            uint8_t flacInfo[4 + 4 + 34];
+
+            if (chunk_data_size != sizeof(flacInfo)) {
+                return ERROR_MALFORMED;
+            }
+
+            data_offset += 4;
+            size_t flacOffset = 4;
+            // Add flaC header mediaType to CSD
+            strncpy((char *)flacInfo, "fLaC", 4);
+
+            ssize_t bytesToRead = sizeof(flacInfo) - flacOffset;
+            if (mDataSource->readAt(
+                    data_offset, flacInfo + flacOffset, bytesToRead) < bytesToRead) {
+                return ERROR_IO;
+            }
+
+            data_offset += bytesToRead;
+            AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
+                                    sizeof(flacInfo));
+            break;
+        }
+
         case FOURCC("avcC"):
         {
             *offset += chunk_size;
@@ -5897,12 +5922,18 @@
             return -EINVAL;
         }
 
-        int32_t dataOffsetDelta;
-        if (!mDataSource->getUInt32(offset, (uint32_t*)&dataOffsetDelta)) {
+        uint32_t dataOffsetDelta;
+        if (!mDataSource->getUInt32(offset, &dataOffsetDelta)) {
             return ERROR_MALFORMED;
         }
 
-        dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta;
+        if (__builtin_add_overflow(
+                mTrackFragmentHeaderInfo.mBaseDataOffset, dataOffsetDelta, &dataOffset)) {
+            ALOGW("b/232242894 mBaseDataOffset(%" PRIu64 ") + dataOffsetDelta(%u) overflows uint64",
+                    mTrackFragmentHeaderInfo.mBaseDataOffset, dataOffsetDelta);
+            android_errorWriteLog(0x534e4554, "232242894");
+            return ERROR_MALFORMED;
+        }
 
         offset += 4;
         size -= 4;
@@ -6036,7 +6067,12 @@
             return NO_MEMORY;
         }
 
-        dataOffset += sampleSize;
+        if (__builtin_add_overflow(dataOffset, sampleSize, &dataOffset)) {
+            ALOGW("b/232242894 dataOffset(%" PRIu64 ") + sampleSize(%u) overflows uint64",
+                    dataOffset, sampleSize);
+            android_errorWriteLog(0x534e4554, "232242894");
+            return ERROR_MALFORMED;
+        }
     }
 
     mTrackFragmentHeaderInfo.mDataOffset = dataOffset;
@@ -6301,7 +6337,7 @@
 
         err = mBufferGroup->acquire_buffer(&mBuffer);
 
-        if (err != OK) {
+        if (err != OK || mBuffer == nullptr) {
             CHECK(mBuffer == NULL);
             return AMEDIA_ERROR_UNKNOWN;
         }
@@ -6701,7 +6737,7 @@
         const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
         offset = smpl->offset;
         size = smpl->size;
-        cts = mCurrentTime + smpl->compositionOffset;
+        cts = (int64_t)mCurrentTime + (int64_t)smpl->compositionOffset;
 
         if (mElstInitialEmptyEditTicks > 0) {
             cts += mElstInitialEmptyEditTicks;
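The track-fragment hunks above guard the 64-bit offset arithmetic with __builtin_add_overflow, a GCC/Clang builtin that returns true when the sum does not fit the destination type, so a crafted dataOffsetDelta or sampleSize can no longer wrap the offset silently. A small standalone sketch of the same check:

    #include <cstdint>
    #include <cstdio>

    // Returns false when base + delta would overflow uint64_t, in which case
    // the caller should treat the stream as malformed.
    static bool checkedOffset(uint64_t base, uint32_t delta, uint64_t* out) {
        if (__builtin_add_overflow(base, delta, out)) {
            std::fprintf(stderr, "offset overflow: %llu + %u\n",
                         (unsigned long long)base, delta);
            return false;
        }
        return true;
    }

    int main() {
        uint64_t off = 0;
        checkedOffset(UINT64_MAX, 1u, &off);   // prints the overflow warning
        return 0;
    }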
diff --git a/media/extractors/mp4/NOTICE b/media/module/extractors/mp4/NOTICE
similarity index 100%
rename from media/extractors/mp4/NOTICE
rename to media/module/extractors/mp4/NOTICE
diff --git a/media/extractors/mp4/SampleIterator.cpp b/media/module/extractors/mp4/SampleIterator.cpp
similarity index 100%
rename from media/extractors/mp4/SampleIterator.cpp
rename to media/module/extractors/mp4/SampleIterator.cpp
diff --git a/media/extractors/mp4/SampleTable.cpp b/media/module/extractors/mp4/SampleTable.cpp
similarity index 100%
rename from media/extractors/mp4/SampleTable.cpp
rename to media/module/extractors/mp4/SampleTable.cpp
diff --git a/media/extractors/mp4/exports.lds b/media/module/extractors/mp4/exports.lds
similarity index 100%
rename from media/extractors/mp4/exports.lds
rename to media/module/extractors/mp4/exports.lds
diff --git a/media/extractors/mp4/include/AC4Parser.h b/media/module/extractors/mp4/include/AC4Parser.h
similarity index 100%
rename from media/extractors/mp4/include/AC4Parser.h
rename to media/module/extractors/mp4/include/AC4Parser.h
diff --git a/media/extractors/mp4/include/ItemTable.h b/media/module/extractors/mp4/include/ItemTable.h
similarity index 100%
rename from media/extractors/mp4/include/ItemTable.h
rename to media/module/extractors/mp4/include/ItemTable.h
diff --git a/media/extractors/mp4/include/MPEG4Extractor.h b/media/module/extractors/mp4/include/MPEG4Extractor.h
similarity index 100%
rename from media/extractors/mp4/include/MPEG4Extractor.h
rename to media/module/extractors/mp4/include/MPEG4Extractor.h
diff --git a/media/extractors/mp4/include/SampleIterator.h b/media/module/extractors/mp4/include/SampleIterator.h
similarity index 100%
rename from media/extractors/mp4/include/SampleIterator.h
rename to media/module/extractors/mp4/include/SampleIterator.h
diff --git a/media/extractors/mp4/include/SampleTable.h b/media/module/extractors/mp4/include/SampleTable.h
similarity index 100%
rename from media/extractors/mp4/include/SampleTable.h
rename to media/module/extractors/mp4/include/SampleTable.h
diff --git a/media/extractors/mpeg2/Android.bp b/media/module/extractors/mpeg2/Android.bp
similarity index 100%
rename from media/extractors/mpeg2/Android.bp
rename to media/module/extractors/mpeg2/Android.bp
diff --git a/media/extractors/mpeg2/ExtractorBundle.cpp b/media/module/extractors/mpeg2/ExtractorBundle.cpp
similarity index 100%
rename from media/extractors/mpeg2/ExtractorBundle.cpp
rename to media/module/extractors/mpeg2/ExtractorBundle.cpp
diff --git a/media/extractors/mpeg2/MODULE_LICENSE_APACHE2 b/media/module/extractors/mpeg2/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/mpeg2/MODULE_LICENSE_APACHE2
rename to media/module/extractors/mpeg2/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
similarity index 97%
rename from media/extractors/mpeg2/MPEG2PSExtractor.cpp
rename to media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
index afd28ef..44c8937 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -699,11 +699,26 @@
         }
     }
 
-    MediaBufferBase *mbuf;
-    mSource->read(&mbuf, (MediaTrack::ReadOptions*) options);
+    MediaBufferBase *mbuf = nullptr;
+    status_t err_read = mSource->read(&mbuf, (MediaTrack::ReadOptions*) options);
+    if (mbuf == nullptr) {
+        ALOGE("Track::read: null buffer read from source");
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+    if (err_read != OK) {
+        ALOGE("Track::read: no buffer read from source");
+        mbuf->release();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
     size_t length = mbuf->range_length();
-    MediaBufferHelper *outbuf;
-    mBufferGroup->acquire_buffer(&outbuf, false, length);
+    MediaBufferHelper *outbuf = nullptr;
+    status_t err = mBufferGroup->acquire_buffer(&outbuf, false, length);
+    if (err != OK || outbuf == nullptr) {
+        ALOGE("Track::read: no buffer");
+        mbuf->release();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
     memcpy(outbuf->data(), mbuf->data(), length);
     outbuf->set_range(0, length);
     *buffer = outbuf;
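The MPEG2PSExtractor and MPEG2TSExtractor hunks check both the status and the pointer returned by the upstream read, and release the intermediate buffer on every error path, so a failed acquire no longer leaks the source buffer or dereferences null. A rough sketch of that cleanup-on-error shape, with SourceBuf, readFrom, and acquireOut as illustrative stand-ins rather than the real MediaBufferBase/MediaBufferHelper APIs:

    #include <cstddef>

    struct SourceBuf {
        void release() {}                        // stand-in for MediaBufferBase::release()
        std::size_t size() const { return 16; }
    };

    static SourceBuf gBuf;
    // Stand-ins for mSource->read() and mBufferGroup->acquire_buffer().
    static int readFrom(SourceBuf** out) { *out = &gBuf; return 0; }
    static int acquireOut(std::size_t) { return -1; }    // simulate pool exhaustion

    static int readTrack() {
        SourceBuf* mbuf = nullptr;
        int err = readFrom(&mbuf);
        if (mbuf == nullptr) return -1;          // nothing to release yet
        if (err != 0) { mbuf->release(); return -1; }

        if (acquireOut(mbuf->size()) != 0) {
            mbuf->release();                     // release the source buffer on failure too
            return -1;
        }
        // ... copy into the acquired output buffer ...
        mbuf->release();
        return 0;
    }

    int main() { readTrack(); return 0; }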
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
similarity index 97%
rename from media/extractors/mpeg2/MPEG2TSExtractor.cpp
rename to media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
index 9a3cd92..736b817 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -182,11 +182,26 @@
         return AMEDIA_ERROR_END_OF_STREAM;
     }
 
-    MediaBufferBase *mbuf;
-    mImpl->read(&mbuf, (MediaTrack::ReadOptions*) options);
+    MediaBufferBase *mbuf = nullptr;
+    status_t err_read = mImpl->read(&mbuf, (MediaTrack::ReadOptions*) options);
+    if (mbuf == nullptr) {
+        ALOGE("Track::read: null buffer read from source");
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+    if (err_read != OK) {
+        ALOGE("Track::read: no buffer read from source");
+        mbuf->release();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
+
     size_t length = mbuf->range_length();
-    MediaBufferHelper *outbuf;
-    mBufferGroup->acquire_buffer(&outbuf, false, length);
+    MediaBufferHelper *outbuf = nullptr;
+    status_t err = mBufferGroup->acquire_buffer(&outbuf, false, length);
+    if (err != OK || outbuf == nullptr) {
+        ALOGE("read: no buffer");
+        mbuf->release();
+        return AMEDIA_ERROR_UNKNOWN;
+    }
     memcpy(outbuf->data(), mbuf->data(), length);
     outbuf->set_range(0, length);
     *out = outbuf;
diff --git a/media/extractors/mpeg2/NOTICE b/media/module/extractors/mpeg2/NOTICE
similarity index 100%
rename from media/extractors/mpeg2/NOTICE
rename to media/module/extractors/mpeg2/NOTICE
diff --git a/media/extractors/mpeg2/exports.lds b/media/module/extractors/mpeg2/exports.lds
similarity index 100%
rename from media/extractors/mpeg2/exports.lds
rename to media/module/extractors/mpeg2/exports.lds
diff --git a/media/extractors/mpeg2/include/MPEG2PSExtractor.h b/media/module/extractors/mpeg2/include/MPEG2PSExtractor.h
similarity index 100%
rename from media/extractors/mpeg2/include/MPEG2PSExtractor.h
rename to media/module/extractors/mpeg2/include/MPEG2PSExtractor.h
diff --git a/media/extractors/mpeg2/include/MPEG2TSExtractor.h b/media/module/extractors/mpeg2/include/MPEG2TSExtractor.h
similarity index 100%
rename from media/extractors/mpeg2/include/MPEG2TSExtractor.h
rename to media/module/extractors/mpeg2/include/MPEG2TSExtractor.h
diff --git a/media/extractors/ogg/Android.bp b/media/module/extractors/ogg/Android.bp
similarity index 100%
rename from media/extractors/ogg/Android.bp
rename to media/module/extractors/ogg/Android.bp
diff --git a/media/extractors/ogg/MODULE_LICENSE_APACHE2 b/media/module/extractors/ogg/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/ogg/MODULE_LICENSE_APACHE2
rename to media/module/extractors/ogg/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/ogg/NOTICE b/media/module/extractors/ogg/NOTICE
similarity index 100%
rename from media/extractors/ogg/NOTICE
rename to media/module/extractors/ogg/NOTICE
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/module/extractors/ogg/OggExtractor.cpp
similarity index 90%
rename from media/extractors/ogg/OggExtractor.cpp
rename to media/module/extractors/ogg/OggExtractor.cpp
index eb2246d..4c106b2 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/module/extractors/ogg/OggExtractor.cpp
@@ -34,6 +34,9 @@
 #include <system/audio.h>
 #include <utils/String8.h>
 
+#include <inttypes.h>
+#include <stdint.h>
+
 extern "C" {
     #include <Tremolo/codec_internal.h>
 
@@ -346,66 +349,118 @@
         off64_t startOffset, off64_t *pageOffset) {
     *pageOffset = startOffset;
 
-    for (;;) {
-        char signature[4];
-        ssize_t n = mSource->readAt(*pageOffset, &signature, 4);
+    // balance between larger reads and reducing how much we over-read.
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+    while(1) {
 
-        if (n < 4) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+        ssize_t n = mSource->readAt(*pageOffset, &signatureBuffer, sizeof(signatureBuffer));
+
+        if (n < lenOggS) {
             *pageOffset = 0;
-
             return (n < 0) ? n : (status_t)ERROR_END_OF_STREAM;
         }
 
-        if (!memcmp(signature, "OggS", 4)) {
-            if (*pageOffset > startOffset) {
-                ALOGV("skipped %lld bytes of junk to reach next frame",
-                     (long long)(*pageOffset - startOffset));
-            }
-
-            return OK;
-        }
-
-        // see how far ahead to skip; avoid some fruitless comparisons
-        unsigned int i;
-        for (i = 1; i < 4 ; i++) {
-            if (signature[i] == 'O')
+        for(int i = 0; i < n - (lenOggS - 1) ; i++) {
+            // fast scan for 1st character in a signature
+            char *p = (char *)memchr(&signatureBuffer[i], 'O', n - (lenOggS - 1) - i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
                 break;
+            }
+            int jump = (p-&signatureBuffer[i]);
+            i += jump;
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                *pageOffset += i;
+                if (*pageOffset > startOffset) {
+                    ALOGD("skipped %" PRIu64 " bytes of junk to reach next frame",
+                         (*pageOffset - startOffset));
+                }
+                return OK;
+            }
         }
-        *pageOffset += i;
+
+        // on to next block. buffer didn't end with "OggS", but could end with "Ogg".
+        // overlap enough to detect this. n >= lenOggS, so this always advances.
+        *pageOffset += n - (lenOggS - 1);
     }
+    return (status_t)ERROR_END_OF_STREAM;
 }
 
 // Given the offset of the "current" page, find the page immediately preceding
 // it (if any) and return its granule position.
 // To do this we back up from the "current" page's offset until we find any
 // page preceding it and then scan forward to just before the current page.
+//
 status_t MyOggExtractor::findPrevGranulePosition(
         off64_t pageOffset, uint64_t *granulePos) {
     *granulePos = 0;
 
-    off64_t prevPageOffset = 0;
-    off64_t prevGuess = pageOffset;
-    for (;;) {
-        if (prevGuess >= 5000) {
-            prevGuess -= 5000;
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+
+    if (pageOffset == 0) {
+        ALOGV("no page before the first page");
+        return UNKNOWN_ERROR;
+    }
+
+    off64_t prevPageOffset = pageOffset;
+
+    // we start our search on the byte immediately in front of pageOffset
+    // which could mean "O" immediately before and "ggS" starting at pageOffset
+    //
+    // if there was an "OggS" at pageOffset, we'll have scanned a few extra bytes
+    // but if pageOffset was chosen by a seek operation, we don't know that it
+    // reflects the beginning of a page. By choosing to scan 3 possibly unneeded
+    // bytes at the start we cover both cases.
+    //
+    off64_t firstAfter = pageOffset + lenOggS - 1;    // NOT within our buffer
+    off64_t nextOffset = pageOffset;
+
+    while(prevPageOffset == pageOffset) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+
+        ssize_t desired = sizeof(signatureBuffer);
+        if (firstAfter >= desired) {
+            nextOffset = firstAfter - desired;
         } else {
-            prevGuess = 0;
+            nextOffset = 0;
+            desired = firstAfter;
         }
+        ssize_t n = mSource->readAt(nextOffset, &signatureBuffer, desired);
 
-        ALOGV("backing up %lld bytes", (long long)(pageOffset - prevGuess));
-
-        status_t err = findNextPage(prevGuess, &prevPageOffset);
-        if (err == ERROR_END_OF_STREAM) {
-            // We are at the last page and didn't back off enough;
-            // back off 5000 bytes more and try again.
-            continue;
-        } else if (err != OK) {
-            return err;
-        }
-
-        if (prevPageOffset < pageOffset || prevGuess == 0) {
+        if (n < lenOggS) {
+            ALOGD("short read, get out");
             break;
         }
+
+        // work backwards
+        // loop control ok for n >= 0
+        for(int i = n - lenOggS; i >= 0 ; i--) {
+            // fast scan for 1st character in the signature
+            char *p = (char *)memrchr(&signatureBuffer[0], 'O', i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
+                break;
+            }
+            i = (p-&signatureBuffer[0]);
+            // loop start chosen to ensure we will always have lenOggS bytes
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                prevPageOffset = nextOffset + i;
+                break;
+            }
+        }
+
+        // back up for next read; make sure we catch overlaps
+        if (nextOffset == 0) {
+            // can't back up any further
+            break;
+        }
+        // current buffer might start with "ggS", include those bytes in the next iteration
+        firstAfter = nextOffset + lenOggS - 1;
     }
 
     if (prevPageOffset == pageOffset) {
@@ -413,8 +468,8 @@
         return UNKNOWN_ERROR;
     }
 
-    ALOGV("prevPageOffset at %lld, pageOffset at %lld",
-            (long long)prevPageOffset, (long long)pageOffset);
+    ALOGV("prevPageOffset at %" PRIu64 ", pageOffset at %" PRIu64,
+          prevPageOffset, pageOffset);
     uint8_t flag = 0;
     for (;;) {
         Page prevPage;
@@ -790,7 +845,8 @@
             }
             MediaBufferHelper *tmp;
             if (mBufferGroup) {
-                mBufferGroup->acquire_buffer(&tmp, false, fullSize);
+                // ignore return code here. instead, check tmp below.
+                (void) mBufferGroup->acquire_buffer(&tmp, false, fullSize);
                 ALOGV("acquired buffer %p from group", tmp);
             } else {
                 tmp = new StandAloneMediaBuffer(fullSize);
@@ -924,13 +980,16 @@
 status_t MyOggExtractor::init() {
     AMediaFormat_setString(mMeta, AMEDIAFORMAT_KEY_MIME, mMimeType);
 
-    media_status_t err;
-    MediaBufferHelper *packet;
     for (size_t i = 0; i < mNumHeaders; ++i) {
+        media_status_t err;
+        MediaBufferHelper *packet = nullptr;
         // ignore timestamp for configuration packets
         if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != AMEDIA_OK) {
             return err;
         }
+        if (packet == nullptr) {
+            return AMEDIA_ERROR_UNKNOWN;
+        }
         ALOGV("read packet of size %zu\n", packet->range_length());
         err = verifyHeader(packet, /* type = */ i * 2 + 1);
         packet->release();
@@ -989,16 +1048,21 @@
     size_t numerator = mTableOfContents.size();
 
     if (numerator > kMaxNumTOCEntries) {
-        size_t denom = numerator - kMaxNumTOCEntries;
+        Vector<TOCEntry> maxTOC;
+        maxTOC.setCapacity(kMaxNumTOCEntries);
 
+        size_t denom = numerator - kMaxNumTOCEntries;
         size_t accum = 0;
-        for (ssize_t i = mTableOfContents.size(); i > 0; --i) {
+        for (ssize_t i = 0; i < mTableOfContents.size(); i++) {
             accum += denom;
             if (accum >= numerator) {
-                mTableOfContents.removeAt(i);
                 accum -= numerator;
+            } else {
+                maxTOC.push(mTableOfContents.itemAt(i));
             }
         }
+
+        mTableOfContents = maxTOC;
     }
 }
 
diff --git a/media/extractors/ogg/exports.lds b/media/module/extractors/ogg/exports.lds
similarity index 100%
rename from media/extractors/ogg/exports.lds
rename to media/module/extractors/ogg/exports.lds
diff --git a/media/extractors/ogg/include/OggExtractor.h b/media/module/extractors/ogg/include/OggExtractor.h
similarity index 100%
rename from media/extractors/ogg/include/OggExtractor.h
rename to media/module/extractors/ogg/include/OggExtractor.h
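The OggExtractor.cpp hunks above replace the four-bytes-at-a-time page scan with large buffered reads that are searched with memchr for the first byte of "OggS", overlapping consecutive blocks by strlen("OggS") - 1 bytes so a signature straddling a block boundary is not missed. A standalone sketch of the memchr-based search within one buffer (the real MyOggExtractor::findNextPage additionally reads 2048-byte blocks through DataSourceHelper and maintains that overlap):

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    // Find the first "OggS" at or after `start`; returns -1 if absent.
    static long findSignature(const char* data, size_t len, size_t start) {
        const size_t lenOggS = strlen("OggS");
        for (size_t i = start; i + lenOggS <= len;) {
            // fast scan for the first character of the signature
            const char* p =
                    (const char*)memchr(data + i, 'O', len - lenOggS + 1 - i);
            if (p == nullptr) return -1;   // no possible signature start left
            i = (size_t)(p - data);
            if (memcmp(p, "OggS", lenOggS) == 0) return (long)i;
            ++i;                           // an 'O' but not "OggS": keep scanning
        }
        return -1;
    }

    int main() {
        const char page[] = "randomjunkOggSpage";
        std::printf("signature at offset %ld\n",
                    findSignature(page, sizeof(page) - 1, 0));   // prints 10
        return 0;
    }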
diff --git a/media/extractors/tests/Android.bp b/media/module/extractors/tests/Android.bp
similarity index 98%
rename from media/extractors/tests/Android.bp
rename to media/module/extractors/tests/Android.bp
index 3c3bbdc..d6e79c7 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/module/extractors/tests/Android.bp
@@ -55,7 +55,7 @@
         "libmedia_midiiowrapper",
         "libsonivoxwithoutjet",
         "libvorbisidec",
-        "libwebm",
+        "libwebm_mkvparser",
         "libFLAC",
     ],
 
diff --git a/media/extractors/tests/AndroidTest.xml b/media/module/extractors/tests/AndroidTest.xml
similarity index 100%
rename from media/extractors/tests/AndroidTest.xml
rename to media/module/extractors/tests/AndroidTest.xml
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/module/extractors/tests/ExtractorUnitTest.cpp
similarity index 100%
rename from media/extractors/tests/ExtractorUnitTest.cpp
rename to media/module/extractors/tests/ExtractorUnitTest.cpp
diff --git a/media/extractors/tests/ExtractorUnitTestEnvironment.h b/media/module/extractors/tests/ExtractorUnitTestEnvironment.h
similarity index 100%
rename from media/extractors/tests/ExtractorUnitTestEnvironment.h
rename to media/module/extractors/tests/ExtractorUnitTestEnvironment.h
diff --git a/media/extractors/tests/README.md b/media/module/extractors/tests/README.md
similarity index 100%
rename from media/extractors/tests/README.md
rename to media/module/extractors/tests/README.md
diff --git a/media/extractors/wav/Android.bp b/media/module/extractors/wav/Android.bp
similarity index 100%
rename from media/extractors/wav/Android.bp
rename to media/module/extractors/wav/Android.bp
diff --git a/media/extractors/wav/MODULE_LICENSE_APACHE2 b/media/module/extractors/wav/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/extractors/wav/MODULE_LICENSE_APACHE2
rename to media/module/extractors/wav/MODULE_LICENSE_APACHE2
diff --git a/media/extractors/wav/NOTICE b/media/module/extractors/wav/NOTICE
similarity index 100%
rename from media/extractors/wav/NOTICE
rename to media/module/extractors/wav/NOTICE
diff --git a/media/extractors/wav/WAVExtractor.cpp b/media/module/extractors/wav/WAVExtractor.cpp
similarity index 98%
rename from media/extractors/wav/WAVExtractor.cpp
rename to media/module/extractors/wav/WAVExtractor.cpp
index 9e94587..9c3bac6 100644
--- a/media/extractors/wav/WAVExtractor.cpp
+++ b/media/module/extractors/wav/WAVExtractor.cpp
@@ -459,11 +459,15 @@
         mCurrentPos = pos + mOffset;
     }
 
-    MediaBufferHelper *buffer;
+    MediaBufferHelper *buffer = nullptr;
     media_status_t err = mBufferGroup->acquire_buffer(&buffer);
     if (err != OK) {
         return err;
     }
+    if (buffer == nullptr) {
+        ALOGE("acquire_buffer OK, but no buffer");
+        return AMEDIA_ERROR_UNKNOWN;
+    }
 
     // maxBytesToRead may be reduced so that in-place data conversion will fit in buffer size.
     const size_t bufferSize = std::min(buffer->size(), kMaxFrameSize);
diff --git a/media/extractors/wav/exports.lds b/media/module/extractors/wav/exports.lds
similarity index 100%
rename from media/extractors/wav/exports.lds
rename to media/module/extractors/wav/exports.lds
diff --git a/media/extractors/wav/include/WAVExtractor.h b/media/module/extractors/wav/include/WAVExtractor.h
similarity index 100%
rename from media/extractors/wav/include/WAVExtractor.h
rename to media/module/extractors/wav/include/WAVExtractor.h
diff --git a/media/libstagefright/foundation/AAtomizer.cpp b/media/module/foundation/AAtomizer.cpp
similarity index 100%
rename from media/libstagefright/foundation/AAtomizer.cpp
rename to media/module/foundation/AAtomizer.cpp
diff --git a/media/libstagefright/foundation/ABitReader.cpp b/media/module/foundation/ABitReader.cpp
similarity index 100%
rename from media/libstagefright/foundation/ABitReader.cpp
rename to media/module/foundation/ABitReader.cpp
diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/module/foundation/ABuffer.cpp
similarity index 100%
rename from media/libstagefright/foundation/ABuffer.cpp
rename to media/module/foundation/ABuffer.cpp
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/module/foundation/ADebug.cpp
similarity index 100%
rename from media/libstagefright/foundation/ADebug.cpp
rename to media/module/foundation/ADebug.cpp
diff --git a/media/libstagefright/foundation/AHandler.cpp b/media/module/foundation/AHandler.cpp
similarity index 65%
rename from media/libstagefright/foundation/AHandler.cpp
rename to media/module/foundation/AHandler.cpp
index 7dbbe54..d8b0aaf 100644
--- a/media/libstagefright/foundation/AHandler.cpp
+++ b/media/module/foundation/AHandler.cpp
@@ -24,8 +24,10 @@
 namespace android {
 
 void AHandler::deliverMessage(const sp<AMessage> &msg) {
+    setDeliveryStatus(true, msg->what(), ALooper::GetNowUs());
     onMessageReceived(msg);
     mMessageCounter++;
+    setDeliveryStatus(false, 0, 0);
 
     if (mVerboseStats) {
         uint32_t what = msg->what();
@@ -38,4 +40,19 @@
     }
 }
 
+void AHandler::setDeliveryStatus(bool delivering, uint32_t what, int64_t startUs) {
+    AutoMutex autoLock(mLock);
+    mDeliveringMessage = delivering;
+    mCurrentMessageWhat = what;
+    mCurrentMessageStartTimeUs = startUs;
+}
+
+void AHandler::getDeliveryStatus(bool& delivering, uint32_t& what, int64_t& durationUs) {
+    AutoMutex autoLock(mLock);
+    delivering = mDeliveringMessage;
+    what = mCurrentMessageWhat;
+    durationUs = mCurrentMessageStartTimeUs == 0 ?
+            0 : ALooper::GetNowUs() - mCurrentMessageStartTimeUs;
+}
+
 }  // namespace android
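
The bracketing added to deliverMessage() above is the whole mechanism: record "delivering", the message's what, and a start timestamp before invoking onMessageReceived(), clear them afterwards, and let the dump path (ALooperRoster::dump(), further down in this patch) read the tuple under the same lock to report how long the current message has been in flight. A self-contained sketch of that pattern outside the AHandler class, with invented names and std::mutex in place of the utils Mutex:

    #include <chrono>
    #include <cstdint>
    #include <functional>
    #include <mutex>

    // Tracks whether a callback is currently running and for how long,
    // mirroring the setDeliveryStatus()/getDeliveryStatus() bracketing above.
    class DeliveryTracker {
    public:
        void run(uint32_t what, const std::function<void()>& callback) {
            set(true, what, nowUs());
            callback();                  // stands in for onMessageReceived(msg)
            set(false, 0, 0);
        }

        // Snapshot of delivering / what / elapsed time, like getDeliveryStatus().
        void status(bool& delivering, uint32_t& what, int64_t& durationUs) {
            std::lock_guard<std::mutex> lock(mLock);
            delivering = mDelivering;
            what = mWhat;
            durationUs = (mStartUs == 0) ? 0 : nowUs() - mStartUs;
        }

    private:
        static int64_t nowUs() {
            using namespace std::chrono;
            return duration_cast<microseconds>(
                    steady_clock::now().time_since_epoch()).count();
        }
        void set(bool delivering, uint32_t what, int64_t startUs) {
            std::lock_guard<std::mutex> lock(mLock);
            mDelivering = delivering;
            mWhat = what;
            mStartUs = startUs;
        }

        std::mutex mLock;
        bool mDelivering = false;
        uint32_t mWhat = 0;
        int64_t mStartUs = 0;
    };

A dump thread can call status() at any moment; a large durationUs points at a handler stuck inside its current message, which is exactly what the roster dump below surfaces.
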
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/module/foundation/ALooper.cpp
similarity index 77%
rename from media/libstagefright/foundation/ALooper.cpp
rename to media/module/foundation/ALooper.cpp
index a276722..61bac02 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/module/foundation/ALooper.cpp
@@ -69,6 +69,10 @@
     return systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
 }
 
+int64_t ALooper::getNowUs() {
+    return GetNowUs();
+}
+
 ALooper::ALooper()
     : mRunningLocally(false) {
     // clean up stale AHandlers. Doing it here instead of in the destructor avoids
@@ -170,11 +174,11 @@
 
     int64_t whenUs;
     if (delayUs > 0) {
-        int64_t nowUs = GetNowUs();
+        int64_t nowUs = getNowUs();
         whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
 
     } else {
-        whenUs = GetNowUs();
+        whenUs = getNowUs();
     }
 
     List<Event>::iterator it = mEventQueue.begin();
@@ -185,6 +189,7 @@
     Event event;
     event.mWhenUs = whenUs;
     event.mMessage = msg;
+    event.mToken = nullptr;
 
     if (it == mEventQueue.begin()) {
         mQueueChangedCondition.signal();
@@ -193,7 +198,57 @@
     mEventQueue.insert(it, event);
 }
 
+status_t ALooper::postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs) {
+    if (token == nullptr) {
+        return -EINVAL;
+    }
+    Mutex::Autolock autoLock(mLock);
+
+    int64_t whenUs;
+    if (delayUs > 0) {
+        int64_t nowUs = getNowUs();
+        whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
+    } else {
+        whenUs = getNowUs();
+    }
+
+    // We only need to wake the loop up if we're rescheduling to the earliest event in the queue.
+    // This needs to be checked now, before we reschedule the message, in case this message is
+    // already at the beginning of the queue.
+    bool shouldAwakeLoop = mEventQueue.empty() || whenUs < mEventQueue.begin()->mWhenUs;
+
+    // Erase any previously-posted event with this token.
+    for (auto i = mEventQueue.begin(); i != mEventQueue.end();) {
+        if (i->mToken == token) {
+            i = mEventQueue.erase(i);
+        } else {
+            ++i;
+        }
+    }
+
+    // Find the insertion point for the rescheduled message.
+    List<Event>::iterator i = mEventQueue.begin();
+    while (i != mEventQueue.end() && i->mWhenUs <= whenUs) {
+        ++i;
+    }
+
+    Event event;
+    event.mWhenUs = whenUs;
+    event.mMessage = msg;
+    event.mToken = token;
+    mEventQueue.insert(i, event);
+
+    // If we rescheduled the event to be earlier than the first event, then we need to wake up the
+    // looper earlier than it was previously scheduled to be woken up. Otherwise, it can sleep until
+    // the previous wake-up time and then go to sleep again if needed.
+    if (shouldAwakeLoop) {
+        mQueueChangedCondition.signal();
+    }
+    return OK;
+}
+
 bool ALooper::loop() {
     Event event;
 
     {
@@ -206,7 +261,7 @@
             return true;
         }
         int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
-        int64_t nowUs = GetNowUs();
+        int64_t nowUs = getNowUs();
 
         if (whenUs > nowUs) {
             int64_t delayUs = whenUs - nowUs;
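
postUnique() above boils down to three steps: decide up front whether the new event will become the head of the queue (and therefore whether the loop thread must be woken), erase any queued event carrying the same token, then insert at the time-sorted position. A rough standalone model of just that queue manipulation, with std::list and std::shared_ptr standing in for List<Event> and sp<RefBase> (illustrative, not the AOSP implementation):

    #include <cstdint>
    #include <iterator>
    #include <list>
    #include <memory>

    struct Event {
        int64_t whenUs;
        int payload;                    // stands in for sp<AMessage> mMessage
        std::shared_ptr<void> token;    // stands in for sp<RefBase> mToken
    };

    // Reschedule the event carrying this token to whenUs, keeping at most one
    // event per token in the queue. Returns true when the caller should wake
    // the loop (the new event is earlier than everything previously queued).
    bool postUnique(std::list<Event>& queue, int payload,
                    const std::shared_ptr<void>& token, int64_t whenUs) {
        // Decide before touching the queue, in case the event being replaced
        // is currently at the head.
        const bool shouldWake = queue.empty() || whenUs < queue.front().whenUs;

        // Drop any previously queued event with this token.
        for (auto it = queue.begin(); it != queue.end();) {
            it = (it->token == token) ? queue.erase(it) : std::next(it);
        }

        // Insert in time order, after events with equal timestamps.
        auto pos = queue.begin();
        while (pos != queue.end() && pos->whenUs <= whenUs) {
            ++pos;
        }
        queue.insert(pos, Event{whenUs, payload, token});
        return shouldWake;
    }

Events with distinct tokens keep their normal time ordering; only events sharing a token collapse to a single pending entry.
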
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/module/foundation/ALooperRoster.cpp
similarity index 88%
rename from media/libstagefright/foundation/ALooperRoster.cpp
rename to media/module/foundation/ALooperRoster.cpp
index 4334f1e..5625c7f 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/module/foundation/ALooperRoster.cpp
@@ -143,8 +143,20 @@
             s.append(looper->getName());
             sp<AHandler> handler = info.mHandler.promote();
             if (handler != NULL) {
+                bool deliveringMessages;
+                uint32_t currentMessageWhat;
+                int64_t currentDeliveryDurationUs;
+                handler->getDeliveryStatus(deliveringMessages,
+                                           currentMessageWhat,
+                                           currentDeliveryDurationUs);
                 handler->mVerboseStats = verboseStats;
-                s.appendFormat(": %" PRIu64 " messages processed", handler->mMessageCounter);
+                s.appendFormat(": %" PRIu64 " messages processed, delivering "
+                               "%d, current msg %" PRIu32 ", current msg "
+                               "durationUs %" PRIu64 "",
+                               handler->mMessageCounter,
+                               deliveringMessages,
+                               currentMessageWhat,
+                               currentDeliveryDurationUs);
                 if (verboseStats) {
                     for (size_t j = 0; j < handler->mMessages.size(); j++) {
                         char fourcc[15];
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/module/foundation/AMessage.cpp
similarity index 98%
rename from media/libstagefright/foundation/AMessage.cpp
rename to media/module/foundation/AMessage.cpp
index 5c99cc9..7cc7c41 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/module/foundation/AMessage.cpp
@@ -430,6 +430,17 @@
     return OK;
 }
 
+status_t AMessage::postUnique(const sp<RefBase> &token, int64_t delayUs) {
+    sp<ALooper> looper = mLooper.promote();
+    if (looper == NULL) {
+        ALOGW("failed to post message as target looper for handler %d is gone.",
+              mTarget);
+        return -ENOENT;
+    }
+
+    return looper->postUnique(this, token, delayUs);
+}
+
 status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) {
     sp<ALooper> looper = mLooper.promote();
     if (looper == NULL) {
@@ -950,6 +961,11 @@
     return mItems.size();
 }
 
+/* static */
+size_t AMessage::maxAllowedEntries() {
+    return kMaxNumItems;
+}
+
 const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
     if (index >= mItems.size()) {
         *type = kTypeInt32;
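
On the AMessage side, postUnique() simply forwards to the target looper, which makes it a handy debounce primitive: repeated posts that share one token end up as a single delivery at the most recently requested time, which is what the new AMessage_test cases later in this patch verify. A hedged usage sketch; RefreshHandler, kWhatRefresh, and mRefreshMsg are names invented for the example, and the handler is assumed to be registered with a running ALooper:

    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    // Hypothetical handler showing the debounce pattern enabled by postUnique().
    struct RefreshHandler : public AHandler {
        enum { kWhatRefresh = 'rfsh' };

        void scheduleRefresh(int64_t delayUs) {
            if (mRefreshMsg == nullptr) {
                mRefreshMsg = new AMessage(kWhatRefresh, this);
            }
            // The message doubles as its own token (as in the new tests), so
            // back-to-back calls reschedule the one pending refresh instead of
            // queueing several.
            (void)mRefreshMsg->postUnique(mRefreshMsg, delayUs);
        }

    protected:
        void onMessageReceived(const sp<AMessage> &msg) override {
            if (msg->what() == kWhatRefresh) {
                // ... perform the (single) refresh ...
            }
        }

    private:
        sp<AMessage> mRefreshMsg;
    };

    }  // namespace android
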
diff --git a/media/libstagefright/foundation/AString.cpp b/media/module/foundation/AString.cpp
similarity index 100%
rename from media/libstagefright/foundation/AString.cpp
rename to media/module/foundation/AString.cpp
diff --git a/media/libstagefright/foundation/AStringUtils.cpp b/media/module/foundation/AStringUtils.cpp
similarity index 100%
rename from media/libstagefright/foundation/AStringUtils.cpp
rename to media/module/foundation/AStringUtils.cpp
diff --git a/media/libstagefright/foundation/Android.bp b/media/module/foundation/Android.bp
similarity index 97%
rename from media/libstagefright/foundation/Android.bp
rename to media/module/foundation/Android.bp
index ca17117..dc8384d 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -110,6 +110,11 @@
                 "-DNO_IMEMORY",
             ],
         },
+        host: {
+            sanitize: {
+                cfi: false,
+            },
+        },
         apex: {
             exclude_shared_libs: [
                 "libbinder",
diff --git a/media/libstagefright/foundation/AudioPresentationInfo.cpp b/media/module/foundation/AudioPresentationInfo.cpp
similarity index 100%
rename from media/libstagefright/foundation/AudioPresentationInfo.cpp
rename to media/module/foundation/AudioPresentationInfo.cpp
diff --git a/media/libstagefright/foundation/ByteUtils.cpp b/media/module/foundation/ByteUtils.cpp
similarity index 100%
rename from media/libstagefright/foundation/ByteUtils.cpp
rename to media/module/foundation/ByteUtils.cpp
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/module/foundation/ColorUtils.cpp
similarity index 97%
rename from media/libstagefright/foundation/ColorUtils.cpp
rename to media/module/foundation/ColorUtils.cpp
index 6dc8157..12bffca 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/module/foundation/ColorUtils.cpp
@@ -60,6 +60,10 @@
         { CU::kColorStandardBT470M,         { CA::PrimariesBT470_6M, CA::MatrixBT470_6M } },
         // NOTE: there is no close match to the matrix used by standard film, chose closest
         { CU::kColorStandardFilm,           { CA::PrimariesGenericFilm, CA::MatrixBT2020 } },
+        // DCI-P3 (in the DataSpace that drives this standard) is actually Display P3
+        // ITU does not specify a matrix suitable for P3. The theoretical KR/KB numbers are
+        // 0.229 and 0.079. Assume BT.601 matrix as P3 is commonly used for JPEG with BT.601.
+        { CU::kColorStandardDisplay_P3,     { CA::PrimariesEG432, CA::MatrixBT601_6 } },
     }
 };
 
@@ -264,6 +268,8 @@
         { 8, ColorAspects::PrimariesGenericFilm },
         { 9, ColorAspects::PrimariesBT2020 },
         { 10, ColorAspects::PrimariesOther /* XYZ */ },
+        { 11, ColorAspects::PrimariesRP431 },
+        { 12, ColorAspects::PrimariesEG432 },
     }
 };
 
@@ -438,6 +444,9 @@
         { CU::kColorStandardBT2020,               CA::PrimariesBT2020 },
         { CU::kColorStandardBT601_525_Unadjusted, CA::PrimariesBT601_6_525 },
         { CU::kColorStandardBT601_625_Unadjusted, CA::PrimariesBT601_6_625 },
+        { CU::kColorStandardDisplay_P3,           CA::PrimariesEG432 },
+        // also map DCI-P3 (RP431) primaries to Display P3 as a fallback
+        { CU::kColorStandardDisplay_P3,           CA::PrimariesRP431 },
     }
 };
 
@@ -469,7 +478,8 @@
         { CU::kColorStandardBT2020Constant,       GET_HAL_BITFIELD(STANDARD, BT2020_CONSTANT_LUMINANCE) },
         { CU::kColorStandardBT470M,               GET_HAL_BITFIELD(STANDARD, BT470M) },
         { CU::kColorStandardFilm,                 GET_HAL_BITFIELD(STANDARD, FILM) },
-        { CU::kColorStandardDCI_P3,               GET_HAL_BITFIELD(STANDARD, DCI_P3) },
+        // DCI-P3 (in the DataSpace that drives this standard) is actually Display P3
+        { CU::kColorStandardDisplay_P3,           GET_HAL_BITFIELD(STANDARD, DCI_P3) },
     }
 };
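
The comments in these tables carry the real decision: the DCI_P3 value in DataSpace is treated as Display P3, and because ITU defines no YCbCr matrix for P3, the BT.601 matrix is paired with it instead of the theoretical coefficients. For context, the luma row of such a matrix is Y' = KR*R' + (1 - KR - KB)*G' + KB*B'; the toy comparison below contrasts BT.601's 0.299/0.114 with the 0.229/0.079 pair quoted in the comment (the sample color is arbitrary, and this is only an illustration, not code from the patch):

    #include <cstdio>

    // Luma row of a YCbCr matrix, parameterized by its KR/KB pair:
    //   Y' = KR*R' + (1 - KR - KB)*G' + KB*B'
    constexpr double luma(double kr, double kb, double r, double g, double b) {
        return kr * r + (1.0 - kr - kb) * g + kb * b;
    }

    int main() {
        const double r = 0.5, g = 0.25, b = 0.75;  // arbitrary sample color
        // BT.601 coefficients, which this patch pairs with Display P3 content.
        std::printf("BT.601          Y' = %.4f\n", luma(0.299, 0.114, r, g, b));
        // Theoretical P3-derived coefficients quoted in the comment above.
        std::printf("theoretical P3  Y' = %.4f\n", luma(0.229, 0.079, r, g, b));
        return 0;
    }
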
 
diff --git a/media/libstagefright/foundation/ColorUtils_fill.cpp b/media/module/foundation/ColorUtils_fill.cpp
similarity index 100%
rename from media/libstagefright/foundation/ColorUtils_fill.cpp
rename to media/module/foundation/ColorUtils_fill.cpp
diff --git a/media/libstagefright/foundation/ColorUtils_ndk.cpp b/media/module/foundation/ColorUtils_ndk.cpp
similarity index 100%
rename from media/libstagefright/foundation/ColorUtils_ndk.cpp
rename to media/module/foundation/ColorUtils_ndk.cpp
diff --git a/media/libstagefright/foundation/FoundationUtils.cpp b/media/module/foundation/FoundationUtils.cpp
similarity index 100%
rename from media/libstagefright/foundation/FoundationUtils.cpp
rename to media/module/foundation/FoundationUtils.cpp
diff --git a/media/libstagefright/foundation/MODULE_LICENSE_APACHE2 b/media/module/foundation/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/foundation/MODULE_LICENSE_APACHE2
rename to media/module/foundation/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/foundation/MediaBuffer.cpp b/media/module/foundation/MediaBuffer.cpp
similarity index 100%
rename from media/libstagefright/foundation/MediaBuffer.cpp
rename to media/module/foundation/MediaBuffer.cpp
diff --git a/media/libstagefright/foundation/MediaBufferBase.cpp b/media/module/foundation/MediaBufferBase.cpp
similarity index 100%
rename from media/libstagefright/foundation/MediaBufferBase.cpp
rename to media/module/foundation/MediaBufferBase.cpp
diff --git a/media/libstagefright/foundation/MediaBufferGroup.cpp b/media/module/foundation/MediaBufferGroup.cpp
similarity index 100%
rename from media/libstagefright/foundation/MediaBufferGroup.cpp
rename to media/module/foundation/MediaBufferGroup.cpp
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
similarity index 96%
rename from media/libstagefright/foundation/MediaDefs.cpp
rename to media/module/foundation/MediaDefs.cpp
index 5c4ec17..7abab63 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -71,7 +71,10 @@
 const char *MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM = "audio/x-adpcm-dvi-ima";
 const char *MEDIA_MIMETYPE_AUDIO_DTS = "audio/vnd.dts";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_HD = "audio/vnd.dts.hd";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA = "audio/vnd.dts.hd;profile=dtsma";
 const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD = "audio/vnd.dts.uhd";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1 = "audio/vnd.dts.uhd;profile=p1";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2 = "audio/vnd.dts.uhd;profile=p2";
 const char *MEDIA_MIMETYPE_AUDIO_EVRC = "audio/evrc";
 const char *MEDIA_MIMETYPE_AUDIO_EVRCB = "audio/evrcb";
 const char *MEDIA_MIMETYPE_AUDIO_EVRCWB = "audio/evrcwb";
diff --git a/media/libstagefright/foundation/MediaKeys.cpp b/media/module/foundation/MediaKeys.cpp
similarity index 100%
rename from media/libstagefright/foundation/MediaKeys.cpp
rename to media/module/foundation/MediaKeys.cpp
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/module/foundation/MetaData.cpp
similarity index 100%
rename from media/libstagefright/foundation/MetaData.cpp
rename to media/module/foundation/MetaData.cpp
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/module/foundation/MetaDataBase.cpp
similarity index 100%
rename from media/libstagefright/foundation/MetaDataBase.cpp
rename to media/module/foundation/MetaDataBase.cpp
diff --git a/media/libstagefright/foundation/NOTICE b/media/module/foundation/NOTICE
similarity index 100%
rename from media/libstagefright/foundation/NOTICE
rename to media/module/foundation/NOTICE
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/module/foundation/OpusHeader.cpp
similarity index 100%
rename from media/libstagefright/foundation/OpusHeader.cpp
rename to media/module/foundation/OpusHeader.cpp
diff --git a/media/libstagefright/foundation/TEST_MAPPING b/media/module/foundation/TEST_MAPPING
similarity index 100%
rename from media/libstagefright/foundation/TEST_MAPPING
rename to media/module/foundation/TEST_MAPPING
diff --git a/media/libstagefright/foundation/avc_utils.cpp b/media/module/foundation/avc_utils.cpp
similarity index 100%
rename from media/libstagefright/foundation/avc_utils.cpp
rename to media/module/foundation/avc_utils.cpp
diff --git a/media/libstagefright/foundation/base64.cpp b/media/module/foundation/base64.cpp
similarity index 100%
rename from media/libstagefright/foundation/base64.cpp
rename to media/module/foundation/base64.cpp
diff --git a/media/libstagefright/foundation/hexdump.cpp b/media/module/foundation/hexdump.cpp
similarity index 100%
rename from media/libstagefright/foundation/hexdump.cpp
rename to media/module/foundation/hexdump.cpp
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AAtomizer.h b/media/module/foundation/include/media/stagefright/foundation/AAtomizer.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AAtomizer.h
rename to media/module/foundation/include/media/stagefright/foundation/AAtomizer.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ABase.h b/media/module/foundation/include/media/stagefright/foundation/ABase.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ABase.h
rename to media/module/foundation/include/media/stagefright/foundation/ABase.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ABitReader.h b/media/module/foundation/include/media/stagefright/foundation/ABitReader.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ABitReader.h
rename to media/module/foundation/include/media/stagefright/foundation/ABitReader.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ABuffer.h b/media/module/foundation/include/media/stagefright/foundation/ABuffer.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ABuffer.h
rename to media/module/foundation/include/media/stagefright/foundation/ABuffer.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AData.h b/media/module/foundation/include/media/stagefright/foundation/AData.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AData.h
rename to media/module/foundation/include/media/stagefright/foundation/AData.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h b/media/module/foundation/include/media/stagefright/foundation/ADebug.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
rename to media/module/foundation/include/media/stagefright/foundation/ADebug.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h b/media/module/foundation/include/media/stagefright/foundation/AHandler.h
similarity index 83%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
rename to media/module/foundation/include/media/stagefright/foundation/AHandler.h
index 337460a..c9e4f69 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AHandler.h
@@ -30,7 +30,10 @@
     AHandler()
         : mID(0),
           mVerboseStats(false),
-          mMessageCounter(0) {
+          mMessageCounter(0),
+          mDeliveringMessage(false),
+          mCurrentMessageWhat(0),
+          mCurrentMessageStartTimeUs(0) {
     }
 
     ALooper::handler_id id() const {
@@ -69,8 +72,17 @@
     uint64_t mMessageCounter;
     KeyedVector<uint32_t, uint32_t> mMessages;
 
+    Mutex mLock;
+    bool mDeliveringMessage;
+    uint32_t  mCurrentMessageWhat;
+    int64_t mCurrentMessageStartTimeUs;
+
     void deliverMessage(const sp<AMessage> &msg);
 
+    void setDeliveryStatus(bool, uint32_t, int64_t);
+    void getDeliveryStatus(bool&, uint32_t&, int64_t&);
+
     DISALLOW_EVIL_CONSTRUCTORS(AHandler);
 };
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandlerReflector.h b/media/module/foundation/include/media/stagefright/foundation/AHandlerReflector.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AHandlerReflector.h
rename to media/module/foundation/include/media/stagefright/foundation/AHandlerReflector.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ALookup.h b/media/module/foundation/include/media/stagefright/foundation/ALookup.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ALookup.h
rename to media/module/foundation/include/media/stagefright/foundation/ALookup.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ALooper.h b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
similarity index 85%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ALooper.h
rename to media/module/foundation/include/media/stagefright/foundation/ALooper.h
index 09c469b..60bda1f 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ALooper.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
@@ -59,6 +59,9 @@
     }
 
 protected:
+    // overridable by test harness
+    virtual int64_t getNowUs();
+
     virtual ~ALooper();
 
 private:
@@ -67,6 +70,7 @@
     struct Event {
         int64_t mWhenUs;
         sp<AMessage> mMessage;
+        sp<RefBase> mToken;
     };
 
     Mutex mLock;
@@ -87,9 +91,14 @@
 
     // START --- methods used only by AMessage
 
-    // posts a message on this looper with the given timeout
+    // Posts a message on this looper with the given timeout.
     void post(const sp<AMessage> &msg, int64_t delayUs);
 
+    // Post a message uniquely on this looper with the given timeout.
+    // This method ensures that there is exactly one message with the same token pending on
+    // this looper after the call returns. A null token will result in an EINVAL error status.
+    status_t postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs);
+
     // creates a reply token to be used with this looper
     sp<AReplyToken> createReplyToken();
     // waits for a response for the reply token.  If status is OK, the response
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ALooperRoster.h b/media/module/foundation/include/media/stagefright/foundation/ALooperRoster.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ALooperRoster.h
rename to media/module/foundation/include/media/stagefright/foundation/ALooperRoster.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
similarity index 96%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
rename to media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 960212a..7594565 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -141,6 +141,11 @@
 
     status_t post(int64_t delayUs = 0);
 
+    // Post a message uniquely to its target with the given timeout.
+    // This method ensures that there is exactly one message with the same token posted to its
+    // target after the call returns. A null token will result in an EINVAL error status.
+    status_t postUnique(const sp<RefBase> &token, int64_t delayUs = 0);
+
     // Posts the message to its target and waits for a response (or error)
     // before returning.
     status_t postAndAwaitResponse(sp<AMessage> *response);
@@ -194,6 +199,7 @@
     };
 
     size_t countEntries() const;
+    static size_t maxAllowedEntries();
     const char *getEntryNameAt(size_t index, Type *type) const;
 
     /**
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AString.h b/media/module/foundation/include/media/stagefright/foundation/AString.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AString.h
rename to media/module/foundation/include/media/stagefright/foundation/AString.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AStringUtils.h b/media/module/foundation/include/media/stagefright/foundation/AStringUtils.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AStringUtils.h
rename to media/module/foundation/include/media/stagefright/foundation/AStringUtils.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h b/media/module/foundation/include/media/stagefright/foundation/AUtils.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AUtils.h
rename to media/module/foundation/include/media/stagefright/foundation/AUtils.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AudioPresentationInfo.h b/media/module/foundation/include/media/stagefright/foundation/AudioPresentationInfo.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/AudioPresentationInfo.h
rename to media/module/foundation/include/media/stagefright/foundation/AudioPresentationInfo.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h b/media/module/foundation/include/media/stagefright/foundation/ByteUtils.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ByteUtils.h
rename to media/module/foundation/include/media/stagefright/foundation/ByteUtils.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
similarity index 97%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
rename to media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
index 72c8074..f4e89bb 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -57,7 +57,8 @@
         kColorStandardBT2020Constant =       7, // not in SDK
         kColorStandardBT470M =               8, // not in SDK
         kColorStandardFilm =                 9, // not in SDK
-        kColorStandardDCI_P3 =               10, // not in SDK, new in Android 8.0
+        kColorStandardDisplay_P3 =           10, // not in SDK, new in Android 8.0
+        kColorStandardDCI_P3 = kColorStandardDisplay_P3, // legacy (incorrect) name for Display P3
 
         /* This marks a section of color-standard values that are not supported by graphics HAL,
            but track defined color primaries-matrix coefficient combinations in media.
@@ -211,7 +212,7 @@
         case ColorUtils::kColorStandardBT2020Constant:       return "BT2020Constant";
         case ColorUtils::kColorStandardBT470M:               return "BT470M";
         case ColorUtils::kColorStandardFilm:                 return "Film";
-        case ColorUtils::kColorStandardDCI_P3:               return "DCI_P3";
+        case ColorUtils::kColorStandardDisplay_P3:           return "Display_P3";
         default:                                             return def;
     }
 }
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/FileDescriptor.h b/media/module/foundation/include/media/stagefright/foundation/FileDescriptor.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/FileDescriptor.h
rename to media/module/foundation/include/media/stagefright/foundation/FileDescriptor.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/Flagged.h b/media/module/foundation/include/media/stagefright/foundation/Flagged.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/Flagged.h
rename to media/module/foundation/include/media/stagefright/foundation/Flagged.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
similarity index 97%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
rename to media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index fb8c299..05ee7fc 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -73,7 +73,10 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA;
 extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2;
 extern const char *MEDIA_MIMETYPE_AUDIO_EVRC;
 extern const char *MEDIA_MIMETYPE_AUDIO_EVRCB;
 extern const char *MEDIA_MIMETYPE_AUDIO_EVRCWB;
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaKeys.h b/media/module/foundation/include/media/stagefright/foundation/MediaKeys.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/MediaKeys.h
rename to media/module/foundation/include/media/stagefright/foundation/MediaKeys.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/Mutexed.h b/media/module/foundation/include/media/stagefright/foundation/Mutexed.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/Mutexed.h
rename to media/module/foundation/include/media/stagefright/foundation/Mutexed.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/OpusHeader.h b/media/module/foundation/include/media/stagefright/foundation/OpusHeader.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/OpusHeader.h
rename to media/module/foundation/include/media/stagefright/foundation/OpusHeader.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/TypeTraits.h b/media/module/foundation/include/media/stagefright/foundation/TypeTraits.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/TypeTraits.h
rename to media/module/foundation/include/media/stagefright/foundation/TypeTraits.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h b/media/module/foundation/include/media/stagefright/foundation/avc_utils.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/avc_utils.h
rename to media/module/foundation/include/media/stagefright/foundation/avc_utils.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/base64.h b/media/module/foundation/include/media/stagefright/foundation/base64.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/base64.h
rename to media/module/foundation/include/media/stagefright/foundation/base64.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/hexdump.h b/media/module/foundation/include/media/stagefright/foundation/hexdump.h
similarity index 100%
rename from media/libstagefright/foundation/include/media/stagefright/foundation/hexdump.h
rename to media/module/foundation/include/media/stagefright/foundation/hexdump.h
diff --git a/media/libstagefright/foundation/tests/AData_test.cpp b/media/module/foundation/tests/AData_test.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/AData_test.cpp
rename to media/module/foundation/tests/AData_test.cpp
diff --git a/media/module/foundation/tests/AMessage_test.cpp b/media/module/foundation/tests/AMessage_test.cpp
new file mode 100644
index 0000000..08062e5
--- /dev/null
+++ b/media/module/foundation/tests/AMessage_test.cpp
@@ -0,0 +1,337 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AMessage_test"
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+using namespace android;
+
+using ::testing::InSequence;
+using ::testing::NiceMock;
+
+class LooperWithSettableClock : public ALooper {
+public:
+  LooperWithSettableClock() : mClockUs(0) {}
+
+  void setClockUs(int64_t nowUs) {
+    mClockUs = nowUs;
+  }
+
+  int64_t getNowUs() override {
+    return mClockUs;
+  }
+
+private:
+  int64_t mClockUs;
+};
+
+timespec millis100 = {0, 100L*1000*1000};
+
+class MockHandler : public AHandler {
+public:
+    MOCK_METHOD(void, onMessageReceived, (const sp<AMessage>&), (override));
+};
+
+TEST(AMessage_tests, countsAndLimits) {
+  sp<AMessage> m1 = new AMessage();
+
+  // clear, countEntries, maxAllowedEntries
+
+  EXPECT_EQ(0, m1->countEntries());
+
+  m1->setInt32("smaller", INT32_MAX - 2);
+  m1->setInt64("big", INT64_MAX);
+  m1->setString("bigBallOfString", "whatever");
+  EXPECT_EQ(3, m1->countEntries());
+
+  m1->clear();
+  EXPECT_EQ(0, m1->countEntries());
+
+  EXPECT_TRUE(m1->maxAllowedEntries() > 0);
+  EXPECT_TRUE(AMessage::maxAllowedEntries() > 0);
+
+  // static function, make sure we get a consistent answer
+  EXPECT_EQ(m1->maxAllowedEntries(), AMessage::maxAllowedEntries());
+}
+
+TEST(AMessage_tests, settersAndGetters) {
+  sp<AMessage> m1 = new AMessage();
+
+  m1->setInt32("value", 2);
+  m1->setInt32("bar", 3);
+
+  int32_t i32;
+  EXPECT_TRUE(m1->findInt32("value", &i32));
+  EXPECT_EQ(2, i32);
+
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+
+  m1->setInt64("big", INT64_MAX);
+  m1->setInt64("smaller", INT64_MAX - 2);
+  m1->setInt64("smallest", 257);
+
+  int64_t i64;
+  EXPECT_TRUE(m1->findInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+
+  EXPECT_TRUE(m1->findInt64("smaller", &i64));
+  EXPECT_EQ(INT64_MAX - 2, i64);
+
+  m1->setSize("size1", 257);
+  m1->setSize("size2", 1023);
+
+  size_t sizing;
+  EXPECT_TRUE(m1->findSize("size2", &sizing));
+  EXPECT_EQ(1023, sizing);
+  EXPECT_TRUE(m1->findSize("size1", &sizing));
+  EXPECT_EQ(257, sizing);
+
+  m1->setDouble("precise", 10.5);
+  m1->setDouble("small", 0.125);
+
+  double d;
+  EXPECT_TRUE(m1->findDouble("precise", &d));
+  EXPECT_EQ(10.5, d);
+
+  EXPECT_TRUE(m1->findDouble("small", &d));
+  EXPECT_EQ(0.125, d);
+
+  // should be unchanged from the top of the test
+  EXPECT_TRUE(m1->findInt32("bar", &i32));
+  EXPECT_EQ(3, i32);
+
+  EXPECT_FALSE(m1->findInt32("nonesuch", &i32));
+  EXPECT_FALSE(m1->findInt64("nonesuch2", &i64));
+  // types disagree, not found
+  EXPECT_FALSE(m1->findInt32("big", &i32));
+  EXPECT_FALSE(m1->findInt32("precise", &i32));
+
+  // integral types should come back true
+  EXPECT_TRUE(m1->findAsInt64("big", &i64));
+  EXPECT_EQ(INT64_MAX, i64);
+  EXPECT_TRUE(m1->findAsInt64("bar", &i64));
+  EXPECT_EQ(3, i64);
+  EXPECT_FALSE(m1->findAsInt64("precise", &i64));
+
+  // recovers ints, size, and floating point values
+  float value;
+  EXPECT_TRUE(m1->findAsFloat("value", &value));
+  EXPECT_EQ(2, value);
+  EXPECT_TRUE(m1->findAsFloat("smallest", &value));
+  EXPECT_EQ(257, value);
+  EXPECT_TRUE(m1->findAsFloat("size2", &value));
+  EXPECT_EQ(1023, value);
+  EXPECT_TRUE(m1->findAsFloat("precise", &value));
+  EXPECT_EQ(10.5, value);
+  EXPECT_TRUE(m1->findAsFloat("small", &value));
+  EXPECT_EQ(0.125, value);
+
+
+  // need to handle still:
+  // strings
+  // Object
+  // Buffer
+  // Message (nested)
+  //
+
+  // removal
+  m1->setInt32("shortlived", 2);
+  m1->setInt32("alittlelonger", 2);
+  EXPECT_EQ(OK, m1->removeEntryByName("shortlived"));
+  EXPECT_EQ(BAD_VALUE, m1->removeEntryByName(nullptr));
+  EXPECT_EQ(BAD_INDEX, m1->removeEntryByName("themythicalnonesuch"));
+  EXPECT_FALSE(m1->findInt32("shortlived", &i32));
+  EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
+
+  EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
+}
+
+TEST(AMessage_tests, deliversMultipleMessagesInOrderImmediately) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgNow1 = new AMessage(0, mockHandler);
+  msgNow1->post();
+  sp<AMessage> msgNow2 = new AMessage(0, mockHandler);
+  msgNow2->post();
+
+  {
+    InSequence inSequence;
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow1)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow2)).Times(1);
+  }
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverDelayedMessageImmediately) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgNow = new AMessage(0, mockHandler);
+  msgNow->post();
+  sp<AMessage> msgDelayed = new AMessage(0, mockHandler);
+  msgDelayed->post(100);
+
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+  // note: never called
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgDelayed)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedMessagesInSequence) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msgIn500 = new AMessage(0, mockHandler);
+  msgIn500->post(500);
+  sp<AMessage> msgNow = new AMessage(0, mockHandler);
+  msgNow->post();
+  sp<AMessage> msgIn100 = new AMessage(0, mockHandler);
+  msgIn100->post(100);
+  // not expected to be received
+  sp<AMessage> msgIn1000 = new AMessage(0, mockHandler);
+  msgIn1000->post(1000);
+
+  looper->setClockUs(500);
+  {
+    InSequence inSequence;
+
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgIn100)).Times(1);
+    EXPECT_CALL(*mockHandler, onMessageReceived(msgIn500)).Times(1);
+  }
+  // note: never called
+  EXPECT_CALL(*mockHandler, onMessageReceived(msgIn1000)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedUniqueMessage) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 50);
+
+  looper->setClockUs(50);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversImmediateUniqueMessage) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  // note: we don't need to set the clock, but we do want a stable clock that doesn't advance
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 0);
+
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverUniqueMessageAfterRescheduleLater) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 50);
+  msg->postUnique(msg, 100); // reschedule for later
+
+  looper->setClockUs(50); // if the message is correctly rescheduled, it should not be delivered
+  // Never called because the message was rescheduled to a later point in time
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(0);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversUniqueMessageAfterRescheduleEarlier) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->postUnique(msg, 100);
+  msg->postUnique(msg, 50); // reschedule to fire earlier
+
+  looper->setClockUs(50); // if the message is rescheduled correctly, it should be delivered
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversSameMessageTwice) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  msg->post(50);
+  msg->post(100);
+
+  looper->setClockUs(100);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(2);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+// When two messages are posted with the same token, only the most recently posted one is
+// delivered.
+TEST(AMessage_tests, deliversUniqueMessageOnce) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg1 = new AMessage(0, mockHandler);
+  msg1->postUnique(msg1, 50);
+  sp<AMessage> msg2 = new AMessage(0, mockHandler);
+  msg2->postUnique(msg1, 75); // note, using the same token as msg1
+
+  looper->setClockUs(100);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg1)).Times(0);
+  EXPECT_CALL(*mockHandler, onMessageReceived(msg2)).Times(1);
+  looper->start();
+  nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, postUnique_withNullToken_returnsInvalidArgument) {
+  sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+  sp<ALooper> looper = new ALooper();
+  looper->registerHandler(mockHandler);
+
+  sp<AMessage> msg = new AMessage(0, mockHandler);
+  EXPECT_EQ(msg->postUnique(nullptr, 0), -EINVAL);
+}
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h b/media/module/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
similarity index 100%
rename from media/libstagefright/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
rename to media/module/foundation/tests/AVCUtils/AVCUtilsTestEnvironment.h
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp b/media/module/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
similarity index 92%
rename from media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
rename to media/module/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
index 77a8599..57ac822 100644
--- a/media/libstagefright/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
+++ b/media/module/foundation/tests/AVCUtils/AVCUtilsUnitTest.cpp
@@ -19,6 +19,7 @@
 #include <utils/Log.h>
 
 #include <fstream>
+#include <memory>
 
 #include "media/stagefright/foundation/ABitReader.h"
 #include "media/stagefright/foundation/avc_utils.h"
@@ -151,10 +152,10 @@
     size_t fileSize = buf.st_size;
     ASSERT_NE(fileSize, 0) << "Invalid file size found";
 
-    const uint8_t *volBuffer = new uint8_t[fileSize];
+    std::unique_ptr<uint8_t[]> volBuffer(new uint8_t[fileSize]);
     ASSERT_NE(volBuffer, nullptr) << "Failed to allocate VOL buffer of size: " << fileSize;
 
-    inputFileStream.read((char *)(volBuffer), fileSize);
+    inputFileStream.read((char *)(volBuffer.get()), fileSize);
     ASSERT_EQ(inputFileStream.gcount(), fileSize)
             << "Failed to read complete file, bytes read: " << inputFileStream.gcount();
 
@@ -163,15 +164,13 @@
     int32_t volWidth = -1;
     int32_t volHeight = -1;
 
-    bool status = ExtractDimensionsFromVOLHeader(volBuffer, fileSize, &volWidth, &volHeight);
+    bool status = ExtractDimensionsFromVOLHeader(volBuffer.get(), fileSize, &volWidth, &volHeight);
     ASSERT_TRUE(status)
             << "Failed to get VOL dimensions from function: ExtractDimensionsFromVOLHeader()";
 
     ASSERT_EQ(volWidth, width) << "Expected width: " << width << "Found: " << volWidth;
 
     ASSERT_EQ(volHeight, height) << "Expected height: " << height << "Found: " << volHeight;
-
-    delete[] volBuffer;
 }
 
 TEST_P(AVCDimensionTest, DimensionTest) {
@@ -186,7 +185,8 @@
         stringLine >> type >> chunkLength;
         ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
 
-        const uint8_t *data = new uint8_t[chunkLength];
+        std::unique_ptr<uint8_t[]> dataArray(new uint8_t[chunkLength]);
+        const uint8_t *data = dataArray.get();
         ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
 
         const uint8_t *nalStart;
@@ -197,9 +197,11 @@
                 << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
 
         size_t smallBufferSize = kSmallBufferSize;
-        const uint8_t *sanityData = new uint8_t[smallBufferSize];
+        uint8_t sanityDataBuffer[smallBufferSize];
+        const uint8_t *sanityData = sanityDataBuffer;
         memcpy((void *)sanityData, (void *)data, smallBufferSize);
 
+        // sanityData may be advanced by getNextNALUnit(), but sanityDataBuffer is unaffected
+        // and needs no explicit cleanup.
         status_t result = getNextNALUnit(&sanityData, &smallBufferSize, &nalStart, &nalSize, true);
         ASSERT_EQ(result, -EAGAIN) << "Invalid result found when wrong NAL unit passed";
 
@@ -221,7 +223,6 @@
             ASSERT_EQ(avcHeight, mFrameHeight)
                     << "Expected height: " << mFrameHeight << "Found: " << avcHeight;
         }
-        delete[] data;
     }
     if (mNalUnitsExpected < 0) {
         ASSERT_GT(numNalUnits, 0) << "Failed to find an NAL Unit";
@@ -251,7 +252,8 @@
         accessUnitLength += chunkLength;
 
         if (!type.compare("SPS")) {
-            const uint8_t *data = new uint8_t[chunkLength];
+            std::unique_ptr<uint8_t[]> dataArray(new uint8_t[chunkLength]);
+            const uint8_t *data = dataArray.get();
             ASSERT_NE(data, nullptr) << "Failed to create a data buffer of size: " << chunkLength;
 
             const uint8_t *nalStart;
@@ -271,14 +273,13 @@
                 profile = nalStart[1];
                 level = nalStart[3];
             }
-            delete[] data;
         }
     }
-    const uint8_t *accessUnitData = new uint8_t[accessUnitLength];
+    std::unique_ptr<uint8_t[]> accessUnitData(new uint8_t[accessUnitLength]);
     ASSERT_NE(accessUnitData, nullptr) << "Failed to create a buffer of size: " << accessUnitLength;
 
     mInputFileStream.seekg(0, ios::beg);
-    mInputFileStream.read((char *)accessUnitData, accessUnitLength);
+    mInputFileStream.read((char *)accessUnitData.get(), accessUnitLength);
     ASSERT_EQ(mInputFileStream.gcount(), accessUnitLength)
             << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
 
@@ -286,7 +287,7 @@
     ASSERT_NE(accessUnit, nullptr)
             << "Failed to create an android data buffer of size: " << accessUnitLength;
 
-    memcpy(accessUnit->data(), accessUnitData, accessUnitLength);
+    memcpy(accessUnit->data(), accessUnitData.get(), accessUnitLength);
     sp<ABuffer> csdDataBuffer = MakeAVCCodecSpecificData(accessUnit, &avcWidth, &avcHeight);
     ASSERT_NE(csdDataBuffer, nullptr) << "No data returned from MakeAVCCodecSpecificData()";
 
@@ -306,7 +307,6 @@
     ASSERT_EQ(*(csdData + 3), level)
             << "Expected AVC level: " << level << " found: " << *(csdData + 3);
     csdDataBuffer.clear();
-    delete[] accessUnitData;
     accessUnit.clear();
 }
 
@@ -321,32 +321,31 @@
         stringLine >> type >> chunkLength >> frameLayerID;
         ASSERT_GT(chunkLength, 0) << "Length of the data chunk must be greater than zero";
 
-        char *data = new char[chunkLength];
+        std::unique_ptr<char[]> data(new char[chunkLength]);
         ASSERT_NE(data, nullptr) << "Failed to allocation data buffer of size: " << chunkLength;
 
-        mInputFileStream.read(data, chunkLength);
+        mInputFileStream.read(data.get(), chunkLength);
         ASSERT_EQ(mInputFileStream.gcount(), chunkLength)
                 << "Failed to read complete file, bytes read: " << mInputFileStream.gcount();
 
         if (!type.compare("IDR")) {
-            bool isIDR = IsIDR((uint8_t *)data, chunkLength);
+            bool isIDR = IsIDR((uint8_t *)data.get(), chunkLength);
             ASSERT_TRUE(isIDR);
 
-            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            layerID = FindAVCLayerId((uint8_t *)data.get(), chunkLength);
             ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
         } else if (!type.compare("P") || !type.compare("B")) {
             sp<ABuffer> accessUnit = new ABuffer(chunkLength);
             ASSERT_NE(accessUnit, nullptr) << "Unable to create access Unit";
 
-            memcpy(accessUnit->data(), data, chunkLength);
+            memcpy(accessUnit->data(), data.get(), chunkLength);
             bool isReferenceFrame = IsAVCReferenceFrame(accessUnit);
             ASSERT_TRUE(isReferenceFrame);
 
             accessUnit.clear();
-            layerID = FindAVCLayerId((uint8_t *)data, chunkLength);
+            layerID = FindAVCLayerId((uint8_t *)data.get(), chunkLength);
             ASSERT_EQ(layerID, frameLayerID) << "Wrong layer ID found";
         }
-        delete[] data;
     }
 }
 
diff --git a/media/libstagefright/foundation/tests/AVCUtils/Android.bp b/media/module/foundation/tests/AVCUtils/Android.bp
similarity index 100%
rename from media/libstagefright/foundation/tests/AVCUtils/Android.bp
rename to media/module/foundation/tests/AVCUtils/Android.bp
diff --git a/media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/foundation/tests/AVCUtils/AndroidTest.xml
rename to media/module/foundation/tests/AVCUtils/AndroidTest.xml
diff --git a/media/libstagefright/foundation/tests/AVCUtils/README.md b/media/module/foundation/tests/AVCUtils/README.md
similarity index 100%
rename from media/libstagefright/foundation/tests/AVCUtils/README.md
rename to media/module/foundation/tests/AVCUtils/README.md
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/module/foundation/tests/Android.bp
similarity index 96%
rename from media/libstagefright/foundation/tests/Android.bp
rename to media/module/foundation/tests/Android.bp
index e72ce43..c409dd2 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/module/foundation/tests/Android.bp
@@ -20,10 +20,14 @@
 
     shared_libs: [
         "liblog",
-        "libstagefright_foundation",
         "libutils",
     ],
 
+    static_libs: [
+        "libstagefright_foundation",
+        "libgmock",
+    ],
+
     srcs: [
         "AData_test.cpp",
         "AMessage_test.cpp",
diff --git a/media/libstagefright/foundation/tests/Base64_test.cpp b/media/module/foundation/tests/Base64_test.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/Base64_test.cpp
rename to media/module/foundation/tests/Base64_test.cpp
diff --git a/media/libstagefright/foundation/tests/Flagged_test.cpp b/media/module/foundation/tests/Flagged_test.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/Flagged_test.cpp
rename to media/module/foundation/tests/Flagged_test.cpp
diff --git a/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp b/media/module/foundation/tests/MetaDataBaseUnitTest.cpp
similarity index 96%
rename from media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
rename to media/module/foundation/tests/MetaDataBaseUnitTest.cpp
index 0aed4d2..b562e07 100644
--- a/media/libstagefright/foundation/tests/MetaDataBaseUnitTest.cpp
+++ b/media/module/foundation/tests/MetaDataBaseUnitTest.cpp
@@ -19,6 +19,7 @@
 #include <string.h>
 #include <sys/stat.h>
 #include <fstream>
+#include <memory>
 
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataBase.h>
@@ -48,18 +49,16 @@
 class MetaDataBaseUnitTest : public ::testing::Test {};
 
 TEST_F(MetaDataBaseUnitTest, CreateMetaDataBaseTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     // Testing copy constructor
-    MetaDataBase *metaDataCopy = metaData;
+    MetaDataBase *metaDataCopy = metaData.get();
     ASSERT_NE(metaDataCopy, nullptr) << "Failed to create meta data copy";
-
-    delete metaData;
 }
 
 TEST_F(MetaDataBaseUnitTest, SetAndFindDataTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     // Setting the different key-value pair type for first time, overwrite
@@ -142,12 +141,10 @@
     int32_t angle;
     status = metaData->findInt32(kKeyRotation, &angle);
     ASSERT_FALSE(status) << "Value for an invalid key is returned when the key is not set";
-
-    delete (metaData);
 }
 
 TEST_F(MetaDataBaseUnitTest, OverWriteFunctionalityTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     // set/set/read to check first overwrite operation
@@ -186,12 +183,10 @@
     status = metaData->findInt32(kKeyHeight, &height);
     ASSERT_TRUE(status) << "kKeyHeight key does not exists in metadata";
     ASSERT_EQ(height, kHeight3) << "Value of height is not overwritten";
-
-    delete (metaData);
 }
 
 TEST_F(MetaDataBaseUnitTest, RemoveKeyTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     bool status = metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
@@ -246,12 +241,10 @@
 
     metaData->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
     ASSERT_FALSE(status) << "Overwrite should be false since the metadata was cleared";
-
-    delete (metaData);
 }
 
 TEST_F(MetaDataBaseUnitTest, ConvertToStringTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     String8 info = metaData->toString();
@@ -281,8 +274,6 @@
     info = metaData->toString();
     ASSERT_EQ(info.length(), 0) << "MetaData length is non-zero after clearing it: "
                                 << info.length();
-
-    delete (metaData);
 }
 
 }  // namespace android
diff --git a/media/libstagefright/foundation/tests/OpusHeader/Android.bp b/media/module/foundation/tests/OpusHeader/Android.bp
similarity index 100%
rename from media/libstagefright/foundation/tests/OpusHeader/Android.bp
rename to media/module/foundation/tests/OpusHeader/Android.bp
diff --git a/media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml b/media/module/foundation/tests/OpusHeader/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/foundation/tests/OpusHeader/AndroidTest.xml
rename to media/module/foundation/tests/OpusHeader/AndroidTest.xml
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp b/media/module/foundation/tests/OpusHeader/OpusHeaderTest.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTest.cpp
rename to media/module/foundation/tests/OpusHeader/OpusHeaderTest.cpp
diff --git a/media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h b/media/module/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
similarity index 100%
rename from media/libstagefright/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
rename to media/module/foundation/tests/OpusHeader/OpusHeaderTestEnvironment.h
diff --git a/media/libstagefright/foundation/tests/OpusHeader/README.md b/media/module/foundation/tests/OpusHeader/README.md
similarity index 100%
rename from media/libstagefright/foundation/tests/OpusHeader/README.md
rename to media/module/foundation/tests/OpusHeader/README.md
diff --git a/media/libstagefright/foundation/tests/TypeTraits_test.cpp b/media/module/foundation/tests/TypeTraits_test.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/TypeTraits_test.cpp
rename to media/module/foundation/tests/TypeTraits_test.cpp
diff --git a/media/libstagefright/foundation/tests/Utils_test.cpp b/media/module/foundation/tests/Utils_test.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/Utils_test.cpp
rename to media/module/foundation/tests/Utils_test.cpp
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/module/foundation/tests/colorutils/Android.bp
similarity index 100%
rename from media/libstagefright/foundation/tests/colorutils/Android.bp
rename to media/module/foundation/tests/colorutils/Android.bp
diff --git a/media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp b/media/module/foundation/tests/colorutils/ColorUtilsTest.cpp
similarity index 100%
rename from media/libstagefright/foundation/tests/colorutils/ColorUtilsTest.cpp
rename to media/module/foundation/tests/colorutils/ColorUtilsTest.cpp
diff --git a/media/libstagefright/id3/Android.bp b/media/module/id3/Android.bp
similarity index 100%
rename from media/libstagefright/id3/Android.bp
rename to media/module/id3/Android.bp
diff --git a/media/libstagefright/id3/ID3.cpp b/media/module/id3/ID3.cpp
similarity index 100%
rename from media/libstagefright/id3/ID3.cpp
rename to media/module/id3/ID3.cpp
diff --git a/media/libstagefright/id3/MODULE_LICENSE_APACHE2 b/media/module/id3/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/id3/MODULE_LICENSE_APACHE2
rename to media/module/id3/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/id3/NOTICE b/media/module/id3/NOTICE
similarity index 100%
rename from media/libstagefright/id3/NOTICE
rename to media/module/id3/NOTICE
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/module/id3/TEST_MAPPING
similarity index 100%
rename from media/libstagefright/id3/TEST_MAPPING
rename to media/module/id3/TEST_MAPPING
diff --git a/media/libstagefright/id3/test/Android.bp b/media/module/id3/test/Android.bp
similarity index 100%
rename from media/libstagefright/id3/test/Android.bp
rename to media/module/id3/test/Android.bp
diff --git a/media/libstagefright/id3/test/AndroidTest.xml b/media/module/id3/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/id3/test/AndroidTest.xml
rename to media/module/id3/test/AndroidTest.xml
diff --git a/media/libstagefright/id3/test/ID3Test.cpp b/media/module/id3/test/ID3Test.cpp
similarity index 100%
rename from media/libstagefright/id3/test/ID3Test.cpp
rename to media/module/id3/test/ID3Test.cpp
diff --git a/media/libstagefright/id3/test/ID3TestEnvironment.h b/media/module/id3/test/ID3TestEnvironment.h
similarity index 100%
rename from media/libstagefright/id3/test/ID3TestEnvironment.h
rename to media/module/id3/test/ID3TestEnvironment.h
diff --git a/media/libstagefright/id3/test/README.md b/media/module/id3/test/README.md
similarity index 100%
rename from media/libstagefright/id3/test/README.md
rename to media/module/id3/test/README.md
diff --git a/media/libstagefright/id3/testid3.cpp b/media/module/id3/testid3.cpp
similarity index 100%
rename from media/libstagefright/id3/testid3.cpp
rename to media/module/id3/testid3.cpp
diff --git a/media/libmediaformatshaper/Android.bp b/media/module/libmediaformatshaper/Android.bp
similarity index 100%
rename from media/libmediaformatshaper/Android.bp
rename to media/module/libmediaformatshaper/Android.bp
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/module/libmediaformatshaper/CodecProperties.cpp
similarity index 100%
rename from media/libmediaformatshaper/CodecProperties.cpp
rename to media/module/libmediaformatshaper/CodecProperties.cpp
diff --git a/media/libmediaformatshaper/CodecProperties.h b/media/module/libmediaformatshaper/CodecProperties.h
similarity index 100%
rename from media/libmediaformatshaper/CodecProperties.h
rename to media/module/libmediaformatshaper/CodecProperties.h
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/module/libmediaformatshaper/CodecSeeding.cpp
similarity index 100%
rename from media/libmediaformatshaper/CodecSeeding.cpp
rename to media/module/libmediaformatshaper/CodecSeeding.cpp
diff --git a/media/libmediaformatshaper/FormatShaper.cpp b/media/module/libmediaformatshaper/FormatShaper.cpp
similarity index 100%
rename from media/libmediaformatshaper/FormatShaper.cpp
rename to media/module/libmediaformatshaper/FormatShaper.cpp
diff --git a/media/libmediaformatshaper/ManageShapingCodecs.cpp b/media/module/libmediaformatshaper/ManageShapingCodecs.cpp
similarity index 100%
rename from media/libmediaformatshaper/ManageShapingCodecs.cpp
rename to media/module/libmediaformatshaper/ManageShapingCodecs.cpp
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/module/libmediaformatshaper/VQApply.cpp
similarity index 100%
rename from media/libmediaformatshaper/VQApply.cpp
rename to media/module/libmediaformatshaper/VQApply.cpp
diff --git a/media/libmediaformatshaper/VQops.h b/media/module/libmediaformatshaper/VQops.h
similarity index 100%
rename from media/libmediaformatshaper/VQops.h
rename to media/module/libmediaformatshaper/VQops.h
diff --git a/media/libmediaformatshaper/VideoShaper.cpp b/media/module/libmediaformatshaper/VideoShaper.cpp
similarity index 100%
rename from media/libmediaformatshaper/VideoShaper.cpp
rename to media/module/libmediaformatshaper/VideoShaper.cpp
diff --git a/media/libmediaformatshaper/VideoShaper.h b/media/module/libmediaformatshaper/VideoShaper.h
similarity index 100%
rename from media/libmediaformatshaper/VideoShaper.h
rename to media/module/libmediaformatshaper/VideoShaper.h
diff --git a/media/libmediaformatshaper/exports.lds b/media/module/libmediaformatshaper/exports.lds
similarity index 100%
rename from media/libmediaformatshaper/exports.lds
rename to media/module/libmediaformatshaper/exports.lds
diff --git a/media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h b/media/module/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
similarity index 100%
rename from media/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
rename to media/module/libmediaformatshaper/include/media/formatshaper/FormatShaper.h
diff --git a/media/libmediatranscoding/.clang-format b/media/module/libmediatranscoding/.clang-format
similarity index 100%
rename from media/libmediatranscoding/.clang-format
rename to media/module/libmediatranscoding/.clang-format
diff --git a/media/libmediatranscoding/Android.bp b/media/module/libmediatranscoding/Android.bp
similarity index 100%
rename from media/libmediatranscoding/Android.bp
rename to media/module/libmediatranscoding/Android.bp
diff --git a/media/libmediatranscoding/OWNERS b/media/module/libmediatranscoding/OWNERS
similarity index 100%
rename from media/libmediatranscoding/OWNERS
rename to media/module/libmediatranscoding/OWNERS
diff --git a/media/libmediatranscoding/TEST_MAPPING b/media/module/libmediatranscoding/TEST_MAPPING
similarity index 100%
rename from media/libmediatranscoding/TEST_MAPPING
rename to media/module/libmediatranscoding/TEST_MAPPING
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/module/libmediatranscoding/TranscoderWrapper.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscoderWrapper.cpp
rename to media/module/libmediatranscoding/TranscoderWrapper.cpp
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/module/libmediatranscoding/TranscodingClientManager.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingClientManager.cpp
rename to media/module/libmediatranscoding/TranscodingClientManager.cpp
diff --git a/media/libmediatranscoding/TranscodingLogger.cpp b/media/module/libmediatranscoding/TranscodingLogger.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingLogger.cpp
rename to media/module/libmediatranscoding/TranscodingLogger.cpp
diff --git a/media/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingResourcePolicy.cpp
rename to media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/module/libmediatranscoding/TranscodingSessionController.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingSessionController.cpp
rename to media/module/libmediatranscoding/TranscodingSessionController.cpp
diff --git a/media/libmediatranscoding/TranscodingThermalPolicy.cpp b/media/module/libmediatranscoding/TranscodingThermalPolicy.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingThermalPolicy.cpp
rename to media/module/libmediatranscoding/TranscodingThermalPolicy.cpp
diff --git a/media/libmediatranscoding/TranscodingUidPolicy.cpp b/media/module/libmediatranscoding/TranscodingUidPolicy.cpp
similarity index 100%
rename from media/libmediatranscoding/TranscodingUidPolicy.cpp
rename to media/module/libmediatranscoding/TranscodingUidPolicy.cpp
diff --git a/media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl b/media/module/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
rename to media/module/libmediatranscoding/aidl/android/media/IMediaTranscodingService.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl b/media/module/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
rename to media/module/libmediatranscoding/aidl/android/media/ITranscodingClient.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl b/media/module/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
rename to media/module/libmediatranscoding/aidl/android/media/ITranscodingClientCallback.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingRequestParcel.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingResultParcel.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingSessionParcel.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingSessionPriority.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingSessionStats.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingTestConfig.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingType.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingType.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingType.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingType.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoCodecType.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingVideoCodecType.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingVideoCodecType.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingVideoCodecType.aidl
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl b/media/module/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
similarity index 100%
rename from media/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
rename to media/module/libmediatranscoding/aidl/android/media/TranscodingVideoTrackFormat.aidl
diff --git a/media/libmediatranscoding/build_and_run_all_unit_tests.sh b/media/module/libmediatranscoding/build_and_run_all_unit_tests.sh
similarity index 100%
rename from media/libmediatranscoding/build_and_run_all_unit_tests.sh
rename to media/module/libmediatranscoding/build_and_run_all_unit_tests.sh
diff --git a/media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h b/media/module/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
similarity index 100%
rename from media/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
rename to media/module/libmediatranscoding/include/media/AdjustableMaxPriorityQueue.h
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/module/libmediatranscoding/include/media/ControllerClientInterface.h
similarity index 100%
rename from media/libmediatranscoding/include/media/ControllerClientInterface.h
rename to media/module/libmediatranscoding/include/media/ControllerClientInterface.h
diff --git a/media/libmediatranscoding/include/media/ResourcePolicyInterface.h b/media/module/libmediatranscoding/include/media/ResourcePolicyInterface.h
similarity index 100%
rename from media/libmediatranscoding/include/media/ResourcePolicyInterface.h
rename to media/module/libmediatranscoding/include/media/ResourcePolicyInterface.h
diff --git a/media/libmediatranscoding/include/media/ThermalPolicyInterface.h b/media/module/libmediatranscoding/include/media/ThermalPolicyInterface.h
similarity index 100%
rename from media/libmediatranscoding/include/media/ThermalPolicyInterface.h
rename to media/module/libmediatranscoding/include/media/ThermalPolicyInterface.h
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/module/libmediatranscoding/include/media/TranscoderInterface.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscoderInterface.h
rename to media/module/libmediatranscoding/include/media/TranscoderInterface.h
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/module/libmediatranscoding/include/media/TranscoderWrapper.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscoderWrapper.h
rename to media/module/libmediatranscoding/include/media/TranscoderWrapper.h
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/module/libmediatranscoding/include/media/TranscodingClientManager.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingClientManager.h
rename to media/module/libmediatranscoding/include/media/TranscodingClientManager.h
diff --git a/media/libmediatranscoding/include/media/TranscodingDefs.h b/media/module/libmediatranscoding/include/media/TranscodingDefs.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingDefs.h
rename to media/module/libmediatranscoding/include/media/TranscodingDefs.h
diff --git a/media/libmediatranscoding/include/media/TranscodingLogger.h b/media/module/libmediatranscoding/include/media/TranscodingLogger.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingLogger.h
rename to media/module/libmediatranscoding/include/media/TranscodingLogger.h
diff --git a/media/libmediatranscoding/include/media/TranscodingRequest.h b/media/module/libmediatranscoding/include/media/TranscodingRequest.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingRequest.h
rename to media/module/libmediatranscoding/include/media/TranscodingRequest.h
diff --git a/media/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingResourcePolicy.h
rename to media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/module/libmediatranscoding/include/media/TranscodingSessionController.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingSessionController.h
rename to media/module/libmediatranscoding/include/media/TranscodingSessionController.h
diff --git a/media/libmediatranscoding/include/media/TranscodingThermalPolicy.h b/media/module/libmediatranscoding/include/media/TranscodingThermalPolicy.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingThermalPolicy.h
rename to media/module/libmediatranscoding/include/media/TranscodingThermalPolicy.h
diff --git a/media/libmediatranscoding/include/media/TranscodingUidPolicy.h b/media/module/libmediatranscoding/include/media/TranscodingUidPolicy.h
similarity index 100%
rename from media/libmediatranscoding/include/media/TranscodingUidPolicy.h
rename to media/module/libmediatranscoding/include/media/TranscodingUidPolicy.h
diff --git a/media/libmediatranscoding/include/media/UidPolicyInterface.h b/media/module/libmediatranscoding/include/media/UidPolicyInterface.h
similarity index 100%
rename from media/libmediatranscoding/include/media/UidPolicyInterface.h
rename to media/module/libmediatranscoding/include/media/UidPolicyInterface.h
diff --git a/media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp b/media/module/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
similarity index 100%
rename from media/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
rename to media/module/libmediatranscoding/tests/AdjustableMaxPriorityQueue_tests.cpp
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/module/libmediatranscoding/tests/Android.bp
similarity index 100%
rename from media/libmediatranscoding/tests/Android.bp
rename to media/module/libmediatranscoding/tests/Android.bp
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/module/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
similarity index 100%
rename from media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
rename to media/module/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
diff --git a/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp b/media/module/libmediatranscoding/tests/TranscodingLogger_tests.cpp
similarity index 100%
rename from media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
rename to media/module/libmediatranscoding/tests/TranscodingLogger_tests.cpp
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
similarity index 99%
rename from media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
rename to media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index ef9c4f8..fdd327f 100644
--- a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -337,6 +337,8 @@
         // Should have created new transcoder.
         EXPECT_EQ(mTranscoder->getGeneration(), generation);
         EXPECT_EQ(mTranscoder.use_count(), 2);
+        // b/240537336: Allow extra time to finish onError call
+        sleep(1);
     }
 
     void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/Video_4K_HEVC_10Frames_Audio.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/backyard_hevc_1920x1080_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/cubicle_avc_480x240_aac_24KHz.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/desk_hevc_1920x1080_aac_48KHz_rot90.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/jets_hevc_1280x720_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/longtest_15s.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_12Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/plex_hevc_3840x2160_20Mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4 b/media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
similarity index 100%
rename from media/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
rename to media/module/libmediatranscoding/tests/assets/TranscodingTestAssets/video_1280x720_hevc_hdr10_static_3mbps.mp4
Binary files differ
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/module/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
similarity index 100%
rename from media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
rename to media/module/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
diff --git a/media/libmediatranscoding/tests/push_assets.sh b/media/module/libmediatranscoding/tests/push_assets.sh
similarity index 100%
rename from media/libmediatranscoding/tests/push_assets.sh
rename to media/module/libmediatranscoding/tests/push_assets.sh
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/module/libmediatranscoding/transcoder/Android.bp
similarity index 100%
rename from media/libmediatranscoding/transcoder/Android.bp
rename to media/module/libmediatranscoding/transcoder/Android.bp
diff --git a/media/libmediatranscoding/transcoder/MediaSampleQueue.cpp b/media/module/libmediatranscoding/transcoder/MediaSampleQueue.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/MediaSampleQueue.cpp
rename to media/module/libmediatranscoding/transcoder/MediaSampleQueue.cpp
diff --git a/media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp b/media/module/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
rename to media/module/libmediatranscoding/transcoder/MediaSampleReaderNDK.cpp
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/module/libmediatranscoding/transcoder/MediaSampleWriter.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
rename to media/module/libmediatranscoding/transcoder/MediaSampleWriter.cpp
diff --git a/media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp b/media/module/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
rename to media/module/libmediatranscoding/transcoder/MediaTrackTranscoder.cpp
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/module/libmediatranscoding/transcoder/MediaTranscoder.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/MediaTranscoder.cpp
rename to media/module/libmediatranscoding/transcoder/MediaTranscoder.cpp
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/module/libmediatranscoding/transcoder/NdkCommon.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/NdkCommon.cpp
rename to media/module/libmediatranscoding/transcoder/NdkCommon.cpp
diff --git a/media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp b/media/module/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
rename to media/module/libmediatranscoding/transcoder/PassthroughTrackTranscoder.cpp
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
rename to media/module/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
diff --git a/media/libmediatranscoding/transcoder/benchmark/Android.bp b/media/module/libmediatranscoding/transcoder/benchmark/Android.bp
similarity index 100%
rename from media/libmediatranscoding/transcoder/benchmark/Android.bp
rename to media/module/libmediatranscoding/transcoder/benchmark/Android.bp
diff --git a/media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml b/media/module/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
similarity index 100%
rename from media/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
rename to media/module/libmediatranscoding/transcoder/benchmark/AndroidTestTemplate.xml
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp b/media/module/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
rename to media/module/libmediatranscoding/transcoder/benchmark/MediaSampleReaderBenchmark.cpp
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp b/media/module/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
rename to media/module/libmediatranscoding/transcoder/benchmark/MediaTrackTranscoderBenchmark.cpp
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/module/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
rename to media/module/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSample.h b/media/module/libmediatranscoding/transcoder/include/media/MediaSample.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaSample.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaSample.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h b/media/module/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaSampleQueue.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h b/media/module/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaSampleReader.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h b/media/module/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaSampleReaderNDK.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h b/media/module/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaSampleWriter.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h b/media/module/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaTrackTranscoder.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h b/media/module/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaTrackTranscoderCallback.h
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/module/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
rename to media/module/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/module/libmediatranscoding/transcoder/include/media/NdkCommon.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/NdkCommon.h
rename to media/module/libmediatranscoding/transcoder/include/media/NdkCommon.h
diff --git a/media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h b/media/module/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
rename to media/module/libmediatranscoding/transcoder/include/media/PassthroughTrackTranscoder.h
diff --git a/media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h b/media/module/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
rename to media/module/libmediatranscoding/transcoder/include/media/VideoTrackTranscoder.h
diff --git a/media/libmediatranscoding/transcoder/setloglevel.sh b/media/module/libmediatranscoding/transcoder/setloglevel.sh
similarity index 100%
rename from media/libmediatranscoding/transcoder/setloglevel.sh
rename to media/module/libmediatranscoding/transcoder/setloglevel.sh
diff --git a/media/libmediatranscoding/transcoder/tests/Android.bp b/media/module/libmediatranscoding/transcoder/tests/Android.bp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/Android.bp
rename to media/module/libmediatranscoding/transcoder/tests/Android.bp
diff --git a/media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml b/media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
rename to media/module/libmediatranscoding/transcoder/tests/AndroidTestTemplate.xml
diff --git a/media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp b/media/module/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/HdrTranscodeTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp b/media/module/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/MediaSampleQueueTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp b/media/module/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/MediaSampleReaderNDKTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp b/media/module/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/MediaSampleWriterTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/MediaTrackTranscoderTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/PassthroughTrackTranscoderTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/README.md b/media/module/libmediatranscoding/transcoder/tests/README.md
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/README.md
rename to media/module/libmediatranscoding/transcoder/tests/README.md
diff --git a/media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h b/media/module/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
rename to media/module/libmediatranscoding/transcoder/tests/TranscoderTestUtils.h
diff --git a/media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp b/media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
rename to media/module/libmediatranscoding/transcoder/tests/VideoTrackTranscoderTests.cpp
diff --git a/media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh b/media/module/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
rename to media/module/libmediatranscoding/transcoder/tests/build_and_run_all_unit_tests.sh
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp b/media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
rename to media/module/libmediatranscoding/transcoder/tests/fuzzer/Android.bp
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/README.md b/media/module/libmediatranscoding/transcoder/tests/fuzzer/README.md
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/fuzzer/README.md
rename to media/module/libmediatranscoding/transcoder/tests/fuzzer/README.md
diff --git a/media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp b/media/module/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
rename to media/module/libmediatranscoding/transcoder/tests/fuzzer/media_transcoder_fuzzer.cpp
diff --git a/media/libmediatranscoding/transcoder/tools/Android.bp b/media/module/libmediatranscoding/transcoder/tools/Android.bp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tools/Android.bp
rename to media/module/libmediatranscoding/transcoder/tools/Android.bp
diff --git a/media/libmediatranscoding/transcoder/tools/Transcode.cpp b/media/module/libmediatranscoding/transcoder/tools/Transcode.cpp
similarity index 100%
rename from media/libmediatranscoding/transcoder/tools/Transcode.cpp
rename to media/module/libmediatranscoding/transcoder/tools/Transcode.cpp
diff --git a/media/libwatchdog/Android.bp b/media/module/libwatchdog/Android.bp
similarity index 100%
rename from media/libwatchdog/Android.bp
rename to media/module/libwatchdog/Android.bp
diff --git a/media/libwatchdog/Watchdog.cpp b/media/module/libwatchdog/Watchdog.cpp
similarity index 100%
rename from media/libwatchdog/Watchdog.cpp
rename to media/module/libwatchdog/Watchdog.cpp
diff --git a/media/libwatchdog/include/watchdog/Watchdog.h b/media/module/libwatchdog/include/watchdog/Watchdog.h
similarity index 100%
rename from media/libwatchdog/include/watchdog/Watchdog.h
rename to media/module/libwatchdog/include/watchdog/Watchdog.h
diff --git a/media/module/metadatautils/Android.bp b/media/module/metadatautils/Android.bp
new file mode 100644
index 0000000..c77474f
--- /dev/null
+++ b/media/module/metadatautils/Android.bp
@@ -0,0 +1,46 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_static {
+    name: "libstagefright_metadatautils",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+    min_sdk_version: "29",
+
+    srcs: ["MetaDataUtils.cpp"],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+
+    header_libs: [
+        "libaudioclient_headers",
+        "libstagefright_headers",
+        "libstagefright_foundation_headers",
+        "media_ndk_headers",
+    ],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    export_include_dirs: ["include"],
+}
diff --git a/media/libstagefright/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
similarity index 100%
rename from media/libstagefright/MetaDataUtils.cpp
rename to media/module/metadatautils/MetaDataUtils.cpp
diff --git a/media/module/metadatautils/TEST_MAPPING b/media/module/metadatautils/TEST_MAPPING
new file mode 100644
index 0000000..21836a5
--- /dev/null
+++ b/media/module/metadatautils/TEST_MAPPING
@@ -0,0 +1,9 @@
+// mappings for frameworks/av/media/module/metadatautils
+{
+  // tests which require dynamic content
+  // invoke with: atest -- --enable-module-dynamic-download=true
+  // TODO(b/148094059): unit tests not allowed to download content
+  "dynamic-presubmit": [
+    { "name": "MetaDataUtilsTest" }
+  ]
+}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
similarity index 100%
rename from media/libstagefright/include/media/stagefright/MetaDataUtils.h
rename to media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
diff --git a/media/libstagefright/tests/metadatautils/Android.bp b/media/module/metadatautils/test/Android.bp
similarity index 93%
rename from media/libstagefright/tests/metadatautils/Android.bp
rename to media/module/metadatautils/test/Android.bp
index ecdf89b..21f38f6 100644
--- a/media/libstagefright/tests/metadatautils/Android.bp
+++ b/media/module/metadatautils/test/Android.bp
@@ -20,9 +20,6 @@
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_tests_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: [
-        "frameworks_av_media_libstagefright_tests_license",
-    ],
 }
 
 cc_test {
diff --git a/media/libstagefright/tests/metadatautils/AndroidTest.xml b/media/module/metadatautils/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/tests/metadatautils/AndroidTest.xml
rename to media/module/metadatautils/test/AndroidTest.xml
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp b/media/module/metadatautils/test/MetaDataUtilsTest.cpp
similarity index 96%
rename from media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
rename to media/module/metadatautils/test/MetaDataUtilsTest.cpp
index 9fd5fdb..7c35249 100644
--- a/media/libstagefright/tests/metadatautils/MetaDataUtilsTest.cpp
+++ b/media/module/metadatautils/test/MetaDataUtilsTest.cpp
@@ -19,9 +19,10 @@
 #include <utils/Log.h>
 
 #include <fstream>
+#include <memory>
 #include <string>
 
-#include <ESDS.h>
+#include <media/esds/ESDS.h>
 #include <media/NdkMediaFormat.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
@@ -228,7 +229,7 @@
     ASSERT_TRUE(status) << "Failed to get the mime type";
     ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_VIDEO_AVC);
 
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
 
     status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
@@ -264,7 +265,6 @@
     int32_t result = memcmp(csdAMediaFormatBuffer, csdMetaDataBaseBuffer, csdAMediaFormatSize);
     ASSERT_EQ(result, 0) << "CSD from AMediaFormat and MetaDataBase do not match";
 
-    delete metaData;
     AMediaFormat_delete(csdData);
 }
 
@@ -275,7 +275,7 @@
     bool status = MakeAVCCodecSpecificData(csdData, mInputBuffer, mInputBufferSize);
     ASSERT_FALSE(status) << "MakeAVCCodecSpecificData with AMediaFormat succeeds with invalid data";
 
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
 
     status = MakeAVCCodecSpecificData(*metaData, mInputBuffer, mInputBufferSize);
@@ -307,7 +307,7 @@
     ASSERT_TRUE(status) << "Failed to get the mime type";
     ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
 
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create MetaData Base";
 
     status = MakeAACCodecSpecificData(*metaData, mAacProfile, mAacSamplingFreqIndex,
@@ -356,11 +356,10 @@
     ASSERT_EQ(memcmpResult, 0) << "AMediaFormat and MetaDataBase CSDs do not match";
 
     AMediaFormat_delete(csdData);
-    delete metaData;
 }
 
 TEST_P(AacADTSTest, AacADTSValidationTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
@@ -380,12 +379,10 @@
     status = metaData->findCString(kKeyMIMEType, &mimeType);
     ASSERT_TRUE(status) << "Failed to get mime type";
     ASSERT_STREQ(mimeType, MEDIA_MIMETYPE_AUDIO_AAC);
-
-    delete metaData;
 }
 
 TEST_P(AacCSDValidateTest, AacInvalidInputTest) {
-    MetaDataBase *metaData = new MetaDataBase();
+    std::unique_ptr<MetaDataBase> metaData(new MetaDataBase());
     ASSERT_NE(metaData, nullptr) << "Failed to create meta data";
 
     bool status = MakeAACCodecSpecificData(*metaData, mInputBuffer, kAdtsCsdSize);
@@ -412,14 +409,14 @@
         istringstream dataStringLine(dataLine);
         dataStringLine >> comment;
 
-        char *buffer = strndup(comment.c_str(), commentLength);
+        std::unique_ptr<char[]> buffer(new char[commentLength]);
         ASSERT_NE(buffer, nullptr) << "Failed to allocate buffer of size: " << commentLength;
+        strncpy(buffer.get(), comment.c_str(), commentLength);
 
         AMediaFormat *fileMeta = AMediaFormat_new();
         ASSERT_NE(fileMeta, nullptr) << "Failed to create AMedia format";
 
-        parseVorbisComment(fileMeta, buffer, commentLength);
-        free(buffer);
+        parseVorbisComment(fileMeta, buffer.get(), commentLength);
 
         if (!strncasecmp(tag.c_str(), "ANDROID_HAPTIC", sizeof(tag))) {
             int32_t numChannelExpected = stoi(value);
diff --git a/media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h b/media/module/metadatautils/test/MetaDataUtilsTestEnvironment.h
similarity index 100%
rename from media/libstagefright/tests/metadatautils/MetaDataUtilsTestEnvironment.h
rename to media/module/metadatautils/test/MetaDataUtilsTestEnvironment.h
diff --git a/media/libstagefright/tests/metadatautils/README.md b/media/module/metadatautils/test/README.md
similarity index 100%
rename from media/libstagefright/tests/metadatautils/README.md
rename to media/module/metadatautils/test/README.md
diff --git a/services/minijail/Android.bp b/media/module/minijail/Android.bp
similarity index 100%
rename from services/minijail/Android.bp
rename to media/module/minijail/Android.bp
diff --git a/services/minijail/OWNERS b/media/module/minijail/OWNERS
similarity index 100%
rename from services/minijail/OWNERS
rename to media/module/minijail/OWNERS
diff --git a/services/minijail/TEST_MAPPING b/media/module/minijail/TEST_MAPPING
similarity index 100%
rename from services/minijail/TEST_MAPPING
rename to media/module/minijail/TEST_MAPPING
diff --git a/services/minijail/av_services_minijail_unittest.cpp b/media/module/minijail/av_services_minijail_unittest.cpp
similarity index 100%
rename from services/minijail/av_services_minijail_unittest.cpp
rename to media/module/minijail/av_services_minijail_unittest.cpp
diff --git a/services/minijail/minijail.cpp b/media/module/minijail/minijail.cpp
similarity index 100%
rename from services/minijail/minijail.cpp
rename to media/module/minijail/minijail.cpp
diff --git a/services/minijail/minijail.h b/media/module/minijail/minijail.h
similarity index 100%
rename from services/minijail/minijail.h
rename to media/module/minijail/minijail.h
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
similarity index 97%
rename from media/libstagefright/mpeg2ts/ATSParser.cpp
rename to media/module/mpeg2ts/ATSParser.cpp
index 1482072..6aeea3b 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -556,7 +556,15 @@
             if (descriptor_length > ES_info_length) {
                 return ERROR_MALFORMED;
             }
-            if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
+
+            // The DTS descriptor is used in the PSI PMT to identify streams which carry
+            // DTS audio (core only). If a DTS descriptor is present, DTS-HD or DTS-UHD
+            // descriptors shall not be present in the same ES_info descriptor loop.
+            if (descriptor_tag == DESCRIPTOR_DTS) {
+                info.mType = STREAMTYPE_DTS;
+                ES_info_length -= descriptor_length;
+                br->skipBits(descriptor_length * 8);
+            } else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
                 hasStreamCA = true;
                 streamCA.mSystemID = br->getBits(16);
                 streamCA.mPID = br->getBits(16) & 0x1fff;
@@ -575,6 +583,16 @@
                 if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
                     info.mTypeExt = EXT_DESCRIPTOR_DVB_AC4;
                     br->skipBits(descriptor_length * 8);
+                } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+                    // DTS-HD extended descriptor, which can accommodate core-only formats
+                    // as well as extension-only and core + extension combinations.
+                    info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_HD;
+                    br->skipBits(descriptor_length * 8);
+                } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+                    // The DTS-UHD descriptor is used in the PSI PMT to identify streams
+                    // which carry DTS-UHD audio
+                    info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_UHD;
+                    br->skipBits(descriptor_length * 8);
                 } else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
                            descriptor_length >= 1) {
                     // DVB BlueBook A038 Table 110
@@ -920,9 +938,17 @@
             mode = ElementaryStreamQueue::EAC3;
             break;
 
+        case STREAMTYPE_DTS:
+            mode = ElementaryStreamQueue::DTS;
+            break;
+
         case STREAMTYPE_PES_PRIVATE_DATA:
             if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4) {
                 mode = ElementaryStreamQueue::AC4;
+            } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+                mode = ElementaryStreamQueue::DTS_HD;
+            } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+                mode = ElementaryStreamQueue::DTS_UHD;
             }
             break;
 
@@ -1158,9 +1184,12 @@
         case STREAMTYPE_EAC3:
         case STREAMTYPE_AAC_ENCRYPTED:
         case STREAMTYPE_AC3_ENCRYPTED:
+        case STREAMTYPE_DTS:
             return true;
         case STREAMTYPE_PES_PRIVATE_DATA:
-            return mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4;
+            return (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4
+                    || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD
+                    || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD);
 
         default:
             return false;
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/module/mpeg2ts/Android.bp
similarity index 98%
rename from media/libstagefright/mpeg2ts/Android.bp
rename to media/module/mpeg2ts/Android.bp
index 283df1e..bf762c6 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/module/mpeg2ts/Android.bp
@@ -51,6 +51,7 @@
         "libmedia_datasource_headers",
         "libaudioclient_headers",
         "media_ndk_headers",
+        "libstagefright_headers",
         "libstagefright_foundation_headers",
     ],
 
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/module/mpeg2ts/AnotherPacketSource.cpp
similarity index 100%
rename from media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
rename to media/module/mpeg2ts/AnotherPacketSource.cpp
diff --git a/media/libstagefright/mpeg2ts/CasManager.cpp b/media/module/mpeg2ts/CasManager.cpp
similarity index 100%
rename from media/libstagefright/mpeg2ts/CasManager.cpp
rename to media/module/mpeg2ts/CasManager.cpp
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/module/mpeg2ts/ESQueue.cpp
similarity index 75%
rename from media/libstagefright/mpeg2ts/ESQueue.cpp
rename to media/module/mpeg2ts/ESQueue.cpp
index 192ba77..2dc7b0a 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/module/mpeg2ts/ESQueue.cpp
@@ -362,6 +362,436 @@
     return OK;
 }
 
+#define RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstream, size) \
+    do { \
+        if ((bitstream).numBitsLeft() < (size)) { \
+            ALOGE("Not enough bits left for further parsing"); \
+            return ERROR_MALFORMED; \
+        } \
+    } while (0)
+
+// Parse a DTS Digital Surround or DTS Express (LBR) stream header.
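+// "ptr"/"size" describe the candidate frame; on success "frameSize" is set to the frame
+// length in bytes and, when "metaData" is non-NULL, the MIME type, channel count and sample
+// rate are filled in. Returns ERROR_MALFORMED if the sync word is missing or the header is
+// truncated.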
+static status_t parseDTSHDSyncFrame(
+    const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+    static const unsigned channelCountTable[] = {1, 2, 2, 2, 2, 3, 3, 4,
+                                                 4, 5, 6, 6, 6, 7, 8, 8};
+    static const unsigned samplingRateTableCoreSS[] = {0, 8000, 16000, 32000, 0, 0, 11025, 22050,
+                                                       44100, 0, 0, 12000, 24000, 48000, 0, 0};
+    static const unsigned samplingRateTableExtSS[] = {8000, 16000, 32000, 64000, 128000,
+                                                      22050, 44100, 88200, 176400, 352800,
+                                                      12000, 24000, 48000, 96000, 192000, 384000};
+
+    const uint32_t DTSHD_SYNC_CORE_16BIT_BE = 0x7ffe8001;
+    const uint32_t DTSHD_SYNC_EXSS_16BIT_BE = 0x64582025;
+
+    uint32_t numChannels = 0, samplingRate = 0;
+    bool isLBR = false;
+
+    ABitReader bits(ptr, size);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+    uint32_t dtshdSyncWord = bits.getBits(32);
+
+    // Expecting DTS Digital Surround or DTS Express (LBR) streams only
+    if (dtshdSyncWord == DTSHD_SYNC_CORE_16BIT_BE) { // DTS Digital Surround Header
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 5 + 1 + 7 + 14 + 6 + 4 + 15 + 2));
+
+        // FTYPE, SHORT, CRC, NBLKS
+        bits.skipBits(1 + 5 + 1 + 7);
+
+        frameSize = bits.getBits(14) + 1;
+        uint32_t amode = bits.getBits(6);
+        uint32_t freqIndex = bits.getBits(4);
+
+        // RATE, FIXEDBIT, DYNF, TIMEF, AUXF, HDCD, EXT_AUDIO_ID, EXT_AUDIO, ASPF
+        bits.skipBits(5 + 1 + 1 + 1 + 1 + 1 + 3 + 1 + 1);
+
+        uint32_t lfeFlag = bits.getBits(2);
+        numChannels = (amode <= 15) ? channelCountTable[amode] : 0;
+        numChannels += ((lfeFlag == 1) || (lfeFlag == 2)) ? 1 : 0;
+        samplingRate = (freqIndex <= 15) ? samplingRateTableCoreSS[freqIndex] : 0;
+
+        isLBR = false;
+    } else if (dtshdSyncWord == DTSHD_SYNC_EXSS_16BIT_BE) { // DTS Express (LBR) Header
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 2 + 1));
+
+        uint32_t extHeadersize, extSSFsize;
+        uint32_t numAudioPresent = 1, numAssets = 1;
+        uint32_t nuActiveExSSMask[8];
+
+        // userDefinedBits
+        bits.skipBits(8);
+
+        uint32_t extSSIndex = bits.getBits(2);
+        uint32_t headerSizeType = bits.getBits(1);
+
+        if (headerSizeType == 0) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 16));
+
+            extHeadersize = bits.getBits(8) + 1;
+            extSSFsize = bits.getBits(16) + 1;
+        } else {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (12 + 20));
+
+            extHeadersize = bits.getBits(12) + 1;
+            extSSFsize = bits.getBits(20) + 1;
+        }
+
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1));
+
+        uint32_t staticFieldsPresent = bits.getBits(1);
+
+        if (staticFieldsPresent) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 3 + 1));
+
+            // nuRefClockCode, nuExSSFrameDurationCode
+            bits.skipBits(2 + 3);
+
+            if (bits.getBits(1)) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+                bits.skipBits(32 + 4);
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (3 + 3));
+
+            // numAudioPresent, numAssets
+            bits.skipBits(3 + 3);
+
+            for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (extSSIndex + 1));
+
+                nuActiveExSSMask[nAuPr] = bits.getBits(extSSIndex + 1);
+            }
+
+            for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+                for (uint32_t nSS = 0; nSS < extSSIndex + 1; nSS++) {
+                    if (((nuActiveExSSMask[nAuPr] >> nSS) & 0x1) == 1) {
+                        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 8);
+
+                        // nuActiveAssetMask
+                        bits.skipBits(8);
+                    }
+                }
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+            // bMixMetadataEnbl
+            if (bits.getBits(1)) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 2 + 2));
+
+                // nuMixMetadataAdjLevel
+                bits.skipBits(2);
+
+                uint32_t bits4MixOutMask = (bits.getBits(2) + 1) << 2;
+                uint32_t numMixOutConfigs = bits.getBits(2) + 1;
+
+                for (int ns = 0; ns < numMixOutConfigs; ns++) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4MixOutMask);
+
+                    // nuMixOutChMask
+                    bits.skipBits(bits4MixOutMask);
+                }
+            }
+        }
+
+        for (int nAst = 0; nAst < numAssets; nAst++) {
+            int bits4ExSSFsize = (headerSizeType == 0) ? 16 : 20;
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4ExSSFsize);
+
+            bits.skipBits(bits4ExSSFsize);
+        }
+
+        /* Asset descriptor */
+        for (int nAst = 0; nAst < numAssets; nAst++) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (9 + 3));
+
+            // nuAssetDescriptFsize, nuAssetIndex
+            bits.skipBits(9 + 3);
+
+            if (staticFieldsPresent) {
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bAssetTypeDescrPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 4);
+
+                    // nuAssetTypeDescriptor
+                    bits.skipBits(4);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bLanguageDescrPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 24);
+
+                    // LanguageDescriptor
+                    bits.skipBits(24);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+                // bInfoTextPresent
+                if (bits.getBits(1)) {
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 10);
+
+                    uint32_t nuInfoTextByteSize = bits.getBits(10) + 1;
+
+                    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (nuInfoTextByteSize * 8));
+
+                    // InfoTextString
+                    bits.skipBits(nuInfoTextByteSize * 8);
+                }
+
+                RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (5 + 4 + 8));
+
+                // nuBitResolution
+                bits.skipBits(5);
+
+                samplingRate = samplingRateTableExtSS[bits.getBits(4)];
+                numChannels = bits.getBits(8) + 1;
+            }
+        }
+
+        frameSize = extHeadersize + extSSFsize;
+        isLBR = true;
+    } else {
+        ALOGE("No valid sync word in DTS/DTSHD header");
+        return ERROR_MALFORMED;
+    }
+
+    if (metaData != NULL) {
+        if (isLBR) {
+            (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_HD);
+            (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_HDProfileLBR
+        } else {
+            (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS);
+        }
+        (*metaData)->setInt32(kKeyChannelCount, numChannels);
+        (*metaData)->setInt32(kKeySampleRate, samplingRate);
+    }
+    return OK;
+}
+
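+// Decodes a variable-length field: up to three prefix bits select an entry of "ucTable",
+// which gives the number of payload bits to read for the value. When "extractAndAddFlag"
+// is set, the value is additionally offset by the sum of 2^ucTable[i] over all
+// lower-indexed entries. The reader passed in is never advanced; the caller must skip
+// "*bitsUsed" bits itself.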
+static status_t extractVarLenBitFields(
+    ABitReader *bits, size_t *bitsUsed, uint32_t *value,
+    unsigned ucTable[], bool extractAndAddFlag) {
+
+    static const unsigned bitsUsedTbl[8] = {1, 1, 1, 1, 2, 2, 3, 3}; // prefix code lengths
+    static const unsigned indexTbl[8] = {0, 0, 0, 0, 1, 1, 2, 3}; // code to prefix code index map
+
+    /* Clone the bitstream */
+    ABitReader bitStream(bits->data(), bits->numBitsLeft() / 8);
+    ABitReader bitstreamClone(bits->data(), bits->numBitsLeft() / 8);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstreamClone, 3);
+
+    unsigned code = bitstreamClone.getBits(3);
+    unsigned totalBitsUsed = bitsUsedTbl[code];
+    unsigned unIndex = indexTbl[code];
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, totalBitsUsed);
+
+    bitStream.skipBits(totalBitsUsed);
+
+    uint32_t unValue = 0;
+    if (ucTable[unIndex] > 0) {
+        if (extractAndAddFlag) {
+            for (unsigned un = 0; un < unIndex; un++) {
+                unValue += (1 << ucTable[un]);
+            }
+
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+            unValue += bitStream.getBits(ucTable[unIndex]);
+            totalBitsUsed += ucTable[unIndex];
+        } else {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+            unValue += bitStream.getBits(ucTable[unIndex]);
+            totalBitsUsed += ucTable[unIndex];
+        }
+    }
+
+    *bitsUsed = (size_t)totalBitsUsed;
+    *value = unValue;
+    return OK;
+}
+
+// Parse DTS UHD Profile-2 stream header
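+// Walks the frame table of contents (FTOC) defined in ETSI TS 103 491: sync word, FTOC
+// payload size, optional static fields, metadata chunk sizes and audio chunk sizes.
+// "frameSize" becomes the FTOC payload size plus the summed chunk payload sizes; when
+// "metaData" is non-NULL it receives the MIME type, profile, channel count and sample rate.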
+static status_t parseDTSUHDSyncFrame(
+    const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+
+    static const uint32_t DTSUHD_SYNC_CORE_16BIT_BE = 0x40411BF2;
+    static const uint32_t DTSUHD_NONSYNC_CORE_16BIT_BE = 0x71C442E8;
+
+    unsigned audioSamplRate = 0;
+
+    ABitReader bits(ptr, size);
+
+    RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+
+    uint32_t syncWord = bits.getBits(32);
+
+    bool isSyncFrameFlag = false;
+    switch (syncWord) {
+        case DTSUHD_SYNC_CORE_16BIT_BE:
+            isSyncFrameFlag = true;
+            break;
+        case DTSUHD_NONSYNC_CORE_16BIT_BE:
+            isSyncFrameFlag = false;
+            break;
+        default:
+            ALOGE("No valid sync word in DTSUHD header");
+            return ERROR_MALFORMED; // invalid sync word
+    }
+
+    unsigned uctable1[4] = { 5, 8, 10, 12 };
+    uint32_t sizeOfFTOCPayload = 0;
+    size_t nuBitsUsed = 0;
+    status_t status = OK;
+
+    status = extractVarLenBitFields(&bits, &nuBitsUsed, &sizeOfFTOCPayload, uctable1, true);
+
+    if (status != OK) {
+        ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+        return ERROR_MALFORMED;
+    }
+
+    bits.skipBits(nuBitsUsed);
+
+    if (isSyncFrameFlag) {
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 2 + 3 + 2 + 1));
+
+        // FullChannelBasedMixFlag, ETSI TS 103 491 V1.2.1, Section 6.4.6.1
+        if (!(bits.getBits(1))) {
+            // This implementation only supports full channel mask-based
+            // audio presentation (i.e. 2.0, 5.1, 11.1 mix without objects)
+            ALOGE("Objects not supported, only DTSUHD full channel mask-based mix");
+            return ERROR_MALFORMED;
+        }
+
+        // BaseDuration, FrameDuration
+        bits.skipBits(2 + 3);
+
+        unsigned clockRateIndex = bits.getBits(2);
+        unsigned clockRateHertz = 0;
+
+        switch (clockRateIndex) {
+            case 0:
+                clockRateHertz = 32000;
+                break;
+            case 1:
+                clockRateHertz = 44100;
+                break;
+            case 2:
+                clockRateHertz = 48000;
+                break;
+            default:
+                ALOGE("Invalid clockRateIndex in DTSUHD header");
+                return ERROR_MALFORMED;
+        }
+
+        if (bits.getBits(1)) {
+            RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+            bits.skipBits(32 + 4);
+        }
+
+        RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 2);
+
+        unsigned samplRateMultiplier = (1 << bits.getBits(2));
+        audioSamplRate = clockRateHertz * samplRateMultiplier;
+    }
+
+    uint32_t chunkPayloadBytes = 0;
+    int numOfMDChunks = isSyncFrameFlag ? 1 : 0; // Metadata chunks
+    for (int nmdc = 0; nmdc < numOfMDChunks; nmdc++) {
+        unsigned uctable2[4] = {6, 9, 12, 15};
+        uint32_t nuMDChunkSize = 0;
+        nuBitsUsed = 0;
+
+        status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuMDChunkSize, uctable2, true);
+        if (status != OK) {
+            ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+
+        bits.skipBits(nuBitsUsed);
+
+        if (nuMDChunkSize > 32767) {
+            ALOGE("Unsupported metadata chunk size in DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+        chunkPayloadBytes += nuMDChunkSize;
+    }
+
+    // Only one audio chunk is supported
+    int numAudioChunks = 1;
+    for (int nac = 0; nac < numAudioChunks; nac++) {
+        uint32_t acID = 256, nuAudioChunkSize = 0;
+
+        // isSyncFrameFlag means that ACID is present
+        if (isSyncFrameFlag) {
+            unsigned uctable3[4] = {2, 4, 6, 8};
+            nuBitsUsed = 0;
+
+            status = extractVarLenBitFields(&bits, &nuBitsUsed, &acID, uctable3, true);
+
+            if (status != OK) {
+                ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+                return ERROR_MALFORMED;
+            }
+
+            bits.skipBits(nuBitsUsed);
+        }
+
+        nuBitsUsed = 0;
+        if (acID == 0) {
+            nuAudioChunkSize = 0;
+        } else {
+            unsigned uctable4[4] = {9, 11, 13, 16};
+
+            status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuAudioChunkSize, uctable4, true);
+
+            if (status != OK) {
+                ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+                return ERROR_MALFORMED;
+            }
+        }
+
+        if (nuAudioChunkSize > 65535) {
+            ALOGE("Unsupported audio chunk size in DTSUHD header");
+            return ERROR_MALFORMED;
+        }
+
+        chunkPayloadBytes += nuAudioChunkSize;
+    }
+
+    frameSize = (sizeOfFTOCPayload + 1) + chunkPayloadBytes;
+
+    if (metaData != NULL) {
+        (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_UHD);
+        (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_UHDProfileP2
+        (*metaData)->setInt32(kKeyChannelCount, 2); // Setting default channel count as stereo
+        (*metaData)->setInt32(kKeySampleRate, audioSamplRate);
+    }
+
+    return OK;
+}
+
+static status_t isSeeminglyValidDTSHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+    return parseDTSHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
+static status_t isSeeminglyValidDTSUHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+    return parseDTSUHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
 static status_t IsSeeminglyValidAC4Header(const uint8_t *ptr, size_t size, unsigned &frameSize) {
     return parseAC4SyncFrame(ptr, size, frameSize, NULL);
 }
@@ -655,6 +1085,70 @@
                 break;
             }
 
+            case DTS: //  Checking for DTS or DTS-HD syncword
+            case DTS_HD:
+            {
+                uint8_t *ptr = (uint8_t *)data;
+                unsigned frameSize = 0;
+                ssize_t startOffset = -1;
+
+                for (size_t i = 0; i < size; ++i) {
+                    if (isSeeminglyValidDTSHDHeader(&ptr[i], size - i, frameSize) == OK) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+                if (startOffset > 0) {
+                    ALOGI("found something resembling a DTS-HD syncword at "
+                          "offset %zd",
+                          startOffset);
+                }
+
+                if (frameSize != size - startOffset) {
+                    ALOGV("DTS-HD frame size is %u bytes, while the buffer size is %zd bytes.",
+                          frameSize, size - startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+                break;
+            }
+
+            case DTS_UHD:
+            {
+                uint8_t *ptr = (uint8_t *)data;
+                ssize_t startOffset = -1;
+                unsigned frameSize = 0;
+
+                for (size_t i = 0; i < size; ++i) {
+                    if (isSeeminglyValidDTSUHDHeader(&ptr[i], size - i, frameSize) == OK) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+                if (startOffset > 0) {
+                    ALOGI("found something resembling a DTS-UHD syncword at "
+                          "offset %zd",
+                          startOffset);
+                }
+
+                if (frameSize != size - startOffset) {
+                    ALOGV("DTS-UHD frame size is %u bytes, while the buffer size is %zd bytes.",
+                          frameSize, size - startOffset);
+                }
+                data = &ptr[startOffset];
+                size -= startOffset;
+                break;
+            }
+
             case PCM_AUDIO:
             case METADATA:
             {
@@ -928,6 +1422,11 @@
             return dequeueAccessUnitPCMAudio();
         case METADATA:
             return dequeueAccessUnitMetadata();
+        case DTS: // Using same dequeue function for both DTS and DTS-HD types.
+        case DTS_HD:
+            return dequeueAccessUnitDTSOrDTSHD();
+        case DTS_UHD:
+            return dequeueAccessUnitDTSUHD();
         default:
             if (mMode != MPEG_AUDIO) {
                 ALOGE("Unknown mode");
@@ -937,6 +1436,113 @@
     }
 }
 
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSOrDTSHD() {
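+    // Scan mBuffer for the next DTS/DTS-HD sync frame, wait until the whole frame has been
+    // buffered, then emit everything up to and including that frame as one access unit and
+    // drop the consumed bytes from mBuffer.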
+    unsigned syncStartPos = 0; // in bytes
+    unsigned payloadSize = 0;
+    sp<MetaData> format = new MetaData;
+
+    ALOGV("dequeueAccessUnitDTSOrDTSHD[%d]: mBuffer %p(%zu)", mAUIndex,
+          mBuffer->data(), mBuffer->size());
+
+    while (true) {
+        if (syncStartPos + 4 >= mBuffer->size()) {
+            return NULL;
+        }
+        uint8_t *ptr = mBuffer->data() + syncStartPos;
+        size_t size = mBuffer->size() - syncStartPos;
+        status_t status = parseDTSHDSyncFrame(ptr, size, payloadSize, &format);
+        if (status == 0) {
+            break;
+        }
+        ++syncStartPos;
+    }
+
+    if (mBuffer->size() < syncStartPos + payloadSize) {
+        ALOGV("Not enough buffer size for DTS/DTS-HD");
+        return NULL;
+    }
+
+    if (mFormat == NULL) {
+        mFormat = format;
+    }
+
+    int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+    if (timeUs < 0LL) {
+        ALOGE("negative timeUs");
+        return NULL;
+    }
+    mAUIndex++;
+
+    sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+    memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+    accessUnit->meta()->setInt64("timeUs", timeUs);
+    accessUnit->meta()->setInt32("isSync", 1);
+
+    memmove(
+        mBuffer->data(),
+        mBuffer->data() + syncStartPos + payloadSize,
+        mBuffer->size() - syncStartPos - payloadSize);
+
+    mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+    return accessUnit;
+}
+
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSUHD()
+{
+    unsigned syncStartPos = 0; // in bytes
+    unsigned payloadSize = 0;
+    sp<MetaData> format = new MetaData;
+
+    ALOGV("dequeueAccessUnitDTSUHD[%d]: mBuffer %p(%zu)", mAUIndex,
+          mBuffer->data(), mBuffer->size());
+
+    while (true) {
+        if (syncStartPos + 4 >= mBuffer->size()) {
+            return NULL;
+        }
+        uint8_t *ptr = mBuffer->data() + syncStartPos;
+        size_t size = mBuffer->size() - syncStartPos;
+        status_t status = parseDTSUHDSyncFrame(ptr, size, payloadSize, &format);
+        if (status == 0) {
+            break;
+        }
+        ++syncStartPos;
+    }
+
+    if (mBuffer->size() < syncStartPos + payloadSize) {
+        ALOGV("Not enough buffer size for DTS-UHD");
+        return NULL;
+    }
+
+    if (mFormat == NULL) {
+        mFormat = format;
+    }
+
+    int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+    if (timeUs < 0LL) {
+        ALOGE("negative timeUs");
+        return NULL;
+    }
+    mAUIndex++;
+
+    sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+    memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+    accessUnit->meta()->setInt64("timeUs", timeUs);
+    accessUnit->meta()->setInt32("isSync", 1);
+
+    memmove(
+        mBuffer->data(),
+        mBuffer->data() + syncStartPos + payloadSize,
+        mBuffer->size() - syncStartPos - payloadSize);
+
+    mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+    return accessUnit;
+}
+
 sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitEAC3() {
     unsigned syncStartPos = 0;  // in bytes
     unsigned payloadSize = 0;
diff --git a/media/libstagefright/mpeg2ts/HlsSampleDecryptor.cpp b/media/module/mpeg2ts/HlsSampleDecryptor.cpp
similarity index 100%
rename from media/libstagefright/mpeg2ts/HlsSampleDecryptor.cpp
rename to media/module/mpeg2ts/HlsSampleDecryptor.cpp
diff --git a/media/libstagefright/mpeg2ts/MODULE_LICENSE_APACHE2 b/media/module/mpeg2ts/MODULE_LICENSE_APACHE2
similarity index 100%
rename from media/libstagefright/mpeg2ts/MODULE_LICENSE_APACHE2
rename to media/module/mpeg2ts/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/mpeg2ts/NOTICE b/media/module/mpeg2ts/NOTICE
similarity index 100%
rename from media/libstagefright/mpeg2ts/NOTICE
rename to media/module/mpeg2ts/NOTICE
diff --git a/media/libstagefright/mpeg2ts/TEST_MAPPING b/media/module/mpeg2ts/TEST_MAPPING
similarity index 100%
rename from media/libstagefright/mpeg2ts/TEST_MAPPING
rename to media/module/mpeg2ts/TEST_MAPPING
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/ATSParser.h b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
similarity index 96%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/ATSParser.h
rename to media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
index 49578d3..b658c5a 100644
--- a/media/libstagefright/mpeg2ts/include/mpeg2ts/ATSParser.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
@@ -157,6 +157,9 @@
         STREAMTYPE_LPCM_AC3             = 0x83,
         STREAMTYPE_EAC3                 = 0x87,
 
+        // DTS audio stream type which contains only the core substream
+        STREAMTYPE_DTS                  = 0x8A,
+
         //Sample Encrypted types
         STREAMTYPE_H264_ENCRYPTED       = 0xDB,
         STREAMTYPE_AAC_ENCRYPTED        = 0xCF,
@@ -168,6 +171,7 @@
         DESCRIPTOR_CA                   = 0x09,
 
         // DVB BlueBook A038 Table 12
+        DESCRIPTOR_DTS                  = 0x7B,
         DESCRIPTOR_DVB_EXTENSION        = 0x7F,
     };
 
@@ -175,6 +179,8 @@
     enum {
         EXT_DESCRIPTOR_DVB_AC4                  = 0x15,
         EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION   = 0x19,
+        EXT_DESCRIPTOR_DVB_DTS_HD               = 0x0E,
+        EXT_DESCRIPTOR_DVB_DTS_UHD              = 0x21,
         EXT_DESCRIPTOR_DVB_RESERVED_MAX         = 0x7F,
     };
 
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h b/media/module/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h
rename to media/module/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/CasManager.h b/media/module/mpeg2ts/include/mpeg2ts/CasManager.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/CasManager.h
rename to media/module/mpeg2ts/include/mpeg2ts/CasManager.h
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/ESQueue.h b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
similarity index 96%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/ESQueue.h
rename to media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
index a06bd6a..550a0e4 100644
--- a/media/libstagefright/mpeg2ts/include/mpeg2ts/ESQueue.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
@@ -45,6 +45,9 @@
         MPEG4_VIDEO,
         PCM_AUDIO,
         METADATA,
+        DTS,
+        DTS_HD,
+        DTS_UHD,
     };
 
     enum Flags {
@@ -125,6 +128,8 @@
     sp<ABuffer> dequeueAccessUnitMPEG4Video();
     sp<ABuffer> dequeueAccessUnitPCMAudio();
     sp<ABuffer> dequeueAccessUnitMetadata();
+    sp<ABuffer> dequeueAccessUnitDTSOrDTSHD();
+    sp<ABuffer> dequeueAccessUnitDTSUHD();
 
     // consume a logical (compressed) access unit of size "size",
     // returns its timestamp in us (or -1 if no time information).
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h b/media/module/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h
rename to media/module/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h
diff --git a/media/libstagefright/mpeg2ts/include/mpeg2ts/SampleDecryptor.h b/media/module/mpeg2ts/include/mpeg2ts/SampleDecryptor.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/include/mpeg2ts/SampleDecryptor.h
rename to media/module/mpeg2ts/include/mpeg2ts/SampleDecryptor.h
diff --git a/media/libstagefright/mpeg2ts/test/Android.bp b/media/module/mpeg2ts/test/Android.bp
similarity index 100%
rename from media/libstagefright/mpeg2ts/test/Android.bp
rename to media/module/mpeg2ts/test/Android.bp
diff --git a/media/libstagefright/mpeg2ts/test/AndroidTest.xml b/media/module/mpeg2ts/test/AndroidTest.xml
similarity index 100%
rename from media/libstagefright/mpeg2ts/test/AndroidTest.xml
rename to media/module/mpeg2ts/test/AndroidTest.xml
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp b/media/module/mpeg2ts/test/Mpeg2tsUnitTest.cpp
similarity index 100%
rename from media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
rename to media/module/mpeg2ts/test/Mpeg2tsUnitTest.cpp
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h b/media/module/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
rename to media/module/mpeg2ts/test/Mpeg2tsUnitTestEnvironment.h
diff --git a/media/libstagefright/mpeg2ts/test/README.md b/media/module/mpeg2ts/test/README.md
similarity index 100%
rename from media/libstagefright/mpeg2ts/test/README.md
rename to media/module/mpeg2ts/test/README.md
diff --git a/services/mediatranscoding/.clang-format b/media/module/service.mediatranscoding/.clang-format
similarity index 100%
rename from services/mediatranscoding/.clang-format
rename to media/module/service.mediatranscoding/.clang-format
diff --git a/services/mediatranscoding/Android.bp b/media/module/service.mediatranscoding/Android.bp
similarity index 96%
rename from services/mediatranscoding/Android.bp
rename to media/module/service.mediatranscoding/Android.bp
index fa5eb4e..37f354b 100644
--- a/services/mediatranscoding/Android.bp
+++ b/media/module/service.mediatranscoding/Android.bp
@@ -26,6 +26,10 @@
         "SimulatedTranscoder.cpp",
     ],
 
+    export_include_dirs: [
+        ".",
+    ],
+
     min_sdk_version: "29",
     apex_available: [
         "com.android.media",
diff --git a/services/mediatranscoding/MODULE_LICENSE_APACHE2 b/media/module/service.mediatranscoding/MODULE_LICENSE_APACHE2
similarity index 100%
rename from services/mediatranscoding/MODULE_LICENSE_APACHE2
rename to media/module/service.mediatranscoding/MODULE_LICENSE_APACHE2
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/media/module/service.mediatranscoding/MediaTranscodingService.cpp
similarity index 100%
rename from services/mediatranscoding/MediaTranscodingService.cpp
rename to media/module/service.mediatranscoding/MediaTranscodingService.cpp
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/media/module/service.mediatranscoding/MediaTranscodingService.h
similarity index 100%
rename from services/mediatranscoding/MediaTranscodingService.h
rename to media/module/service.mediatranscoding/MediaTranscodingService.h
diff --git a/services/mediatranscoding/NOTICE b/media/module/service.mediatranscoding/NOTICE
similarity index 100%
rename from services/mediatranscoding/NOTICE
rename to media/module/service.mediatranscoding/NOTICE
diff --git a/services/mediatranscoding/OWNERS b/media/module/service.mediatranscoding/OWNERS
similarity index 100%
rename from services/mediatranscoding/OWNERS
rename to media/module/service.mediatranscoding/OWNERS
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/media/module/service.mediatranscoding/SimulatedTranscoder.cpp
similarity index 100%
rename from services/mediatranscoding/SimulatedTranscoder.cpp
rename to media/module/service.mediatranscoding/SimulatedTranscoder.cpp
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/media/module/service.mediatranscoding/SimulatedTranscoder.h
similarity index 100%
rename from services/mediatranscoding/SimulatedTranscoder.h
rename to media/module/service.mediatranscoding/SimulatedTranscoder.h
diff --git a/services/mediatranscoding/main_mediatranscodingservice.cpp b/media/module/service.mediatranscoding/main_mediatranscodingservice.cpp
similarity index 100%
rename from services/mediatranscoding/main_mediatranscodingservice.cpp
rename to media/module/service.mediatranscoding/main_mediatranscodingservice.cpp
diff --git a/services/mediatranscoding/tests/Android.bp b/media/module/service.mediatranscoding/tests/Android.bp
similarity index 95%
rename from services/mediatranscoding/tests/Android.bp
rename to media/module/service.mediatranscoding/tests/Android.bp
index ae13656..97fbd4c 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/media/module/service.mediatranscoding/tests/Android.bp
@@ -20,10 +20,6 @@
         "-Wextra",
     ],
 
-    include_dirs: [
-        "frameworks/av/services/mediatranscoding",
-    ],
-
     shared_libs: [
         "libactivitymanager_aidl",
         "libbinder",
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
similarity index 100%
rename from services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
rename to media/module/service.mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
diff --git a/services/mediatranscoding/tests/README.txt b/media/module/service.mediatranscoding/tests/README.txt
similarity index 100%
rename from services/mediatranscoding/tests/README.txt
rename to media/module/service.mediatranscoding/tests/README.txt
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/Android.bp
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppA.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppB.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/TestAppC.xml
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/MainActivity.java
diff --git a/services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java b/media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
similarity index 100%
rename from services/mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
rename to media/module/service.mediatranscoding/tests/TranscodingUidPolicyTestApp/src/com/android/tests/transcoding/ResourcePolicyTestActivity.java
diff --git a/services/mediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/module/service.mediatranscoding/tests/build_and_run_all_unit_tests.sh
similarity index 100%
rename from services/mediatranscoding/tests/build_and_run_all_unit_tests.sh
rename to media/module/service.mediatranscoding/tests/build_and_run_all_unit_tests.sh
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_real_tests.cpp
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_resource_tests.cpp
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/media/module/service.mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
similarity index 100%
rename from services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
rename to media/module/service.mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
diff --git a/media/mtp/Android.bp b/media/mtp/Android.bp
index 97e2a22..719d05a 100644
--- a/media/mtp/Android.bp
+++ b/media/mtp/Android.bp
@@ -31,8 +31,8 @@
     ],
 }
 
-cc_library_shared {
-    name: "libmtp",
+cc_defaults {
+    name: "libmtp_defaults",
     srcs: [
         "MtpDataPacket.cpp",
         "MtpDebug.cpp",
@@ -71,3 +71,14 @@
     ],
     header_libs: ["libcutils_headers"],
 }
+
+cc_library_shared {
+    name: "libmtp",
+    defaults: ["libmtp_defaults"],
+}
+
+cc_library_shared {
+    name: "libmtp_fuzz",
+    defaults: ["libmtp_defaults"],
+    cflags: ["-DMTP_FUZZER"],
+}
diff --git a/media/mtp/MtpDescriptors.h b/media/mtp/MtpDescriptors.h
index d600a24..9b98a71 100644
--- a/media/mtp/MtpDescriptors.h
+++ b/media/mtp/MtpDescriptors.h
@@ -23,6 +23,17 @@
 
 namespace android {
 
+#ifdef MTP_FUZZER
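+// Fuzz builds substitute writable paths under /data/local/tmp for the real FunctionFS
+// endpoint nodes under /dev/usb-ffs.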
+constexpr char FFS_MTP_EP0[] = "/data/local/tmp/usb-ffs/mtp/ep0";
+constexpr char FFS_MTP_EP_IN[] = "/data/local/tmp/usb-ffs/mtp/ep1";
+constexpr char FFS_MTP_EP_OUT[] = "/data/local/tmp/usb-ffs/mtp/ep2";
+constexpr char FFS_MTP_EP_INTR[] = "/data/local/tmp/usb-ffs/mtp/ep3";
+
+constexpr char FFS_PTP_EP0[] = "/data/local/tmp/usb-ffs/ptp/ep0";
+constexpr char FFS_PTP_EP_IN[] = "/data/local/tmp/usb-ffs/ptp/ep1";
+constexpr char FFS_PTP_EP_OUT[] = "/data/local/tmp/usb-ffs/ptp/ep2";
+constexpr char FFS_PTP_EP_INTR[] = "/data/local/tmp/usb-ffs/ptp/ep3";
+#else
 constexpr char FFS_MTP_EP0[] = "/dev/usb-ffs/mtp/ep0";
 constexpr char FFS_MTP_EP_IN[] = "/dev/usb-ffs/mtp/ep1";
 constexpr char FFS_MTP_EP_OUT[] = "/dev/usb-ffs/mtp/ep2";
@@ -32,6 +43,7 @@
 constexpr char FFS_PTP_EP_IN[] = "/dev/usb-ffs/ptp/ep1";
 constexpr char FFS_PTP_EP_OUT[] = "/dev/usb-ffs/ptp/ep2";
 constexpr char FFS_PTP_EP_INTR[] = "/dev/usb-ffs/ptp/ep3";
+#endif
 
 constexpr int MAX_PACKET_SIZE_FS = 64;
 constexpr int MAX_PACKET_SIZE_HS = 512;
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index 6fcf119..d917772 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -985,7 +985,7 @@
 
     int type = storage->getType();
     if (type == MTP_STORAGE_REMOVABLE_RAM) {
-        std::string str = android::base::Trim((const char*)name);
+        std::string str = android::base::Trim(name);
         name.set(str.c_str());
     }
     ALOGV("name: %s format: 0x%04X (%s)\n", (const char*)name, format,
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
index 4cec58a..30d1bc1 100644
--- a/media/mtp/MtpStringBuffer.h
+++ b/media/mtp/MtpStringBuffer.h
@@ -20,6 +20,7 @@
 #include <log/log.h>
 #include <stdint.h>
 #include <string>
+#include <string_view>
 
 // Max Character number of a MTP String
 #define MTP_STRING_MAX_CHARACTER_NUMBER             255
@@ -55,6 +56,7 @@
     inline int      size() const { return mString.length(); }
 
     inline operator const char*() const { return mString.c_str(); }
+    operator std::string_view() const { return mString; }
 };
 
 inline void MtpStringBuffer::append(const char* other) {
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index 5365f4b..9e41680 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package {
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
@@ -6,29 +22,20 @@
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_media_mtp_license"],
 }
-
-cc_fuzz {
-    name: "mtp_fuzzer",
-    srcs: [
-        "mtp_fuzzer.cpp",
-        "MtpMockDatabase.cpp",
-    ],
+cc_defaults {
+    name: "mtp_fuzzer_defaults",
     shared_libs: [
-	"libmtp",
-	"libbase",
-	"liblog",
-	"libutils",
+        "libbase",
+        "liblog",
+        "libutils",
     ],
+    static_libs: ["libc++fs",],
     cflags: [
         "-Wall",
         "-Wextra",
         "-Werror",
-        "-DMTP_DEVICE",
         "-Wno-unused-parameter",
     ],
-    dictionary: "mtp_fuzzer.dict",
-    corpus: ["corpus/*"],
-
     fuzz_config: {
 
         cc: ["jameswei@google.com"],
@@ -38,3 +45,107 @@
         ],
     },
 }
+cc_fuzz {
+    name: "mtp_fuzzer",
+    srcs: [
+        "mtp_fuzzer.cpp",
+        "MtpMockDatabase.cpp",
+    ],
+    cflags: ["-DMTP_DEVICE",],
+    shared_libs: ["libmtp",],
+    defaults: ["mtp_fuzzer_defaults"],
+    dictionary: "mtp_fuzzer.dict",
+    corpus: ["corpus/*"],
+}
+
+cc_defaults {
+    name: "mtp_property_fuzzer_defaults",
+    srcs: ["mtp_property_fuzzer.cpp"],
+    shared_libs: [
+        "libmtp",
+        "libasyncio",
+        "libusbhost",
+    ],
+    defaults: ["mtp_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "mtp_device_property_fuzzer",
+    defaults: ["mtp_property_fuzzer_defaults"],
+    cflags: [
+        "-DMTP_DEVICE",
+    ],
+}
+
+cc_fuzz {
+    name: "mtp_host_property_fuzzer",
+    defaults: ["mtp_property_fuzzer_defaults"],
+    cflags: [
+        "-DMTP_HOST",
+    ],
+}
+
+cc_fuzz {
+    name: "mtp_handle_fuzzer",
+    srcs: ["mtp_handle_fuzzer.cpp"],
+    shared_libs: ["libmtp_fuzz"],
+    defaults: ["mtp_fuzzer_defaults"],
+    cflags: ["-DMTP_FUZZER"],
+}
+
+cc_defaults {
+    name: "mtp_packet_defaults",
+    shared_libs: [
+        "libmtp",
+        "libasyncio",
+        "libusbhost",
+    ],
+    defaults: ["mtp_fuzzer_defaults"],
+    cflags: [
+        "-DMTP_HOST",
+        "-DMTP_DEVICE",
+    ],
+}
+
+cc_fuzz {
+    name: "mtp_packet_fuzzer",
+    srcs: ["mtp_packet_fuzzer.cpp"],
+    defaults: ["mtp_packet_defaults"],
+}
+
+cc_fuzz {
+    name: "mtp_device_fuzzer",
+    srcs: ["mtp_device_fuzzer.cpp"],
+    shared_libs: [
+        "libmtp",
+        "libusbhost",
+    ],
+    defaults: ["mtp_fuzzer_defaults"],
+    cflags: [
+        "-DMTP_DEVICE",
+    ],
+}
+
+cc_fuzz {
+    name: "mtp_request_packet_fuzzer",
+    srcs: ["mtp_request_packet_fuzzer.cpp"],
+    defaults: ["mtp_packet_defaults"],
+}
+
+cc_fuzz {
+    name: "mtp_event_packet_fuzzer",
+    srcs: ["mtp_event_packet_fuzzer.cpp"],
+    defaults: ["mtp_packet_defaults"],
+}
+
+cc_fuzz {
+    name: "mtp_response_packet_fuzzer",
+    srcs: ["mtp_response_packet_fuzzer.cpp"],
+    defaults: ["mtp_packet_defaults"],
+}
+
+cc_fuzz {
+    name: "mtp_data_packet_fuzzer",
+    srcs: ["mtp_data_packet_fuzzer.cpp"],
+    defaults: ["mtp_packet_defaults"],
+}
diff --git a/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
new file mode 100644
index 0000000..87fea9f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/MtpPacketFuzzerUtils.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpStringBuffer.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <linux/usbdevice_fs.h>
+#include <sys/mman.h>
+#include <usbhost/usbhost.h>
+#include <MtpTypes.h>
+
+using namespace android;
+constexpr UrbPacketDivisionMode kUrbPacketDivisionModes[] = {FIRST_PACKET_ONLY_HEADER,
+                                                             FIRST_PACKET_HAS_PAYLOAD};
+
+constexpr size_t kMinSize = 0;
+constexpr size_t kMaxSize = 1000;
+constexpr size_t kMaxLength = 1000;
+
+class MtpPacketFuzzerUtils {
+  protected:
+    struct usb_request mUsbRequest;
+    struct usbdevfs_urb* mUsbDevFsUrb;
+    std::string mPath;
+
+    void fillFd(int32_t& fd, FuzzedDataProvider* fdp) {
+        if (fdp->ConsumeBool()) {
+            std::string text = fdp->ConsumeRandomLengthString(kMaxLength);
+            write(fd, text.c_str(), text.length());
+        }
+    };
+
+    void fillFilePath(FuzzedDataProvider* fdp) {
+        mPath = fdp->ConsumeRandomLengthString(kMaxLength);
+    };
+
+    void fillUsbDevFsUrb(FuzzedDataProvider* fdp) {
+        mUsbDevFsUrb->type = fdp->ConsumeIntegral<unsigned char>();
+        mUsbDevFsUrb->endpoint = fdp->ConsumeIntegral<unsigned char>();
+        mUsbDevFsUrb->flags = fdp->ConsumeIntegral<uint32_t>();
+        std::vector<uint8_t> buffer =
+                fdp->ConsumeBytes<uint8_t>(fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+        mUsbDevFsUrb->buffer = static_cast<void*>(buffer.data());
+        mUsbDevFsUrb->buffer_length = buffer.size();
+        mUsbDevFsUrb->actual_length = fdp->ConsumeIntegral<uint32_t>();
+        mUsbDevFsUrb->start_frame = fdp->ConsumeIntegral<uint32_t>();
+        mUsbDevFsUrb->number_of_packets = fdp->ConsumeIntegral<uint32_t>();
+        mUsbDevFsUrb->stream_id = fdp->ConsumeIntegral<uint32_t>();
+        mUsbDevFsUrb->error_count = fdp->ConsumeIntegral<size_t>();
+        mUsbDevFsUrb->signr = fdp->ConsumeIntegral<uint32_t>();
+        std::vector<uint8_t> userBuffer = (fdp->ConsumeBytes<uint8_t>(
+                fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize)));
+        mUsbDevFsUrb->usercontext = static_cast<void*>(userBuffer.data());
+        mUsbDevFsUrb->iso_frame_desc[0].length = fdp->ConsumeIntegral<uint32_t>();
+        mUsbDevFsUrb->iso_frame_desc[0].actual_length = fdp->ConsumeIntegral<uint32_t>();
+    };
+
+    void fillUsbRequest(int32_t& fd, FuzzedDataProvider* fdp) {
+        fillUsbDevFsUrb(fdp);
+        fillFd(fd, fdp);
+        std::vector<uint8_t> buffer =
+                fdp->ConsumeBytes<uint8_t>(fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+        mUsbRequest.buffer = static_cast<void*>(buffer.data());
+        mUsbRequest.buffer_length = buffer.size();
+        mUsbRequest.actual_length = fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+        mUsbRequest.max_packet_size = fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+        mUsbRequest.private_data = static_cast<void*>(mUsbDevFsUrb);
+        mUsbRequest.endpoint = fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+        std::vector<uint8_t> clientBuffer = (fdp->ConsumeBytes<uint8_t>(
+                fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize)));
+        mUsbRequest.client_data = static_cast<void*>(clientBuffer.data());
+    };
+
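+    // Creates a default MtpDevHandle, writes fuzzer-provided bytes into it, and then has
+    // the packet object serialize itself through that handle.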
+    template <typename Object>
+    void writeHandle(Object obj, FuzzedDataProvider* fdp) {
+        MtpDevHandle handle;
+        std::vector<uint8_t> initData =
+                fdp->ConsumeBytes<uint8_t>(fdp->ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+        handle.write(initData.data(), initData.size());
+        obj->write(&handle);
+    };
+};
diff --git a/media/mtp/tests/MtpFuzzer/README.md b/media/mtp/tests/MtpFuzzer/README.md
new file mode 100644
index 0000000..7efaf67
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/README.md
@@ -0,0 +1,210 @@
+# Fuzzers for libmtp
+
+## Table of contents
++ [mtp_fuzzer](#MtpServer)
++ [mtp_host_property_fuzzer](#MtpHostProperty)
++ [mtp_device_property_fuzzer](#MtpDeviceProperty)
++ [mtp_handle_fuzzer](#MtpHandle)
++ [mtp_packet_fuzzer](#MtpPacket)
++ [mtp_device_fuzzer](#MtpDevice)
++ [mtp_request_packet_fuzzer](#MtpRequestPacket)
++ [mtp_event_packet_fuzzer](#MtpEventPacket)
++ [mtp_response_packet_fuzzer](#MtpResponsePacket)
++ [mtp_data_packet_fuzzer](#MtpDataPacket)
+
+# <a name="MtpServer"></a> Fuzzer for MtpServer
+
+MtpServer supports the following parameters:
+1. PacketData (parameter name: "packetData")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`packetData`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_fuzzer/mtp_fuzzer corpus/ -dict=mtp_fuzzer.dict
+```
+
+# <a name="MtpHostProperty"></a> Fuzzer for MtpHostProperty
+
+MtpHostProperty supports the following parameters:
+1. Feasible Type (parameter name: "kFeasibleTypes")
+2. UrbPacket Division Mode (parameter name: "kUrbPacketDivisionModes")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `kFeasibleType`| 1. `MTP_TYPE_UNDEFINED`, 2. `MTP_TYPE_INT8`, 3.`MTP_TYPE_UINT8`, 4.`MTP_TYPE_INT16`, 5.`MTP_TYPE_UINT16`, 6.`MTP_TYPE_INT32`, 7.`MTP_TYPE_UINT32`, 8.`MTP_TYPE_INT64`, 9.`MTP_TYPE_UINT64`, 10.`MTP_TYPE_INT128`, 11.`MTP_TYPE_UINT128`, 12.`MTP_TYPE_AINT8`, 13.`MTP_TYPE_AUINT8`, 14.`MTP_TYPE_AINT16`, 15.`MTP_TYPE_AUINT16`, 16.`MTP_TYPE_AINT32`, 17.`MTP_TYPE_AUINT32`, 18.`MTP_TYPE_AINT64`, 19.`MTP_TYPE_AUINT64`, 20.`MTP_TYPE_AINT128`, 21.`MTP_TYPE_AUINT128`, 22.`MTP_TYPE_STR`,| Value obtained from FuzzedDataProvider|
+|`kUrbPacketDivisionMode`| 1. `FIRST_PACKET_ONLY_HEADER`, 2. `FIRST_PACKET_HAS_PAYLOAD`, |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_host_property_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_host_property_fuzzer/mtp_host_property_fuzzer
+```
+
+# <a name="MtpDeviceProperty"></a> Fuzzer for MtpDeviceProperty
+
+MtpDeviceProperty supports the following parameters:
+1. Feasible Type (parameter name: "kFeasibleType")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `kFeasibleType`| 1. `MTP_TYPE_UNDEFINED`, 2. `MTP_TYPE_INT8`, 3.`MTP_TYPE_UINT8`, 4.`MTP_TYPE_INT16`, 5.`MTP_TYPE_UINT16`, 6.`MTP_TYPE_INT32`, 7.`MTP_TYPE_UINT32`, 8.`MTP_TYPE_INT64`, 9.`MTP_TYPE_UINT64`, 10.`MTP_TYPE_INT128`, 11.`MTP_TYPE_UINT128`, 12.`MTP_TYPE_AINT8`, 13.`MTP_TYPE_AUINT8`, 14.`MTP_TYPE_AINT16`, 15.`MTP_TYPE_AUINT16`, 16.`MTP_TYPE_AINT32`, 17.`MTP_TYPE_AUINT32`, 18.`MTP_TYPE_AINT64`, 19.`MTP_TYPE_AUINT64`, 20.`MTP_TYPE_AINT128`, 21.`MTP_TYPE_AUINT128`, 22.`MTP_TYPE_STR`,| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_device_property_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_device_property_fuzzer/mtp_device_property_fuzzer
+```
+
+# <a name="MtpHandle"></a>Fuzzer for MtpHandle
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_handle_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_handle_fuzzer/mtp_handle_fuzzer
+```
+
+# <a name="MtpPacket"></a> Fuzzer for MtpPacket
+
+MtpPacket supports the following parameters:
+1. bufferSize (parameter name: "size")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`bufferSize`| Integer `1` to `1000`, |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_packet_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_packet_fuzzer/mtp_packet_fuzzer
+```
+
+# <a name="MtpDevice"></a> Fuzzer for MtpDevice
+
+MtpDevice supports the following parameters:
+1. Device Name (parameter name: "deviceName")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`deviceName`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_device_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_device_fuzzer/mtp_device_fuzzer
+```
+
+# <a name="MtpRequestPacket"></a> Fuzzer for MtpRequestPacket
+
+MtpRequestPacket supports the following parameters:
+1. Data (parameter name: "data")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`data`| Vector of positive Integer |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_request_packet_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_request_packet_fuzzer/mtp_request_packet_fuzzer
+```
+
+# <a name="MtpEventPacket"></a> Fuzzer for MtpEventPacket
+
+MtpEventPacket supports the following parameters:
+1. Size (parameter name: "size")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`size`| Integer `1` to `1000`, |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_event_packet_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_event_packet_fuzzer/mtp_event_packet_fuzzer
+```
+
+# <a name="MtpResponsePacket"></a> Fuzzer for MtpResponsePacket
+
+MtpResponsePacket supports the following parameters:
+1. Size (parameter name: "size")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`size`| Integer in the range `1` to `1000` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_response_packet_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_response_packet_fuzzer/mtp_response_packet_fuzzer
+```
+
+# <a name="MtpDataPacket"></a> Fuzzer for MtpDataPacket
+
+MtpDataPacket supports the following parameters (see the sketch after the table):
+1. UrbPacket Division Mode (parameter name: "kUrbPacketDivisionModes")
+2. Size (parameter name: "size")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`kUrbPacketDivisionModes`| 1. `FIRST_PACKET_ONLY_HEADER`, 2. `FIRST_PACKET_HAS_PAYLOAD` |Value obtained from FuzzedDataProvider|
+|`size`| Integer in the range `1` to `1000` |Value obtained from FuzzedDataProvider|
+
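+A minimal sketch of how these two parameters are typically drawn from the fuzz input (assuming `UrbPacketDivisionMode` and its two values are declared in `MtpDataPacket.h`; `pickDataPacketParams` is a hypothetical helper, not the shipped fuzzer code):
+```
+#include <MtpDataPacket.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+using namespace android;
+
+// Hypothetical sketch: the two division modes listed above.
+constexpr UrbPacketDivisionMode kUrbPacketDivisionModes[] = {FIRST_PACKET_ONLY_HEADER,
+                                                             FIRST_PACKET_HAS_PAYLOAD};
+
+// Pick a division mode and a buffer size in the documented range.
+void pickDataPacketParams(FuzzedDataProvider& fdp, UrbPacketDivisionMode* mode, size_t* size) {
+    *mode = fdp.PickValueInArray<UrbPacketDivisionMode>(kUrbPacketDivisionModes);
+    *size = fdp.ConsumeIntegralInRange<size_t>(1, 1000);
+}
+```
+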
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mtp_data_packet_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mtp_data_packet_fuzzer/mtp_data_packet_fuzzer
+```
diff --git a/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt
new file mode 100644
index 0000000..71f2836
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/mtp_data_packet_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_data_packet_fuzzer.cpp
new file mode 100644
index 0000000..f5faf77
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_data_packet_fuzzer.cpp
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDataPacket.h>
+#include <MtpDevHandle.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+
+using namespace android;
+
+class MtpDataPacketFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpDataPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                   sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpDataPacketFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+void MtpDataPacketFuzzer::process() {
+    MtpDataPacket mtpDataPacket;
+    while (mFdp.remaining_bytes() > 0) {
+        auto mtpDataAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { mtpDataPacket.allocate(mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize)); },
+                [&]() { mtpDataPacket.reset(); },
+                [&]() {
+                    mtpDataPacket.setOperationCode(mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    mtpDataPacket.setTransactionID(mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    Int8List* result = mtpDataPacket.getAInt8();
+                    delete result;
+                },
+                [&]() {
+                    Int16List* result = mtpDataPacket.getAInt16();
+                    delete result;
+                },
+                [&]() {
+                    Int32List* result = mtpDataPacket.getAInt32();
+                    delete result;
+                },
+                [&]() {
+                    Int64List* result = mtpDataPacket.getAInt64();
+                    delete result;
+                },
+                [&]() {
+                    UInt8List* result = mtpDataPacket.getAUInt8();
+                    delete result;
+                },
+                [&]() {
+                    UInt16List* result = mtpDataPacket.getAUInt16();
+                    delete result;
+                },
+                [&]() {
+                    UInt32List* result = mtpDataPacket.getAUInt32();
+                    delete result;
+                },
+                [&]() {
+                    UInt64List* result = mtpDataPacket.getAUInt64();
+                    delete result;
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<uint8_t> initData =
+                                mFdp.ConsumeBytes<uint8_t>(mFdp.ConsumeIntegral<uint8_t>());
+                        mtpDataPacket.putAUInt8(initData.data(), initData.size());
+                    } else {
+                        mtpDataPacket.putAUInt8(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        uint16_t arr[size];
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr[idx] = mFdp.ConsumeIntegral<uint16_t>();
+                        }
+                        mtpDataPacket.putAUInt16(arr, size);
+                    } else {
+                        mtpDataPacket.putAUInt16(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        uint32_t arr[size];
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr[idx] = mFdp.ConsumeIntegral<uint32_t>();
+                        }
+                        mtpDataPacket.putAUInt32(arr, size);
+                    } else {
+                        mtpDataPacket.putAUInt32(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        uint64_t arr[size];
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr[idx] = mFdp.ConsumeIntegral<uint64_t>();
+                        }
+                        mtpDataPacket.putAUInt64(arr, size);
+                    } else {
+                        mtpDataPacket.putAUInt64(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        int64_t arr[size];
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr[idx] = mFdp.ConsumeIntegral<int64_t>();
+                        }
+                        mtpDataPacket.putAInt64(arr, size);
+                    } else {
+                        mtpDataPacket.putAInt64(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<uint16_t> arr;
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr.push_back(mFdp.ConsumeIntegral<uint16_t>());
+                        }
+                        mtpDataPacket.putAUInt16(&arr);
+                    } else {
+                        mtpDataPacket.putAUInt16(nullptr);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<uint32_t> arr;
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr.push_back(mFdp.ConsumeIntegral<uint32_t>());
+                        }
+                        mtpDataPacket.putAUInt32(&arr);
+                    } else {
+                        mtpDataPacket.putAUInt32(nullptr);
+                    }
+                },
+
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        size_t size = mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize);
+                        int32_t arr[size];
+                        for (size_t idx = 0; idx < size; ++idx) {
+                            arr[idx] = mFdp.ConsumeIntegral<int32_t>();
+                        }
+                        mtpDataPacket.putAInt32(arr, size);
+                    } else {
+                        mtpDataPacket.putAInt32(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        mtpDataPacket.putString(
+                                (mFdp.ConsumeRandomLengthString(kMaxLength)).c_str());
+                    } else {
+                        mtpDataPacket.putString(static_cast<char*>(nullptr));
+                    }
+                },
+                [&]() {
+                    android::MtpStringBuffer sBuffer(
+                            (mFdp.ConsumeRandomLengthString(kMaxLength)).c_str());
+                    if (mFdp.ConsumeBool()) {
+                        mtpDataPacket.getString(sBuffer);
+                    } else {
+                        mtpDataPacket.putString(sBuffer);
+                    }
+                },
+                [&]() {
+                    MtpDevHandle handle;
+                    handle.start(mFdp.ConsumeBool());
+                    std::string text = mFdp.ConsumeRandomLengthString(kMaxLength);
+                    char* data = const_cast<char*>(text.c_str());
+                    handle.read(static_cast<void*>(data), text.length());
+                    if (mFdp.ConsumeBool()) {
+                        mtpDataPacket.read(&handle);
+                    } else if (mFdp.ConsumeBool()) {
+                        mtpDataPacket.write(&handle);
+                    } else {
+                        std::string textData = mFdp.ConsumeRandomLengthString(kMaxLength);
+                        char* Data = const_cast<char*>(textData.c_str());
+                        mtpDataPacket.writeData(&handle, static_cast<void*>(Data),
+                                                textData.length());
+                    }
+                    handle.close();
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::string str = mFdp.ConsumeRandomLengthString(kMaxLength);
+                        android::String16 s(str.c_str());
+                        char16_t* data = const_cast<char16_t*>(s.string());
+                        mtpDataPacket.putString(reinterpret_cast<uint16_t*>(data));
+                    } else {
+                        mtpDataPacket.putString(static_cast<uint16_t*>(nullptr));
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<int8_t> data = mFdp.ConsumeBytes<int8_t>(
+                                mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                        mtpDataPacket.putAInt8(data.data(), data.size());
+                    } else {
+                        mtpDataPacket.putAInt8(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<uint8_t> data = mFdp.ConsumeBytes<uint8_t>(
+                                mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                        mtpDataPacket.putAUInt8(data.data(), data.size());
+                    } else {
+                        mtpDataPacket.putAUInt8(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    std::vector<int8_t> data = mFdp.ConsumeBytes<int8_t>(
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    mtpDataPacket.readData(&mUsbRequest, data.data(), data.size());
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.write(
+                            &mUsbRequest,
+                            mFdp.PickValueInArray<UrbPacketDivisionMode>(kUrbPacketDivisionModes),
+                            fd, mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize));
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.read(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.write(&mUsbRequest, mFdp.PickValueInArray<UrbPacketDivisionMode>(
+                                                             kUrbPacketDivisionModes));
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.readDataHeader(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.readDataAsync(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpDataPacket.readDataWait(mUsbRequest.dev);
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    if (mFdp.ConsumeBool()) {
+                        std::vector<int16_t> data;
+                        for (size_t idx = 0;
+                             idx < mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize); ++idx) {
+                            data.push_back(mFdp.ConsumeIntegral<int16_t>());
+                        }
+                        mtpDataPacket.putAInt16(data.data(), data.size());
+                    } else {
+                        mtpDataPacket.putAInt16(nullptr, 0);
+                    }
+                },
+                [&]() {
+                    int32_t arr[4];
+                    for (size_t idx = 0; idx < 4; ++idx) {
+                        arr[idx] = mFdp.ConsumeIntegral<int32_t>();
+                    }
+                    mtpDataPacket.putInt128(arr);
+                },
+                [&]() { mtpDataPacket.putInt64(mFdp.ConsumeIntegral<int64_t>()); },
+                [&]() {
+                    int16_t out;
+                    mtpDataPacket.getInt16(out);
+                },
+                [&]() {
+                    int32_t out;
+                    mtpDataPacket.getInt32(out);
+                },
+                [&]() {
+                    int8_t out;
+                    mtpDataPacket.getInt8(out);
+                },
+                [&]() {
+                    uint32_t arr[4];
+                    for (size_t idx = 0; idx < 4; ++idx) {
+                        arr[idx] = mFdp.ConsumeIntegral<uint32_t>();
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        mtpDataPacket.putUInt128(arr);
+                    } else {
+                        mtpDataPacket.getUInt128(arr);
+                    }
+                },
+                [&]() { mtpDataPacket.putUInt64(mFdp.ConsumeIntegral<uint64_t>()); },
+                [&]() {
+                    uint64_t out;
+                    mtpDataPacket.getUInt64(out);
+                },
+                [&]() { mtpDataPacket.putInt128(mFdp.ConsumeIntegral<int64_t>()); },
+                [&]() { mtpDataPacket.putUInt128(mFdp.ConsumeIntegral<uint64_t>()); },
+                [&]() {
+                    int32_t length;
+                    void* data = mtpDataPacket.getData(&length);
+                    free(data);
+                },
+        });
+        mtpDataAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpDataPacketFuzzer mtpDataPacketFuzzer(data, size);
+    mtpDataPacketFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp
new file mode 100644
index 0000000..c32d28a
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_device_fuzzer.cpp
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDevHandle.h>
+#include <MtpDevice.h>
+#include <MtpDeviceInfo.h>
+#include <MtpObjectInfo.h>
+#include <MtpProperty.h>
+#include <MtpStorageInfo.h>
+#include <MtpStringBuffer.h>
+#include <android-base/unique_fd.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <linux/usb/ch9.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <usbhost/usbhost.h>
+
+using namespace android;
+
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 200;
+constexpr int32_t kMaxDataSize = 20;
+constexpr uint16_t kWMaxPacketSize = 64;
+constexpr uint16_t kEndpointsCount = 3;
+const std::string kInputFile = "/dev/null";
+const std::string kConfigFilePath = "/data/local/tmp/config";
+
+static bool readCallback(void* data, uint32_t offset, uint32_t length, void* clientData) {
+    return true;
+}
+
+struct fdDescriptors {
+    struct usb_interface_descriptor interface;
+    struct usb_endpoint_descriptor ep[kEndpointsCount];
+};
+
+fdDescriptors writeDescriptorsToFd(int32_t fd, FuzzedDataProvider& fdp) {
+    fdDescriptors desc;
+    desc.interface.bLength = sizeof(desc.interface);
+    desc.interface.bDescriptorType = USB_DT_INTERFACE;
+    desc.interface.bInterfaceNumber = fdp.ConsumeIntegral<uint8_t>();
+    desc.interface.bNumEndpoints = kEndpointsCount;
+    desc.interface.bInterfaceClass =
+            fdp.ConsumeBool() ? USB_CLASS_STILL_IMAGE : USB_CLASS_VENDOR_SPEC;
+    desc.interface.bInterfaceSubClass = fdp.ConsumeBool() ? 1 : 0xFF;
+    desc.interface.bInterfaceProtocol = fdp.ConsumeBool() ? 1 : 0;
+    desc.interface.iInterface = fdp.ConsumeIntegral<uint8_t>();
+    for (uint16_t idx = 0; idx < kEndpointsCount; ++idx) {
+        desc.ep[idx].bLength = sizeof(desc.ep[idx]);
+        desc.ep[idx].bDescriptorType = USB_DT_ENDPOINT;
+        desc.ep[idx].bEndpointAddress = idx | (fdp.ConsumeBool() ? USB_DIR_OUT : USB_DIR_IN);
+        desc.ep[idx].bmAttributes =
+                fdp.ConsumeBool() ? USB_ENDPOINT_XFER_BULK : USB_ENDPOINT_XFER_INT;
+        desc.ep[idx].wMaxPacketSize = kWMaxPacketSize;
+    }
+    write(fd, &desc, sizeof(fdDescriptors));
+    return desc;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    int32_t fd = memfd_create(kConfigFilePath.c_str(), MFD_ALLOW_SEALING);
+    fdDescriptors descriptor = writeDescriptorsToFd(fd, fdp);
+    std::string deviceName = fdp.ConsumeRandomLengthString(kMaxStringLength);
+    usb_device* device = usb_device_new(deviceName.c_str(), fd);
+    MtpDevice mtpDevice(device, fdp.ConsumeIntegral<int32_t>(), &descriptor.ep[0],
+                        &descriptor.ep[1], &descriptor.ep[2]);
+    while (fdp.remaining_bytes()) {
+        auto mtpDeviceFunction = fdp.PickValueInArray<const std::function<void()>>(
+                {[&]() { mtpDevice.getStorageIDs(); },
+                 [&]() {
+                     mtpDevice.getStorageInfo(fdp.ConsumeIntegral<int32_t>() /* storageID */);
+                 },
+                 [&]() {
+                     mtpDevice.getObjectHandles(fdp.ConsumeIntegral<uint32_t>() /* storageID */,
+                                                fdp.ConsumeIntegral<uint16_t>() /* format */,
+                                                fdp.ConsumeIntegral<uint32_t>() /* parent */);
+                 },
+                 [&]() { mtpDevice.initialize(); },
+                 [&]() {
+                     int32_t outLength = 0;
+                     mtpDevice.getThumbnail(fdp.ConsumeIntegral<uint32_t>() /* handle */,
+                                            outLength);
+                 },
+                 [&]() {
+                     MtpObjectInfo mtpObjectInfo(fdp.ConsumeIntegral<uint32_t>() /* handle */);
+                     std::string name = fdp.ConsumeRandomLengthString(kMaxStringLength);
+                     std::string keywords = fdp.ConsumeRandomLengthString(kMaxStringLength);
+                     mtpObjectInfo.mName = strdup(name.c_str());
+                     mtpObjectInfo.mKeywords = strdup(keywords.c_str());
+                     mtpDevice.sendObjectInfo(&mtpObjectInfo);
+                 },
+                 [&]() {
+                     mtpDevice.sendObject(fdp.ConsumeIntegral<uint32_t>() /* handle */,
+                                          fdp.ConsumeIntegral<uint32_t>() /* size */, fd);
+                 },
+                 [&]() { mtpDevice.deleteObject(fdp.ConsumeIntegral<uint32_t>() /* handle */); },
+                 [&]() {
+                     mtpDevice.getObjectPropsSupported(
+                             fdp.ConsumeIntegral<uint16_t>() /* format */);
+                 },
+                 [&]() {
+                     MtpDataType dataType = fdp.ConsumeIntegral<int16_t>();
+                     MtpProperty mtpProperty(fdp.ConsumeIntegral<int16_t>() /* propCode */,
+                                             dataType, fdp.ConsumeBool() /* writeable */,
+                                             fdp.ConsumeIntegral<int32_t>() /* defaultValue */);
+                     if (dataType == MTP_TYPE_STR) {
+                         mtpProperty.setCurrentValue(
+                                 fdp.ConsumeRandomLengthString(kMaxStringLength).c_str());
+                     }
+                     mtpDevice.setDevicePropValueStr(&mtpProperty);
+                 },
+                 [&]() {
+                     mtpDevice.getObjectPropDesc(fdp.ConsumeIntegral<uint16_t>() /* code */,
+                                                 fdp.ConsumeIntegral<uint16_t>() /* format */);
+                 },
+                 [&]() {
+                     MtpProperty property;
+                     mtpDevice.getObjectPropValue(fdp.ConsumeIntegral<uint16_t>() /* handle */,
+                                                  &property);
+                 },
+                 [&]() {
+                     std::vector<uint8_t> clientData = fdp.ConsumeBytes<uint8_t>(kMaxDataSize);
+                     mtpDevice.readObject(
+                             fdp.ConsumeIntegral<uint32_t>() /* handle */, readCallback,
+                             fdp.ConsumeIntegral<uint32_t>() /* objectSize */, &clientData);
+                 },
+                 [&]() {
+                     std::vector<uint8_t> clientData = fdp.ConsumeBytes<uint8_t>(kMaxDataSize);
+                     uint32_t writtenSize = 0;
+                     mtpDevice.readPartialObject(fdp.ConsumeIntegral<uint32_t>() /* handle */,
+                                                 fdp.ConsumeIntegral<uint32_t>() /* offset */,
+                                                 fdp.ConsumeIntegral<uint32_t>() /* size */,
+                                                 &writtenSize, readCallback, &clientData);
+                 },
+                 [&]() {
+                     std::vector<uint8_t> clientData = fdp.ConsumeBytes<uint8_t>(kMaxDataSize);
+                     uint32_t writtenSize = 0;
+                     mtpDevice.readPartialObject(fdp.ConsumeIntegral<uint32_t>() /* handle */,
+                                                 fdp.ConsumeIntegral<uint64_t>() /* offset */,
+                                                 fdp.ConsumeIntegral<uint32_t>() /* size */,
+                                                 &writtenSize, readCallback, &clientData);
+                 },
+                 [&]() {
+                     if (mtpDevice.submitEventRequest() != -1) {
+                         uint32_t parameters[3];
+                         mtpDevice.reapEventRequest(fdp.ConsumeIntegral<int32_t>() /* handle */,
+                                                    &parameters);
+                     }
+                 },
+                 [&]() {
+                     mtpDevice.discardEventRequest(fdp.ConsumeIntegral<int32_t>() /*handle*/);
+                 },
+                 [&]() {
+                     mtpDevice.discardEventRequest(fdp.ConsumeIntegral<int32_t>() /* handle */);
+                 },
+                 [&]() { mtpDevice.print(); },
+                 [&]() { mtpDevice.getDeviceName(); },
+                 [&]() { mtpDevice.getObjectInfo(fdp.ConsumeIntegral<uint32_t>() /* handle */); },
+                 [&]() { mtpDevice.getParent(fdp.ConsumeIntegral<uint32_t>() /* handle */); },
+                 [&]() { mtpDevice.getStorageID(fdp.ConsumeIntegral<uint32_t>() /* handle */); },
+                 [&]() { mtpDevice.getDevicePropDesc(fdp.ConsumeIntegral<uint16_t>() /* code */); },
+                 [&]() {
+                     mtpDevice.readObject(
+                             fdp.ConsumeIntegral<uint32_t>() /* handle */,
+                             fdp.ConsumeRandomLengthString(kMaxStringLength).c_str() /* destPath */,
+                             fdp.ConsumeIntegral<int32_t>() /* group */,
+                             fdp.ConsumeIntegral<int32_t>() /* perm */);
+                 },
+                 [&]() {
+                     int32_t filefd = open(kConfigFilePath.c_str(), O_CREAT | O_RDWR);
+                     mtpDevice.readObject(fdp.ConsumeIntegral<uint16_t>() /* handle */, filefd);
+                     close(filefd);
+                 },
+                 [&]() { MtpDevice::open(deviceName.c_str(), fd); },
+                 [&]() {
+                     MtpObjectInfo objectinfo(fdp.ConsumeIntegral<uint32_t>() /* handle */);
+                     MtpDataPacket mtpDataPacket;
+                     MtpDevHandle devHandle;
+                     std::vector<uint8_t> packet = fdp.ConsumeBytes<uint8_t>(kMaxBytes);
+                     mtpDataPacket.writeData(&devHandle, packet.data(), packet.size());
+                     objectinfo.read(mtpDataPacket);
+                     objectinfo.print();
+                 },
+                 [&]() {
+                     MtpStorageInfo storageInfo(fdp.ConsumeIntegral<uint32_t>() /* id */);
+                     MtpDataPacket mtpDataPacket;
+                     MtpDevHandle devHandle;
+                     std::vector<uint8_t> packet = fdp.ConsumeBytes<uint8_t>(kMaxBytes);
+                     mtpDataPacket.writeData(&devHandle, packet.data(), packet.size());
+                     storageInfo.read(mtpDataPacket);
+                     storageInfo.print();
+                 }});
+        mtpDeviceFunction();
+    }
+    close(fd);
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_event_packet_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_event_packet_fuzzer.cpp
new file mode 100644
index 0000000..3bd3be2
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_event_packet_fuzzer.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDevHandle.h>
+#include <MtpEventPacket.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+using namespace android;
+
+class MtpEventPacketFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpEventPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                   sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpEventPacketFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+void MtpEventPacketFuzzer::process() {
+    MtpEventPacket mtpEventPacket;
+    while (mFdp.remaining_bytes() > 0) {
+        auto mtpEventAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { mtpEventPacket.allocate(mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize)); },
+                [&]() { mtpEventPacket.reset(); },
+                [&]() { writeHandle(&mtpEventPacket, &mFdp); },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpEventPacket.sendRequest(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillFd(fd, &mFdp);
+                    struct usb_device* device = usb_device_new(mPath.c_str(), fd);
+                    mtpEventPacket.readResponse(device);
+                    usb_device_close(device);
+                },
+        });
+        mtpEventAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpEventPacketFuzzer mtpEventPacketFuzzer(data, size);
+    mtpEventPacketFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
index f578462..e886816 100644
--- a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
@@ -14,12 +14,15 @@
  * limitations under the License.
  */
 
+#include <android-base/properties.h>
 #include <android-base/unique_fd.h>
+#include <fuzzer/FuzzedDataProvider.h>
 #include <stddef.h>
 #include <stdint.h>
 #include <stdlib.h>
 #include <unistd.h>
-
+#include <filesystem>
+#include <fstream>
 #include <string>
 
 #define LOG_TAG "MtpFuzzer"
@@ -32,38 +35,40 @@
 #include "MtpStorage.h"
 #include "MtpUtils.h"
 
-const char* storage_desc = "Fuzz Storage";
+constexpr int32_t kMinFiles = 0;
+constexpr int32_t kMaxFiles = 5;
+constexpr int32_t kMaxBytes = 128;
+constexpr float kMinDataSizeFactor = 0.8;
 // prefer tmpfs for file operations to avoid wearing out flash
 const char* storage_path = "/storage/fuzzer/0";
-const char* source_database = "srcdb/";
+const char* source_database = "/data/local/tmp/srcdb/";
+const std::string test_path = std::string(source_database) + "TestDir/";
+const std::string kPropertyKey = "sys.fuse.transcode_mtp";
 
 namespace android {
 class MtpMockServer {
-public:
-    std::unique_ptr<MtpMockHandle> mHandle;
-    std::unique_ptr<MtpStorage> mStorage;
-    std::unique_ptr<MtpMockDatabase> mDatabase;
-    std::unique_ptr<MtpServer> mMtp;
-    int mStorageId;
-
-    MtpMockServer(const char* storage_path) : mStorageId(0) {
-        bool ptp = false;
-        const char* manu = "Google";
-        const char* model = "Pixel 3XL";
-        const char* version = "1.0";
-        const char* serial = "ABDEF1231";
-
+  public:
+    MtpMockServer(const uint8_t* data, size_t size) : mFdp(data, size) {
         // This is unused in our harness
         int controlFd = -1;
 
         mHandle = std::make_unique<MtpMockHandle>();
-        mStorage = std::make_unique<MtpStorage>(mStorageId, storage_path, storage_desc, true,
-                                                0x200000000L);
+        mStorage = std::make_unique<MtpStorage>(
+                mFdp.ConsumeIntegral<uint32_t>() /* storageId */, storage_path,
+                mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* descriptor */,
+                mFdp.ConsumeBool() /* removable */,
+                mFdp.ConsumeIntegral<uint64_t>() /* maxFileSize */);
         mDatabase = std::make_unique<MtpMockDatabase>();
         mDatabase->addStorage(mStorage.get());
 
-        mMtp = std::make_unique<MtpServer>(mDatabase.get(), controlFd, ptp, manu, model, version,
-                                           serial);
+        init(data, size);
+
+        mMtp = std::make_unique<MtpServer>(
+                mDatabase.get(), controlFd, mFdp.ConsumeBool() /* ptp */,
+                mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* manu */,
+                mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* model */,
+                mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* version */,
+                mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* serial */);
         mMtp->addStorage(mStorage.get());
 
         // clear the old handle first, so we don't leak memory
@@ -71,7 +76,76 @@
         mMtp->mHandle = mHandle.get();
     }
 
-    void run() { mMtp->run(); }
+    void process() {
+        if (mFdp.ConsumeBool()) {
+            createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
+        }
+
+        while (mFdp.remaining_bytes()) {
+            MtpStorage storage(mFdp.ConsumeIntegral<uint32_t>() /* id */,
+                               mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* filePath */,
+                               mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* description */,
+                               mFdp.ConsumeBool() /* removable */,
+                               mFdp.ConsumeIntegral<uint64_t>() /* maxFileSize */);
+
+            auto invokeMtpServerAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                    [&]() { mMtp->run(); },
+                    [&]() { mMtp->sendObjectAdded(mFdp.ConsumeIntegral<uint32_t>()); },
+                    [&]() { mMtp->sendObjectRemoved(mFdp.ConsumeIntegral<uint32_t>()); },
+                    [&]() { mMtp->sendObjectInfoChanged(mFdp.ConsumeIntegral<uint32_t>()); },
+                    [&]() { mMtp->sendDevicePropertyChanged(mFdp.ConsumeIntegral<uint16_t>()); },
+                    [&]() { mMtp->addStorage(&storage); },
+                    [&]() { mMtp->removeStorage(&storage); },
+            });
+
+            invokeMtpServerAPI();
+        }
+
+        std::filesystem::remove_all(source_database);
+    }
+
+  private:
+    void createFiles(std::string path, size_t fileCount) {
+        std::ofstream file;
+        for (size_t idx = 0; idx < fileCount; ++idx) {
+            file.open(path.append(std::to_string(idx)));
+            file.close();
+        }
+    }
+
+    void addPackets(const uint8_t* data, size_t size) {
+        size_t off = 0;
+        for (size_t i = 0; i < size; ++i) {
+            // A longer delimiter could be used, but this worked in practice
+            if (data[i] == '@') {
+                size_t pktsz = i - off;
+                if (pktsz > 0) {
+                    packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
+                    // insert into packet buffer
+                    mHandle->add_packet(pkt);
+                    off = i;
+                }
+            }
+        }
+    }
+
+    void init(const uint8_t* data, size_t size) {
+        std::vector<uint8_t> packetData = mFdp.ConsumeBytes<uint8_t>(
+                mFdp.ConsumeIntegralInRange<int32_t>(kMinDataSizeFactor * size, size));
+
+        // Packetize the input stream
+        addPackets(packetData.data(), packetData.size());
+
+        // Randomly set the property to true/false; the original PoC depended on its value
+        base::SetProperty(kPropertyKey, mFdp.ConsumeBool() ? "true" : "false");
+
+        std::filesystem::create_directories(source_database);
+        if (mFdp.ConsumeBool()) {
+            std::filesystem::create_directories(test_path);
+            createFiles(test_path, mFdp.ConsumeIntegralInRange<size_t>(kMinFiles, kMaxFiles));
+        }
+        createFiles(source_database, mFdp.ConsumeIntegralInRange<size_t>(kMinFiles, kMaxFiles));
+    }
 
     int createDatabaseFromSourceDir(const char* fromPath, const char* toPath,
                                     MtpObjectHandle parentHandle) {
@@ -130,8 +204,14 @@
         closedir(dir);
         return ret;
     }
+
+    FuzzedDataProvider mFdp;
+    std::unique_ptr<MtpMockHandle> mHandle;
+    std::unique_ptr<MtpStorage> mStorage;
+    std::unique_ptr<MtpMockDatabase> mDatabase;
+    std::unique_ptr<MtpServer> mMtp;
 };
-}; // namespace android
+};  // namespace android
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) __attribute__((optnone)) {
     // reset our storage (from MtpUtils.h)
@@ -140,26 +220,9 @@
     android::makeFolder(storage_path);
 
     std::unique_ptr<android::MtpMockServer> mtp =
-            std::make_unique<android::MtpMockServer>(storage_path);
+            std::make_unique<android::MtpMockServer>(data, size);
+    mtp->process();
 
-    size_t off = 0;
-
-    // Packetize the input stream
-    for (size_t i = 0; i < size; i++) {
-        // A longer delimiter could be used, but this worked in practice
-        if (data[i] == '@') {
-            size_t pktsz = i - off;
-            if (pktsz > 0) {
-                packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
-                // insert into packet buffer
-                mtp->mHandle->add_packet(pkt);
-                off = i;
-            }
-        }
-    }
-
-    mtp->createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
-    mtp->run();
-
+    std::filesystem::remove_all("/storage/fuzzer");
     return 0;
 }
diff --git a/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
new file mode 100644
index 0000000..7dcdc3f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <MtpDescriptors.h>
+#include <MtpFfsCompatHandle.h>
+#include <android-base/file.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <mtp.h>
+
+using namespace android;
+
+constexpr int32_t kMaxStringLength = 64;
+constexpr int32_t kMinAPICase = 0;
+constexpr int32_t kMaxMtpHandleAPI = 5;
+constexpr int32_t kMinBufferSize = 0;
+constexpr uint32_t kMaxMtpFileSize = 0xFFFFFFFF;
+constexpr float kDataSizeFactor = 0.1;
+
+const std::string kTempPath = "/data/local/tmp/";
+const std::string kFuzzerUsbDirPath = kTempPath + "usb-ffs";
+const std::string kFuzzerMtpPath = kFuzzerUsbDirPath + "/mtp";
+const std::string kFuzzerPtpPath = kFuzzerUsbDirPath + "/ptp";
+const std::string kFuzzerTestFile = kTempPath + "FuzzerTestDescriptorFile";
+const std::string kFuzzerMtpInputFile = kTempPath + "FuzzerMtpInputFile";
+const std::string kFuzzerMtpOutputFile = kTempPath + "FuzzerMtpOutputFile";
+
+const std::string kDeviceFilePaths[] = {FFS_MTP_EP0,    FFS_MTP_EP_IN, FFS_MTP_EP_INTR,
+                                        FFS_PTP_EP0,    FFS_PTP_EP_IN, FFS_PTP_EP_INTR,
+                                        FFS_MTP_EP_OUT, FFS_PTP_EP_OUT};
+
+class MtpFfsHandleFuzzer {
+  public:
+    MtpFfsHandleFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mDataSize = kDataSizeFactor * size;
+        createFiles();
+    };
+    void process();
+
+    ~MtpFfsHandleFuzzer() { removeFiles(); };
+
+  private:
+    FuzzedDataProvider mFdp;
+    void invokeWriteDescriptor();
+    void invokeMtpFfsHandle();
+    void createFiles();
+    void removeFiles();
+    void createDeviceFile(const char* file);
+    void writeDeviceFile(const char* file);
+    int32_t writeInputFile(int32_t fd);
+    uint32_t mDataSize = 0;
+};
+
+int32_t MtpFfsHandleFuzzer::writeInputFile(int32_t fd) {
+    uint32_t minFileSize = std::min((uint32_t)MTP_BUFFER_SIZE, mDataSize);
+    uint32_t maxFileSize = std::min(mDataSize, kMaxMtpFileSize);
+    std::vector<char> dataBuffer = mFdp.ConsumeBytes<char>(
+            mFdp.ConsumeIntegralInRange<uint32_t>(minFileSize, maxFileSize));
+    write(fd, dataBuffer.data(), dataBuffer.size());
+    lseek(fd, 0, SEEK_SET);
+    return dataBuffer.size();
+}
+
+void MtpFfsHandleFuzzer::createDeviceFile(const char* file) {
+    int32_t fd = open(file, O_CREAT | O_RDWR | O_NONBLOCK);
+    close(fd);
+}
+
+void MtpFfsHandleFuzzer::writeDeviceFile(const char* file) {
+    int32_t fd = open(file, O_RDWR | O_NONBLOCK);
+    writeInputFile(fd);
+    close(fd);
+}
+
+void MtpFfsHandleFuzzer::createFiles() {
+    mkdir(kFuzzerUsbDirPath.c_str(), 0755);
+    mkdir(kFuzzerMtpPath.c_str(), 0755);
+    mkdir(kFuzzerPtpPath.c_str(), 0755);
+
+    for (auto path : kDeviceFilePaths) {
+        createDeviceFile(path.c_str());
+    }
+
+    writeDeviceFile(FFS_MTP_EP_OUT);
+    writeDeviceFile(FFS_PTP_EP_OUT);
+}
+
+void MtpFfsHandleFuzzer::removeFiles() {
+    for (auto path : kDeviceFilePaths) {
+        remove(path.c_str());
+    }
+
+    rmdir(kFuzzerMtpPath.c_str());
+    rmdir(kFuzzerPtpPath.c_str());
+    rmdir(kFuzzerUsbDirPath.c_str());
+}
+
+void MtpFfsHandleFuzzer::invokeWriteDescriptor() {
+    while (mFdp.remaining_bytes() > 0) {
+        int32_t controlFd = mFdp.ConsumeBool()
+                                    ? -1 /* Invalid fd*/
+                                    : open(kFuzzerTestFile.c_str(), O_CREAT | O_RDWR | O_NONBLOCK);
+        std::unique_ptr<MtpFfsHandle> handle(new MtpFfsHandle(controlFd));
+        handle->writeDescriptors(mFdp.ConsumeBool());
+        handle->close();
+        close(controlFd);
+        remove(kFuzzerTestFile.c_str());
+    }
+}
+
+void MtpFfsHandleFuzzer::invokeMtpFfsHandle() {
+    while (mFdp.remaining_bytes() > 0) {
+        int32_t controlFd = open(kFuzzerTestFile.c_str(), O_CREAT | O_RDWR | O_NONBLOCK);
+        writeInputFile(controlFd);
+
+        std::unique_ptr<IMtpHandle> handle;
+        if (mFdp.ConsumeBool()) {
+            std::unique_ptr<IMtpHandle> mtpCompactHandle(new MtpFfsCompatHandle(controlFd));
+            handle = std::move(mtpCompactHandle);
+        } else {
+            std::unique_ptr<IMtpHandle> mtpHandle(new MtpFfsHandle(controlFd));
+            handle = std::move(mtpHandle);
+        }
+
+        int32_t mtpHandle = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxMtpHandleAPI);
+        switch (mtpHandle) {
+            case 0: {
+                handle->start(mFdp.ConsumeBool());
+                break;
+            }
+            case 1: {
+                std::string data = mFdp.ConsumeRandomLengthString(MTP_BUFFER_SIZE);
+                handle->write(data.c_str(), data.length());
+                break;
+            }
+            case 2: {
+                int32_t bufferSize =
+                        mFdp.ConsumeIntegralInRange<size_t>(kMinBufferSize, MTP_BUFFER_SIZE);
+                uint8_t buffer[bufferSize + 1];
+                handle->read(buffer, bufferSize);
+                break;
+            }
+            case 3: {
+                mtp_file_range mfr;
+                mfr.fd = open(kFuzzerMtpInputFile.c_str(), O_CREAT | O_RDWR | O_NONBLOCK);
+                mfr.length = writeInputFile(mfr.fd);
+                mfr.offset = 0; /* Offset points to the start of the file */
+                mfr.command = mFdp.ConsumeIntegral<uint16_t>();
+                mfr.transaction_id = mFdp.ConsumeIntegral<uint32_t>();
+                handle->sendFile(mfr);
+                close(mfr.fd);
+                remove(kFuzzerMtpInputFile.c_str());
+                break;
+            }
+            case 4: {
+                struct mtp_event event;
+                std::string dataValue = mFdp.ConsumeRandomLengthString(kMaxStringLength);
+                event.data = const_cast<char*>(dataValue.c_str());
+                event.length = dataValue.length();
+                handle->sendEvent(event);
+                break;
+            }
+            case 5:
+            default: {
+                mtp_file_range mfr;
+                mfr.fd = open(kFuzzerMtpOutputFile.c_str(), O_CREAT | O_RDWR | O_NONBLOCK);
+                mfr.offset = 0; /* Offset points to the start of the file */
+                mfr.length = kMaxMtpFileSize;
+                handle->receiveFile(mfr, mFdp.ConsumeBool());
+                close(mfr.fd);
+                remove(kFuzzerMtpOutputFile.c_str());
+                break;
+            }
+        }
+        handle->close();
+        close(controlFd);
+        remove(kFuzzerTestFile.c_str());
+    }
+}
+
+void MtpFfsHandleFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        invokeMtpFfsHandle();
+    } else {
+        invokeWriteDescriptor();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpFfsHandleFuzzer mtpFfsHandleFuzzer(data, size);
+    mtpFfsHandleFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_packet_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_packet_fuzzer.cpp
new file mode 100644
index 0000000..6fc2a96
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_packet_fuzzer.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDevHandle.h>
+#include <MtpPacket.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+using namespace android;
+
+class MtpPacketFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                   sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpPacketFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+void MtpPacketFuzzer::process() {
+    MtpPacket mtpPacket(mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize)); /*bufferSize*/
+    while (mFdp.remaining_bytes() > 0) {
+        auto mtpPacketAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    mtpPacket.allocate(mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                },
+                [&]() { mtpPacket.reset(); },
+                [&]() { mtpPacket.getContainerType(); },
+                [&]() { mtpPacket.getContainerCode(); },
+                [&]() { mtpPacket.dump(); },
+                [&]() { mtpPacket.getTransactionID(); },
+                [&]() {
+                    mtpPacket.setContainerCode(
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    mtpPacket.setTransactionID(
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    mtpPacket.getParameter(
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    mtpPacket.setParameter(
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize),
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize));
+                },
+                [&]() {
+                    MtpPacket testMtpPacket(
+                            mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize));
+                    testMtpPacket.copyFrom(mtpPacket);
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpPacket.transfer(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+        });
+        mtpPacketAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpPacketFuzzer mtpPacketFuzzer(data, size);
+    mtpPacketFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_property_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_property_fuzzer.cpp
new file mode 100644
index 0000000..b4e659c
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_property_fuzzer.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDataPacket.h>
+#include <MtpDevHandle.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <MtpProperty.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <utils/String16.h>
+
+using namespace android;
+
+constexpr uint16_t kFeasibleTypes[] = {
+        MTP_TYPE_UNDEFINED, MTP_TYPE_INT8,    MTP_TYPE_UINT8,  MTP_TYPE_INT16,   MTP_TYPE_UINT16,
+        MTP_TYPE_INT32,     MTP_TYPE_UINT32,  MTP_TYPE_INT64,  MTP_TYPE_UINT64,  MTP_TYPE_INT128,
+        MTP_TYPE_UINT128,   MTP_TYPE_AINT8,   MTP_TYPE_AUINT8, MTP_TYPE_AINT16,  MTP_TYPE_AUINT16,
+        MTP_TYPE_AINT32,    MTP_TYPE_AUINT32, MTP_TYPE_AINT64, MTP_TYPE_AUINT64, MTP_TYPE_AINT128,
+        MTP_TYPE_AUINT128,  MTP_TYPE_STR,
+};
+
+class MtpPropertyFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpPropertyFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                    sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpPropertyFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+void MtpPropertyFuzzer::process() {
+    MtpProperty* mtpProperty = nullptr;
+    if (mFdp.ConsumeBool()) {
+        mtpProperty = new MtpProperty();
+    } else {
+        uint16_t type = mFdp.ConsumeBool() ? mFdp.ConsumeIntegral<uint16_t>()
+                                           : mFdp.PickValueInArray<uint16_t>(kFeasibleTypes);
+        mtpProperty = new MtpProperty(mFdp.ConsumeIntegral<uint16_t>(), type, mFdp.ConsumeBool(),
+                                      mFdp.ConsumeIntegral<uint16_t>());
+    }
+
+    while (mFdp.remaining_bytes() > 0) {
+        auto invokeMtpPropertyFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    MtpDataPacket mtpDataPacket;
+                    if (mFdp.ConsumeBool()) {
+                        mtpProperty->read(mtpDataPacket);
+
+                    } else {
+                        if (mFdp.ConsumeBool()) {
+#ifdef MTP_DEVICE
+                            android::IMtpHandle* h = new MtpDevHandle();
+                            h->start(mFdp.ConsumeBool());
+                            std::string text = mFdp.ConsumeRandomLengthString(kMaxLength);
+                            char* data = const_cast<char*>(text.c_str());
+                            h->read(static_cast<void*>(data), text.length());
+                            mtpDataPacket.write(h);
+                            h->close();
+                            delete h;
+#endif
+
+#ifdef MTP_HOST
+                            fillFilePath(&mFdp);
+                            int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                            fillUsbRequest(fd, &mFdp);
+                            mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                            mtpDataPacket.write(&mUsbRequest,
+                                                mFdp.PickValueInArray<UrbPacketDivisionMode>(
+                                                        kUrbPacketDivisionModes),
+                                                fd,
+                                                mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize));
+                            usb_device_close(mUsbRequest.dev);
+#endif
+                        }
+
+                        if (mFdp.ConsumeBool()) {
+                            mtpProperty->write(mtpDataPacket);
+                        } else {
+                            mtpProperty->setCurrentValue(mtpDataPacket);
+                        }
+                    }
+                },
+                [&]() {
+                    char16_t* data = nullptr;
+                    std::string str = mFdp.ConsumeRandomLengthString(kMaxLength);
+                    android::String16 s(str.c_str());
+                    if (mFdp.ConsumeBool()) {
+                        data = const_cast<char16_t*>(s.string());
+                    }
+
+                    if (mFdp.ConsumeBool()) {
+                        mtpProperty->setDefaultValue(reinterpret_cast<uint16_t*>(data));
+                    } else if (mFdp.ConsumeBool()) {
+                        mtpProperty->setCurrentValue(reinterpret_cast<uint16_t*>(data));
+                    } else {
+                        mtpProperty->setCurrentValue(str.c_str());
+                    }
+                },
+                [&]() {
+                    mtpProperty->setFormRange(mFdp.ConsumeIntegral<int32_t>(),
+                                              mFdp.ConsumeIntegral<int32_t>(),
+                                              mFdp.ConsumeIntegral<int32_t>());
+                },
+                [&]() {
+                    std::vector<int32_t> init;
+                    for (size_t idx = 0; idx < mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize);
+                         ++idx) {
+                        init.push_back(mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    mtpProperty->setFormEnum(init.data(), init.size());
+                },
+        });
+        invokeMtpPropertyFuzzer();
+    }
+
+    delete mtpProperty;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpPropertyFuzzer mtpPropertyFuzzer(data, size);
+    mtpPropertyFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_request_packet_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_request_packet_fuzzer.cpp
new file mode 100644
index 0000000..19fbc5b
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_request_packet_fuzzer.cpp
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDevHandle.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <MtpRequestPacket.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <fstream>
+
+using namespace android;
+
+std::string kMtpDevPath = "/dev/mtp_usb";
+constexpr int32_t kMaxBytes = 100000;
+
+class MtpRequestPacketFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpRequestPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                   sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpRequestPacketFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    void makeFile(std::string s);
+};
+
+void MtpRequestPacketFuzzer::process() {
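+    // Each iteration picks one MtpRequestPacket API at random; the read() case
+    // creates a regular file at kMtpDevPath first, so no real MTP device node
+    // is needed.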
+    MtpRequestPacket mtpRequestPacket;
+    while (mFdp.remaining_bytes() > 0) {
+        auto mtpRequestAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    mtpRequestPacket.allocate(mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize));
+                },
+                [&]() { mtpRequestPacket.reset(); },
+                [&]() {
+                    MtpDevHandle handle;
+                    makeFile(kMtpDevPath);
+                    handle.start(mFdp.ConsumeBool());
+                    std::vector<uint8_t> data = mFdp.ConsumeBytes<uint8_t>(
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    handle.write(data.data(), data.size());
+                    mtpRequestPacket.read(&handle);
+                    handle.close();
+                    remove(kMtpDevPath.c_str());
+                },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpRequestPacket.write(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+        });
+        mtpRequestAPI();
+    }
+}
+
+void MtpRequestPacketFuzzer::makeFile(std::string s) {
+    std::ofstream out;
+    out.open(s, std::ios::binary | std::ofstream::trunc);
+    for (int32_t idx = 0; idx < mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize); ++idx) {
+        out << mFdp.ConsumeRandomLengthString(kMaxBytes) << "\n";
+    }
+    out.close();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpRequestPacketFuzzer mtpRequestPacketFuzzer(data, size);
+    mtpRequestPacketFuzzer.process();
+    return 0;
+}
diff --git a/media/mtp/tests/MtpFuzzer/mtp_response_packet_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_response_packet_fuzzer.cpp
new file mode 100644
index 0000000..697785f
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/mtp_response_packet_fuzzer.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <MtpDevHandle.h>
+#include <MtpPacketFuzzerUtils.h>
+#include <MtpResponsePacket.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+using namespace android;
+
+class MtpResponsePacketFuzzer : MtpPacketFuzzerUtils {
+  public:
+    MtpResponsePacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mUsbDevFsUrb = (struct usbdevfs_urb*)malloc(sizeof(struct usbdevfs_urb) +
+                                                   sizeof(struct usbdevfs_iso_packet_desc));
+    };
+    ~MtpResponsePacketFuzzer() { free(mUsbDevFsUrb); };
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
+void MtpResponsePacketFuzzer::process() {
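+    // The response packet is exercised against a memfd-backed usb_device, so
+    // the USB read path runs without real hardware.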
+    MtpResponsePacket mtpResponsePacket;
+    while (mFdp.remaining_bytes() > 0) {
+        auto mtpResponseAPI = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    mtpResponsePacket.allocate(
+                            mFdp.ConsumeIntegralInRange(kMinSize, kMaxSize)); /*size*/
+                },
+                [&]() { mtpResponsePacket.reset(); },
+                [&]() { writeHandle(&mtpResponsePacket, &mFdp); },
+                [&]() {
+                    fillFilePath(&mFdp);
+                    int32_t fd = memfd_create(mPath.c_str(), MFD_ALLOW_SEALING);
+                    fillUsbRequest(fd, &mFdp);
+                    mUsbRequest.dev = usb_device_new(mPath.c_str(), fd);
+                    mtpResponsePacket.read(&mUsbRequest);
+                    usb_device_close(mUsbRequest.dev);
+                },
+        });
+        mtpResponseAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    MtpResponsePacketFuzzer mtpResponsePacketFuzzer(data, size);
+    mtpResponsePacketFuzzer.process();
+    return 0;
+}
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index ddc71db..fded4f5 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -52,6 +52,9 @@
     symbol_file: "libmediandk.map.txt",
     first_version: "21",
     unversioned_until: "current",
+    export_header_libs: [
+        "libmediandk_headers",
+    ],
 }
 
 ndk_headers {
@@ -167,7 +170,7 @@
     stubs: {
         symbol_file: "libmediandk.map.txt",
         versions: ["29"],
-    }
+    },
 }
 
 cc_library {
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 12a0d53..c2093ac 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -24,6 +24,7 @@
 
 #include <android_media_Utils.h>
 #include <private/android/AHardwareBufferHelpers.h>
+#include <ui/PublicFormat.h>
 #include <utils/Log.h>
 
 using namespace android;
@@ -34,6 +35,8 @@
         int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
         mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
         mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+    PublicFormat publicFormat = static_cast<PublicFormat>(format);
+    mHalDataSpace = mapPublicFormatToHalDataspace(publicFormat);
     LOG_FATAL_IF(reader == nullptr, "AImageReader shouldn't be null while creating AImage");
 }
 
@@ -156,6 +159,20 @@
     return AMEDIA_OK;
 }
 
+media_status_t
+AImage::getDataSpace(android_dataspace* dataSpace) const {
+    if (dataSpace == nullptr) {
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    *dataSpace = static_cast<android_dataspace>(-1);
+    if (isClosed()) {
+        ALOGE("%s: image %p has been closed!", __FUNCTION__, this);
+        return AMEDIA_ERROR_INVALID_OBJECT;
+    }
+    *dataSpace = mHalDataSpace;
+    return AMEDIA_OK;
+}
+
 media_status_t AImage::lockImage() {
     if (mBuffer == nullptr || mBuffer->mGraphicBuffer == nullptr) {
         LOG_ALWAYS_FATAL("%s: AImage %p has no buffer.", __FUNCTION__, this);
@@ -247,6 +264,13 @@
         case HAL_PIXEL_FORMAT_YCrCb_420_SP:
             *pixelStride = (planeIdx == 0) ? 1 : 2;
             return AMEDIA_OK;
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
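+            // P010 carries 16 bits per sample: the Y plane advances 2 bytes per
+            // pixel; chroma uses chromaStep when per-plane pointers exist and
+            // 4 bytes (interleaved CbCr) otherwise.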
+            if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+                *pixelStride = (planeIdx == 0) ? 2 : mLockedBuffer->chromaStep;
+            } else {
+                *pixelStride = (planeIdx == 0) ? 2 : 4;
+            }
+            return AMEDIA_OK;
         case HAL_PIXEL_FORMAT_Y8:
             *pixelStride = 1;
             return AMEDIA_OK;
@@ -316,6 +340,13 @@
             *rowStride = (planeIdx == 0) ? mLockedBuffer->stride
                                          : ALIGN(mLockedBuffer->stride / 2, 16);
             return AMEDIA_OK;
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
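+            // Each P010 sample is two bytes, so without per-plane pointers a
+            // row spans stride * 2 bytes.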
+            if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+                *rowStride = (planeIdx == 0) ?  mLockedBuffer->stride : mLockedBuffer->chromaStride;
+            } else {
+                *rowStride = mLockedBuffer->stride * 2;
+            }
+            return AMEDIA_OK;
         case HAL_PIXEL_FORMAT_RAW10:
         case HAL_PIXEL_FORMAT_RAW12:
             // RAW10 and RAW12 are used for 10-bit and 12-bit raw data, they are single plane
@@ -473,6 +504,47 @@
                                     : (planeIdx == 1) ? cb : cr;
             dataSize = (planeIdx == 0) ? ySize : cSize;
             break;
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
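+            // Validate the locked buffer geometry first: P010 needs an even
+            // height and positive dimensions.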
+            if (mLockedBuffer->height % 2 != 0) {
+                ALOGE("YCBCR_P010: height (%d) should be a multiple of 2", mLockedBuffer->height);
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            if (mLockedBuffer->width <= 0) {
+                ALOGE("YCBCR_P010: width (%d) should be > 0", mLockedBuffer->width);
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            if (mLockedBuffer->height <= 0) {
+                ALOGE("YCBCR_P010: height (%d) should be > 0", mLockedBuffer->height);
+                return AMEDIA_ERROR_UNKNOWN;
+            }
+
+            if (mLockedBuffer->dataCb && mLockedBuffer->dataCr) {
+                pData = (planeIdx == 0) ?  mLockedBuffer->data :
+                        (planeIdx == 1) ?  mLockedBuffer->dataCb : mLockedBuffer->dataCr;
+                // only map until last pixel
+                if (planeIdx == 0) {
+                    cStride = mLockedBuffer->stride;
+                    dataSize = cStride * (mLockedBuffer->height - 1) + mLockedBuffer->width * 2;
+                } else {
+                    bytesPerPixel = mLockedBuffer->chromaStep;
+                    cStride = mLockedBuffer->chromaStride;
+                    dataSize = cStride * (mLockedBuffer->height / 2 - 1) +
+                            bytesPerPixel * (mLockedBuffer->width / 2);
+                }
+                break;
+            }
+
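+            // No per-plane pointers: fall back to the contiguous semi-planar
+            // layout, a full-height Y plane followed by interleaved CbCr at
+            // half height, two bytes per sample.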
+            cStride = mLockedBuffer->stride * 2;
+            ySize = cStride * mLockedBuffer->height;
+            cSize = ySize / 2;
+            cb = mLockedBuffer->data + ySize;
+            cr = cb + 2;
+
+            pData = (planeIdx == 0) ?  mLockedBuffer->data : (planeIdx == 1) ?  cb : cr;
+            dataSize = (planeIdx == 0) ? ySize : cSize;
+            break;
         case HAL_PIXEL_FORMAT_Y8:
             // Single plane, 8bpp.
 
@@ -762,3 +834,15 @@
     }
     return image->getHardwareBuffer(buffer);
 }
+
+EXPORT
+media_status_t AImage_getDataSpace(
+    const AImage* image, /*out*/int32_t* dataSpace) {
+    ALOGV("%s", __FUNCTION__);
+
+    if (image == nullptr || dataSpace == nullptr) {
+        ALOGE("%s: bad argument. image %p dataSpace %p", __FUNCTION__, image, dataSpace);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    return image->getDataSpace((android_dataspace*)(dataSpace));
+}
\ No newline at end of file
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index 05115b9..dc10a6a 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -82,6 +82,7 @@
     media_status_t getPlaneRowStride(int planeIdx, /*out*/int32_t* rowStride) const;
     media_status_t getPlaneData(int planeIdx,/*out*/uint8_t** data, /*out*/int* dataLength) const;
     media_status_t getHardwareBuffer(/*out*/AHardwareBuffer** buffer) const;
+    media_status_t getDataSpace(/*out*/android_dataspace* dataSpace) const;
 
   private:
     // AImage should be deleted through free() API.
@@ -101,6 +102,7 @@
     const int32_t              mWidth;
     const int32_t              mHeight;
     const int32_t              mNumPlanes;
+    android_dataspace          mHalDataSpace = HAL_DATASPACE_UNKNOWN;
     bool                       mIsClosed = false;
     mutable Mutex              mLock;
 };
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 1067e24..067c8f4 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -46,6 +46,9 @@
 
 static constexpr int kWindowHalTokenSizeMax = 256;
 
+static media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
+                                         uint64_t usage, int32_t maxImages,
+                                         /*out*/ AImageReader**& reader);
 static native_handle_t *convertHalTokenToNativeHandle(const HalToken &halToken);
 
 bool
@@ -73,6 +76,7 @@
         case AIMAGE_FORMAT_HEIC:
         case AIMAGE_FORMAT_DEPTH_JPEG:
         case AIMAGE_FORMAT_RAW_DEPTH10:
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
             return true;
         case AIMAGE_FORMAT_PRIVATE:
             // For private format, cpu usage is prohibited.
@@ -86,6 +90,7 @@
 AImageReader::getNumPlanesForFormat(int32_t format) {
     switch (format) {
         case AIMAGE_FORMAT_YUV_420_888:
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
             return 3;
         case AIMAGE_FORMAT_RGBA_8888:
         case AIMAGE_FORMAT_RGBX_8888:
@@ -263,13 +268,17 @@
                            int32_t height,
                            int32_t format,
                            uint64_t usage,
-                           int32_t maxImages)
+                           int32_t maxImages,
+                           uint32_t hardwareBufferFormat,
+                           android_dataspace dataSpace)
     : mWidth(width),
       mHeight(height),
       mFormat(format),
       mUsage(usage),
       mMaxImages(maxImages),
       mNumPlanes(getNumPlanesForFormat(format)),
+      mHalFormat(hardwareBufferFormat),
+      mHalDataSpace(dataSpace),
       mFrameListener(new FrameListener(this)),
       mBufferRemovedListener(new BufferRemovedListener(this)) {}
 
@@ -280,9 +289,6 @@
 
 media_status_t
 AImageReader::init() {
-    PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
-    mHalFormat = mapPublicFormatToHalFormat(publicFormat);
-    mHalDataSpace = mapPublicFormatToHalDataspace(publicFormat);
     mHalUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
 
     sp<IGraphicBufferProducer> gbProducer;
@@ -646,6 +652,41 @@
     }
 }
 
+static
+media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
+                                  uint64_t usage, int32_t maxImages,
+                                  /*out*/ AImageReader**& reader) {
+    if (reader == nullptr) {
+        ALOGE("%s: reader argument is null", __FUNCTION__);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (width < 1 || height < 1) {
+        ALOGE("%s: image dimension must be positive: w:%d h:%d",
+                __FUNCTION__, width, height);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (maxImages < 1) {
+        ALOGE("%s: max outstanding image count must be at least 1 (%d)",
+                __FUNCTION__, maxImages);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (maxImages > BufferQueueDefs::NUM_BUFFER_SLOTS) {
+        ALOGE("%s: max outstanding image count (%d) cannot be larget than %d.",
+              __FUNCTION__, maxImages, BufferQueueDefs::NUM_BUFFER_SLOTS);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
+        ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
+                __FUNCTION__, format, usage);
+        return AMEDIA_ERROR_INVALID_PARAMETER;
+    }
+    return AMEDIA_OK;
+}
+
 static native_handle_t *convertHalTokenToNativeHandle(
         const HalToken &halToken) {
     // We attempt to store halToken in the ints of the native_handle_t after its
@@ -696,42 +737,32 @@
 } //extern "C"
 
 EXPORT
+media_status_t AImageReader_newWithDataSpace(
+        int32_t width, int32_t height, uint64_t usage, int32_t maxImages,
+        uint32_t hardwareBufferFormat, int32_t dataSpace,
+        /*out*/ AImageReader** reader) {
+    ALOGV("%s", __FUNCTION__);
+
+    android_dataspace halDataSpace = static_cast<android_dataspace>(dataSpace);
+    int32_t format = static_cast<int32_t>(
+          mapHalFormatDataspaceToPublicFormat(hardwareBufferFormat, halDataSpace));
+    return AImageReader_newWithUsage(width, height, format, usage, maxImages, reader);
+}
+
+EXPORT
 media_status_t AImageReader_newWithUsage(
         int32_t width, int32_t height, int32_t format, uint64_t usage,
         int32_t maxImages, /*out*/ AImageReader** reader) {
     ALOGV("%s", __FUNCTION__);
 
-    if (width < 1 || height < 1) {
-        ALOGE("%s: image dimension must be positive: w:%d h:%d",
-                __FUNCTION__, width, height);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
+    media_status_t validationStatus =
+            validateParameters(width, height, format, usage, maxImages, reader);
+    if (validationStatus != AMEDIA_OK) {
+        return validationStatus;
+    }
 
-    if (maxImages < 1) {
-        ALOGE("%s: max outstanding image count must be at least 1 (%d)",
-                __FUNCTION__, maxImages);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (maxImages > BufferQueueDefs::NUM_BUFFER_SLOTS) {
-        ALOGE("%s: max outstanding image count (%d) cannot be larget than %d.",
-              __FUNCTION__, maxImages, BufferQueueDefs::NUM_BUFFER_SLOTS);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
-        ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
-                __FUNCTION__, format, usage);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (reader == nullptr) {
-        ALOGE("%s: reader argument is null", __FUNCTION__);
-        return AMEDIA_ERROR_INVALID_PARAMETER;
-    }
+    PublicFormat publicFormat = static_cast<PublicFormat>(format);
+    uint32_t halFormat = mapPublicFormatToHalFormat(publicFormat);
+    android_dataspace halDataSpace = mapPublicFormatToHalDataspace(publicFormat);
 
     AImageReader* tmpReader = new AImageReader(
-        width, height, format, usage, maxImages);
+        width, height, format, usage, maxImages, halFormat, halDataSpace);
     if (tmpReader == nullptr) {
         ALOGE("%s: AImageReader allocation failed", __FUNCTION__);
         return AMEDIA_ERROR_UNKNOWN;
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 37c606e..0199616 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -56,10 +56,12 @@
                  int32_t height,
                  int32_t format,
                  uint64_t usage,
-                 int32_t maxImages);
+                 int32_t maxImages,
+                 uint32_t hardwareBufferFormat,
+                 android_dataspace dataSpace);
     ~AImageReader();
 
-    // Inintialize AImageReader, uninitialized or failed to initialize AImageReader
+    // Initialize AImageReader, uninitialized or failed to initialize AImageReader
     // should never be passed to application
     media_status_t init();
 
@@ -79,7 +81,6 @@
     void           close();
 
   private:
-
     friend struct AImage; // for grabing reader lock
 
     BufferItem* getBufferItemLocked();
@@ -118,13 +119,16 @@
 
     const int32_t mWidth;
     const int32_t mHeight;
-    const int32_t mFormat;
+    int32_t mFormat;
     const uint64_t mUsage;  // AHARDWAREBUFFER_USAGE_* flags.
     const int32_t mMaxImages;
 
     // TODO(jwcai) Seems completely unused in AImageReader class.
     const int32_t mNumPlanes;
 
+    uint32_t mHalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+    android_dataspace mHalDataSpace = HAL_DATASPACE_UNKNOWN;
+
     struct FrameListener : public ConsumerBase::FrameAvailableListener {
       public:
         explicit FrameListener(AImageReader* parent) : mReader(parent) {}
@@ -155,8 +159,6 @@
     };
     sp<BufferRemovedListener> mBufferRemovedListener;
 
-    int mHalFormat;
-    android_dataspace mHalDataSpace;
     uint64_t mHalUsage;
 
     sp<IGraphicBufferProducer> mProducer;
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 38e422d..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <charconv>
 #include <inttypes.h>
 #include <mutex>
 #include <set>
@@ -63,6 +64,10 @@
 
         if (untranslated.find(err) == untranslated.end()) {
             ALOGE("untranslated sf error code: %d", err);
+            char err_as_string[32];
+            snprintf(err_as_string, sizeof(err_as_string), "%d", err);
+            android_errorWriteWithInfoLog(0x534e4554, "224869524", -1,
+                                          err_as_string, strlen(err_as_string));
             untranslated.insert(err);
         }
     }
@@ -324,29 +329,61 @@
             }
 
             AMessage::Type type;
-            int64_t mediaTimeUs, systemNano;
-            size_t index = 0;
+            size_t n = data->countEntries();
 
-            // TODO. This code has dependency with MediaCodec::CreateFramesRenderedMessage.
-            for (size_t ix = 0; ix < data->countEntries(); ix++) {
-                AString name = data->getEntryNameAt(ix, &type);
-                if (name.startsWith(AStringPrintf("%zu-media-time-us", index).c_str())) {
-                    AMessage::ItemData data = msg->getEntryAt(index);
-                    data.find(&mediaTimeUs);
-                } else if (name.startsWith(AStringPrintf("%zu-system-nano", index).c_str())) {
-                    AMessage::ItemData data = msg->getEntryAt(index);
-                    data.find(&systemNano);
+            thread_local std::vector<std::optional<int64_t>> mediaTimesInUs;
+            thread_local std::vector<std::optional<int64_t>> systemTimesInNs;
+            mediaTimesInUs.resize(n);
+            systemTimesInNs.resize(n);
+            std::fill_n(mediaTimesInUs.begin(), n, std::nullopt);
+            std::fill_n(systemTimesInNs.begin(), n, std::nullopt);
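+            // Entries arrive as "<index>-media-time-us" / "<index>-system-nano"
+            // pairs; parse the numeric prefix so the two timestamps can be
+            // re-associated per frame before dispatching callbacks.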
+            for (size_t i = 0; i < n; i++) {
+                AString name = data->getEntryNameAt(i, &type);
+                if (name.endsWith("-media-time-us")) {
+                    int64_t mediaTimeUs;
+                    AMessage::ItemData itemData = data->getEntryAt(i);
+                    itemData.find(&mediaTimeUs);
 
-                    Mutex::Autolock _l(mCodec->mFrameRenderedCallbackLock);
-                    if (mCodec->mFrameRenderedCallback != NULL) {
+                    int index = -1;
+                    std::from_chars_result result = std::from_chars(
+                            name.c_str(), name.c_str() + name.find("-"), index);
+                    if (result.ec == std::errc() && 0 <= index && index < n) {
+                        mediaTimesInUs[index] = mediaTimeUs;
+                    } else {
+                        std::error_code ec = std::make_error_code(result.ec);
+                        ALOGE("Unexpected media time index: #%d with value %lldus (err=%d %s)",
+                              index, (long long)mediaTimeUs, ec.value(), ec.message().c_str());
+                    }
+                } else if (name.endsWith("-system-nano")) {
+                    int64_t systemNano;
+                    AMessage::ItemData itemData = data->getEntryAt(i);
+                    itemData.find(&systemNano);
+
+                    int index = -1;
+                    std::from_chars_result result = std::from_chars(
+                            name.c_str(), name.c_str() + name.find("-"), index);
+                    if (result.ec == std::errc() && 0 <= index && index < n) {
+                        systemTimesInNs[index] = systemNano;
+                    } else {
+                        std::error_code ec = std::make_error_code(result.ec);
+                        ALOGE("Unexpected system time index: #%d with value %lldns (err=%d %s)",
+                              index, (long long)systemNano, ec.value(), ec.message().c_str());
+                    }
+                }
+            }
+
+            Mutex::Autolock _l(mCodec->mFrameRenderedCallbackLock);
+            if (mCodec->mFrameRenderedCallback != NULL) {
+                for (size_t i = 0; i < n; ++i) {
+                    if (mediaTimesInUs[i] && systemTimesInNs[i]) {
                         mCodec->mFrameRenderedCallback(
                                 mCodec,
                                 mCodec->mFrameRenderedCallbackUserData,
-                                mediaTimeUs,
-                                systemNano);
+                                mediaTimesInUs[i].value(),
+                                systemTimesInNs[i].value());
+                    } else {
+                        break;
                     }
-
-                    index++;
                 }
             }
             break;
@@ -529,22 +566,31 @@
         AMediaCodecOnAsyncNotifyCallback callback,
         void *userdata) {
 
-    Mutex::Autolock _l(mData->mAsyncCallbackLock);
-
-    if (mData->mAsyncNotify == NULL) {
-        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+    {
+        Mutex::Autolock _l(mData->mAsyncCallbackLock);
+        if (mData->mAsyncNotify == NULL) {
+            mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+        }
+        // Set the callback ahead of time so it is ready to fire as soon as the
+        // setCallback() call below succeeds.
+        mData->mAsyncCallback = callback;
+        mData->mAsyncCallbackUserData = userdata;
     }
 
     // always call, codec may have been reset/re-configured since last call.
     status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
     if (err != OK) {
+        {
+            // The setup went wrong; clean up the callback pointers.
+            Mutex::Autolock _l(mData->mAsyncCallbackLock);
+            mData->mAsyncCallback = {};
+            mData->mAsyncCallbackUserData = nullptr;
+        }
         ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
         return translate_error(err);
     }
 
-    mData->mAsyncCallback = callback;
-    mData->mAsyncCallbackUserData = userdata;
-
     return AMEDIA_OK;
 }
 
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 59c1103..0df7636 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -183,7 +183,7 @@
     AMediaDrmSessionId asid = {sessionId.data(), sessionId.size()};
     int32_t dataSize = data.size();
     const uint8_t *dataPtr = data.data();
-    if (dataSize > 0) {
+    if (dataSize >= 0) {
         (*mEventListener)(mObj, &asid, ndkEventType, 0, dataPtr, dataSize);
     } else {
         ALOGE("invalid event data size=%d", dataSize);
@@ -253,7 +253,7 @@
 }
 
 static sp<IDrm> CreateDrm() {
-    return DrmUtils::MakeDrm();
+    return DrmUtils::MakeDrm(IDRM_NDK);
 }
 
 
@@ -758,6 +758,9 @@
 EXPORT
 media_status_t AMediaDrm_setPropertyByteArray(AMediaDrm *mObj,
         const char *propertyName, const uint8_t *value, size_t valueSize) {
+    if (!mObj || mObj->mDrm == NULL) {
+        return AMEDIA_ERROR_INVALID_OBJECT;
+    }
 
     Vector<uint8_t> byteArray;
     byteArray.appendArray(value, valueSize);
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 6d3c348..386e42c 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -125,6 +125,7 @@
 AMediaFormat* AMediaExtractor_getFileFormat(AMediaExtractor *mData) {
     sp<AMessage> format;
     mData->mImpl->getFileFormat(&format);
+    // Ignore any error; we want to return the empty format.
     return AMediaFormat_fromMsg(&format);
 }
 
@@ -247,7 +248,10 @@
     }
 
     sp<AMessage> format;
-    ex->mImpl->getFileFormat(&format);
+    if (ex->mImpl->getFileFormat(&format) != OK) {
+        android_errorWriteWithInfoLog(0x534e4554, "243222985", -1, nullptr, 0);
+        return NULL;
+    }
     sp<ABuffer> buffer;
     if(!format->findBuffer("pssh", &buffer)) {
         return NULL;
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 923453a..66b5dec 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -147,38 +147,80 @@
 
 EXPORT
 bool AMediaFormat_getInt32(AMediaFormat* format, const char *name, int32_t *out) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findInt32(name, out);
 }
 
 EXPORT
 bool AMediaFormat_getInt64(AMediaFormat* format, const char *name, int64_t *out) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findInt64(name, out);
 }
 
 EXPORT
 bool AMediaFormat_getFloat(AMediaFormat* format, const char *name, float *out) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findFloat(name, out);
 }
 
 EXPORT
 bool AMediaFormat_getDouble(AMediaFormat* format, const char *name, double *out) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findDouble(name, out);
 }
 
 EXPORT
 bool AMediaFormat_getSize(AMediaFormat* format, const char *name, size_t *out) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findSize(name, out);
 }
 
 EXPORT
 bool AMediaFormat_getRect(AMediaFormat* format, const char *name,
                           int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) {
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     return format->mFormat->findRect(name, left, top, right, bottom);
 }
 
 EXPORT
 bool AMediaFormat_getBuffer(AMediaFormat* format, const char *name, void** data, size_t *outsize) {
     sp<ABuffer> buf;
+    if (format == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     if (format->mFormat->findBuffer(name, &buf)) {
         *data = buf->data() + buf->offset();
         *outsize = buf->size();
@@ -189,7 +231,12 @@
 
 EXPORT
 bool AMediaFormat_getString(AMediaFormat* mData, const char *name, const char **out) {
-
+    if (mData == nullptr) {
+        return false;
+    }
+    if (name == nullptr) {
+        return false;
+    }
     for (size_t i = 0; i < mData->mStringCache.size(); i++) {
         if (strcmp(mData->mStringCache.keyAt(i).string(), name) == 0) {
             mData->mStringCache.removeItemsAt(i, 1);
@@ -212,43 +259,94 @@
 
 EXPORT
 void AMediaFormat_setInt32(AMediaFormat* format, const char *name, int32_t value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setInt32(name, value);
 }
 
 EXPORT
 void AMediaFormat_setInt64(AMediaFormat* format, const char *name, int64_t value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setInt64(name, value);
 }
 
 EXPORT
 void AMediaFormat_setFloat(AMediaFormat* format, const char* name, float value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setFloat(name, value);
 }
 
 EXPORT
 void AMediaFormat_setDouble(AMediaFormat* format, const char* name, double value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setDouble(name, value);
 }
 
 EXPORT
 void AMediaFormat_setSize(AMediaFormat* format, const char* name, size_t value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setSize(name, value);
 }
 
 EXPORT
 void AMediaFormat_setRect(AMediaFormat* format, const char *name,
                           int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     format->mFormat->setRect(name, left, top, right, bottom);
 }
 
 EXPORT
 void AMediaFormat_setString(AMediaFormat* format, const char* name, const char* value) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
+    if (value == nullptr) {
+        return;
+    }
     // AMessage::setString() makes a copy of the string
     format->mFormat->setString(name, value, strlen(value));
 }
 
 EXPORT
 void AMediaFormat_setBuffer(AMediaFormat* format, const char* name, const void* data, size_t size) {
+    if (format == nullptr) {
+        return;
+    }
+    if (name == nullptr) {
+        return;
+    }
     // the ABuffer(void*, size_t) constructor doesn't take ownership of the data, so create
     // a new buffer and copy the data into it
     sp<ABuffer> buf = new ABuffer(size);
@@ -270,6 +368,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_ALBUM = "album";
 EXPORT const char* AMEDIAFORMAT_KEY_ALBUMART = "albumart";
 EXPORT const char* AMEDIAFORMAT_KEY_ALBUMARTIST = "albumartist";
+EXPORT const char* AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
 EXPORT const char* AMEDIAFORMAT_KEY_ARTIST = "artist";
 EXPORT const char* AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO = "audio-presentation-info";
 EXPORT const char* AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID =
@@ -346,6 +445,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
 EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_B_FRAMES = "max-bframes";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER = "max-fps-to-encoder";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
 EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
index 1965e62..9d62884 100644
--- a/media/ndk/NdkMediaMuxer.cpp
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -46,7 +46,7 @@
     if (mData == nullptr) {
         return nullptr;
     }
-    mData->mImpl = new (std::nothrow) MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+    mData->mImpl = MediaMuxer::create(fd, (MediaMuxer::OutputFormat)format);
     if (mData->mImpl == nullptr) {
         delete mData;
         return nullptr;
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
new file mode 100644
index 0000000..a3d6a96
--- /dev/null
+++ b/media/ndk/fuzzer/Android.bp
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_ndk_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_media_ndk_license"],
+}
+
+cc_defaults {
+     name: "libmediandk_fuzzer_defaults",
+     shared_libs: [
+        "libandroid_runtime_lazy",
+        "libbase",
+        "libdatasource",
+        "libmedia",
+        "libmediadrm",
+        "libmedia_omx",
+        "libmedia_jni_utils",
+        "libstagefright",
+        "libstagefright_foundation",
+        "liblog",
+        "libutils",
+        "libcutils",
+        "libnativewindow",
+        "libhidlbase",
+        "libgui",
+        "libui",
+        "libmediandk",
+     ],
+     static_libs: [
+        "libmediandk_utils",
+        "libnativehelper_lazy",
+     ],
+     header_libs: [
+         "media_ndk_headers",
+     ],
+     fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "ndk_crypto_fuzzer",
+    srcs: ["ndk_crypto_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+}
+
+cc_fuzz {
+     name: "ndk_image_reader_fuzzer",
+     srcs: [
+        "ndk_image_reader_fuzzer.cpp",
+     ],
+     shared_libs: [
+        "android.hidl.token@1.0-utils",
+        "android.hardware.graphics.bufferqueue@1.0",
+     ],
+     cflags: [
+        "-D__ANDROID_VNDK__",
+     ],
+     defaults: ["libmediandk_fuzzer_defaults"],
+}
+
+cc_fuzz {
+    name: "ndk_extractor_fuzzer",
+    srcs: ["ndk_extractor_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+    shared_libs: ["libbinder_ndk",],
+    corpus: ["corpus/*"],
+}
+
+cc_fuzz {
+    name: "ndk_mediaformat_fuzzer",
+    srcs: ["ndk_mediaformat_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
+
+cc_fuzz {
+    name: "ndk_drm_fuzzer",
+    srcs: ["ndk_drm_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
+
+cc_fuzz {
+    name: "ndk_mediamuxer_fuzzer",
+    srcs: ["ndk_mediamuxer_fuzzer.cpp"],
+    defaults: ["libmediandk_fuzzer_defaults"],
+    shared_libs: ["libbinder_ndk",],
+}
+
+cc_fuzz {
+    name: "ndk_sync_codec_fuzzer",
+    srcs: [
+        "ndk_sync_codec_fuzzer.cpp",
+        "NdkMediaCodecFuzzerBase.cpp",
+    ],
+    header_libs: ["libnativewindow_headers",],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
new file mode 100644
index 0000000..fa81cd8
--- /dev/null
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.cpp
@@ -0,0 +1,414 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+
+static const std::string kMimeTypes[] = {
+        MIMETYPE_AUDIO_AMR_NB, MIMETYPE_AUDIO_AMR_WB,    MIMETYPE_AUDIO_MPEG,
+        MIMETYPE_AUDIO_AAC,    MIMETYPE_AUDIO_FLAC,      MIMETYPE_AUDIO_VORBIS,
+        MIMETYPE_AUDIO_OPUS,   MIMETYPE_AUDIO_RAW,       MIMETYPE_AUDIO_MSGSM,
+        MIMETYPE_AUDIO_EAC3,   MIMETYPE_AUDIO_SCRAMBLED, MIMETYPE_VIDEO_VP8,
+        MIMETYPE_VIDEO_VP9,    MIMETYPE_VIDEO_AV1,       MIMETYPE_VIDEO_AVC,
+        MIMETYPE_VIDEO_HEVC,   MIMETYPE_VIDEO_MPEG4,     MIMETYPE_VIDEO_H263,
+        MIMETYPE_VIDEO_MPEG2,  MIMETYPE_VIDEO_RAW,       MIMETYPE_VIDEO_SCRAMBLED};
+
+static const std::string kEncoderNames[] = {
+        "c2.android.avc.encoder",    "c2.android.vp8.encoder",   "c2.android.vp9.encoder",
+        "c2.android.hevc.encoder",   "c2.android.mpeg2.encoder", "c2.android.mpeg4.encoder",
+        "c2.android.opus.encoder",   "c2.android.amrnb.encoder", "c2.android.flac.encoder",
+        "c2.android.av1-aom.encoder"};
+
+static const std::string kDecoderNames[] = {"c2.android.avc.decoder",
+                                            "c2.android.vp8.decoder",
+                                            "c2.android.vp9.decoder",
+                                            "c2.android.hevc.decoder",
+                                            "c2.android.mpeg2.decoder",
+                                            "c2.android.mpeg4.decoder",
+                                            "c2.android.opus.decoder",
+                                            "c2.android.amrnb.decoder",
+                                            "c2.android.flac.decoder",
+                                            "c2.android.av1-aom.decoder"};
+
+static const std::string kFormatIntKeys[] = {AMEDIAFORMAT_KEY_BIT_RATE,
+                                             AMEDIAFORMAT_KEY_SAMPLE_RATE,
+                                             AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+                                             AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_WIDTH,
+                                             AMEDIAFORMAT_KEY_HEIGHT,
+                                             AMEDIAFORMAT_KEY_FRAME_RATE,
+                                             AMEDIAFORMAT_KEY_COLOR_FORMAT,
+                                             AMEDIAFORMAT_VIDEO_QP_P_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_P_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_I_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_I_MAX,
+                                             AMEDIAFORMAT_VIDEO_QP_B_MIN,
+                                             AMEDIAFORMAT_VIDEO_QP_B_MAX,
+                                             AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE,
+                                             AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
+                                             AMEDIAFORMAT_KEY_VALID_SAMPLES,
+                                             AMEDIAFORMAT_KEY_TRACK_INDEX,
+                                             AMEDIAFORMAT_KEY_TRACK_ID,
+                                             AMEDIAFORMAT_KEY_TILE_WIDTH,
+                                             AMEDIAFORMAT_KEY_TILE_HEIGHT,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT,
+                                             AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID,
+                                             AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT,
+                                             AMEDIAFORMAT_KEY_STRIDE,
+                                             AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+                                             AMEDIAFORMAT_KEY_SAR_WIDTH,
+                                             AMEDIAFORMAT_KEY_SAR_HEIGHT,
+                                             AMEDIAFORMAT_KEY_ROTATION,
+                                             AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN,
+                                             AMEDIAFORMAT_KEY_PROFILE,
+                                             AMEDIAFORMAT_KEY_PRIORITY,
+                                             AMEDIAFORMAT_KEY_PICTURE_TYPE,
+                                             AMEDIAFORMAT_KEY_PCM_ENCODING,
+                                             AMEDIAFORMAT_KEY_OPERATING_RATE,
+                                             AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+                                             AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+                                             AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER,
+                                             AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+                                             AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER,
+                                             AMEDIAFORMAT_KEY_LOW_LATENCY,
+                                             AMEDIAFORMAT_KEY_LOOP,
+                                             AMEDIAFORMAT_KEY_LEVEL,
+                                             AMEDIAFORMAT_KEY_LATENCY,
+                                             AMEDIAFORMAT_KEY_IS_SYNC_FRAME,
+                                             AMEDIAFORMAT_KEY_IS_DEFAULT,
+                                             AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+                                             AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_GRID_ROWS,
+                                             AMEDIAFORMAT_KEY_GRID_COLUMNS,
+                                             AMEDIAFORMAT_KEY_FRAME_COUNT,
+                                             AMEDIAFORMAT_KEY_ENCODER_PADDING,
+                                             AMEDIAFORMAT_KEY_ENCODER_DELAY,
+                                             AMEDIAFORMAT_KEY_DISPLAY_WIDTH,
+                                             AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+                                             AMEDIAFORMAT_KEY_DISPLAY_CROP,
+                                             AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK,
+                                             AMEDIAFORMAT_KEY_CRYPTO_MODE,
+                                             AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK,
+                                             AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE,
+                                             AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+                                             AMEDIAFORMAT_KEY_COLOR_STANDARD,
+                                             AMEDIAFORMAT_KEY_COLOR_RANGE,
+                                             AMEDIAFORMAT_KEY_CHANNEL_MASK,
+                                             AMEDIAFORMAT_KEY_BITS_PER_SAMPLE,
+                                             AMEDIAFORMAT_KEY_BITRATE_MODE,
+                                             AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+                                             AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID,
+                                             AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID,
+                                             AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+                                             AMEDIAFORMAT_KEY_AAC_PROFILE,
+                                             AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+                                             AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+                                             AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+                                             AMEDIAFORMAT_KEY_XMP_SIZE,
+                                             AMEDIAFORMAT_KEY_XMP_OFFSET,
+                                             AMEDIAFORMAT_KEY_TIME_US,
+                                             AMEDIAFORMAT_KEY_THUMBNAIL_TIME,
+                                             AMEDIAFORMAT_KEY_TARGET_TIME,
+                                             AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND,
+                                             AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET,
+                                             AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK,
+                                             AMEDIAFORMAT_KEY_EXIF_SIZE,
+                                             AMEDIAFORMAT_KEY_EXIF_OFFSET,
+                                             AMEDIAFORMAT_KEY_DURATION};
+
+static const std::string kFormatBufferKeys[] = {
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C,
+        AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA,
+        AMEDIAFORMAT_KEY_SEI,
+        AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+        AMEDIAFORMAT_KEY_PSSH,
+        AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+        AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER,
+        AMEDIAFORMAT_KEY_MPEG_USER_DATA,
+        AMEDIAFORMAT_KEY_ICC_PROFILE,
+        AMEDIAFORMAT_KEY_HDR10_PLUS_INFO,
+        AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+        AMEDIAFORMAT_KEY_ESDS,
+        AMEDIAFORMAT_KEY_D263,
+        AMEDIAFORMAT_KEY_CSD_HEVC,
+        AMEDIAFORMAT_KEY_CSD_AVC,
+        AMEDIAFORMAT_KEY_CSD_2,
+        AMEDIAFORMAT_KEY_CSD_1,
+        AMEDIAFORMAT_KEY_CSD_0,
+        AMEDIAFORMAT_KEY_CSD,
+        AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_KEY,
+        AMEDIAFORMAT_KEY_CRYPTO_IV,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
+        AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+        AMEDIAFORMAT_KEY_ALBUMART,
+};
+
+static const std::string kFormatFloatKeys[] = {AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                                               AMEDIAFORMAT_KEY_CAPTURE_RATE};
+
+static const std::string kFormatStringKeys[] = {AMEDIAFORMAT_KEY_YEAR,
+                                                AMEDIAFORMAT_KEY_TITLE,
+                                                AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+                                                AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS,
+                                                AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+                                                AMEDIAFORMAT_KEY_MANUFACTURER,
+                                                AMEDIAFORMAT_KEY_LYRICIST,
+                                                AMEDIAFORMAT_KEY_LOCATION,
+                                                AMEDIAFORMAT_KEY_LANGUAGE,
+                                                AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+                                                AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+                                                AMEDIAFORMAT_KEY_IS_ADTS,
+                                                AMEDIAFORMAT_KEY_GENRE,
+                                                AMEDIAFORMAT_KEY_DISCNUMBER,
+                                                AMEDIAFORMAT_KEY_DATE,
+                                                AMEDIAFORMAT_KEY_COMPOSER,
+                                                AMEDIAFORMAT_KEY_COMPILATION,
+                                                AMEDIAFORMAT_KEY_COMPLEXITY,
+                                                AMEDIAFORMAT_KEY_CDTRACKNUMBER,
+                                                AMEDIAFORMAT_KEY_AUTHOR,
+                                                AMEDIAFORMAT_KEY_ARTIST,
+                                                AMEDIAFORMAT_KEY_ALBUMARTIST,
+                                                AMEDIAFORMAT_KEY_ALBUM};
+
+void formatSetString(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        std::string keyValue = fdp->ConsumeRandomLengthString(kMaxBytes);
+        AMediaFormat_setString(format, AMEDIAFORMAT_KEY, keyValue.c_str());
+    }
+}
+
+void formatSetInt(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        int32_t keyValue = fdp->ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue);
+        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY, keyValue);
+    }
+}
+
+void formatSetFloat(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        float keyValue =
+                fdp->ConsumeFloatingPointInRange<float>(kMinFloatKeyValue, kMaxFloatKeyValue);
+        AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY, keyValue);
+    }
+}
+
+void formatSetBuffer(AMediaFormat* format, const char* AMEDIAFORMAT_KEY, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        std::vector<uint8_t> buffer = fdp->ConsumeBytes<uint8_t>(
+                fdp->ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+        AMediaFormat_setBuffer(format, AMEDIAFORMAT_KEY, buffer.data(), buffer.size());
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createAMediaCodecByname(bool isEncoder,
+                                                              bool isCodecForClient) {
+    std::string name;
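+    // Pick either a known software codec name or an arbitrary string, covering
+    // both valid and invalid codec names.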
+    if (isEncoder) {
+        name = mFdp->ConsumeBool() ? mFdp->PickValueInArray(kEncoderNames)
+                                   : mFdp->ConsumeRandomLengthString(kMaxBytes);
+    } else {
+        name = mFdp->ConsumeBool() ? mFdp->PickValueInArray(kDecoderNames)
+                                   : mFdp->ConsumeRandomLengthString(kMaxBytes);
+    }
+
+    if (isCodecForClient) {
+        pid_t pid = mFdp->ConsumeIntegral<pid_t>();
+        uid_t uid = mFdp->ConsumeIntegral<uid_t>();
+        return AMediaCodec_createCodecByNameForClient(name.c_str(), pid, uid);
+
+    } else {
+        return AMediaCodec_createCodecByName(name.c_str());
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createAMediaCodecByType(bool isEncoder,
+                                                              bool isCodecForClient) {
+    std::string mimeType;
+    const char* mime = nullptr;
+
+    if (mFdp->ConsumeBool()) {
+        mimeType = mFdp->ConsumeRandomLengthString(kMaxBytes);
+        mime = mimeType.c_str();
+    } else {
+        AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime);
+    }
+
+    if (isCodecForClient) {
+        pid_t pid = mFdp->ConsumeIntegral<pid_t>();
+        uid_t uid = mFdp->ConsumeIntegral<uid_t>();
+        return isEncoder ? AMediaCodec_createEncoderByTypeForClient(mime, pid, uid)
+                         : AMediaCodec_createDecoderByTypeForClient(mime, pid, uid);
+    } else {
+        return isEncoder ? AMediaCodec_createEncoderByType(mime)
+                         : AMediaCodec_createDecoderByType(mime);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::setCodecFormat() {
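+    // Populate the format with a random subset of string, integer, float and
+    // buffer keys; each helper only sets its key when the fuzzed input says so.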
+    std::string value;
+    int32_t count = 0;
+    int32_t maxFormatKeys = 0;
+    AMediaFormat_clear(mFormat);
+
+    /*set mimeType*/
+    if (mFdp->ConsumeBool()) {
+        value = mFdp->ConsumeRandomLengthString(kMaxBytes);
+    } else {
+        value = mFdp->PickValueInArray(kMimeTypes);
+    }
+    if (mFdp->ConsumeBool()) {
+        AMediaFormat_setString(mFormat, AMEDIAFORMAT_KEY_MIME, value.c_str());
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatStringKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatStringKeys);
+        formatSetString(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatIntKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatIntKeys);
+        formatSetInt(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatFloatKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatFloatKeys);
+        formatSetFloat(mFormat, formatKey.c_str(), mFdp);
+    }
+
+    maxFormatKeys = mFdp->ConsumeIntegralInRange<int32_t>(0, std::size(kFormatBufferKeys));
+    for (count = 0; count < maxFormatKeys; ++count) {
+        std::string formatKey = mFdp->PickValueInArray(kFormatBufferKeys);
+        formatSetBuffer(mFormat, formatKey.c_str(), mFdp);
+    }
+}
+
+AMediaCodec* NdkMediaCodecFuzzerBase::createCodec(bool isEncoder, bool isCodecForClient) {
+    setCodecFormat();
+    return (mFdp->ConsumeBool() ? createAMediaCodecByname(isEncoder, isCodecForClient)
+                                : createAMediaCodecByType(isEncoder, isCodecForClient));
+}
+
+void NdkMediaCodecFuzzerBase::invokeCodecFormatAPI(AMediaCodec* codec) {
+    AMediaFormat* codecFormat = nullptr;
+    size_t codecFormatAPI = mFdp->ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCodecFormatAPIs);
+    switch (codecFormatAPI) {
+        case 0: {
+            codecFormat = AMediaCodec_getInputFormat(codec);
+            break;
+        }
+        case 1: {
+            codecFormat = AMediaCodec_getOutputFormat(codec);
+            break;
+        }
+        case 2:
+        default: {
+            AMediaCodecBufferInfo info;
+            int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+            ssize_t bufferIndex = 0;
+            if (mFdp->ConsumeBool()) {
+                bufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &info, timeOutUs);
+            } else {
+                bufferIndex =
+                        mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+            }
+            codecFormat = AMediaCodec_getBufferFormat(codec, bufferIndex);
+            break;
+        }
+    }
+    if (codecFormat) {
+        AMediaFormat_delete(codecFormat);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::invokeInputBufferOperationAPI(AMediaCodec* codec) {
+    size_t bufferSize = 0;
+    ssize_t bufferIndex = 0;
+    int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+    if (mFdp->ConsumeBool()) {
+        bufferIndex = AMediaCodec_dequeueInputBuffer(codec, timeOutUs);
+    } else {
+        bufferIndex = mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+    }
+
+    uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+    if (buffer) {
+        std::vector<uint8_t> bytesRead = mFdp->ConsumeBytes<uint8_t>(
+                std::min(mFdp->ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+        memcpy(buffer, bytesRead.data(), bytesRead.size());
+        bufferSize = bytesRead.size();
+    }
+
+    int32_t flag = mFdp->ConsumeIntegralInRange<size_t>(AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG,
+                                                        AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME);
+    if (mFdp->ConsumeBool()) {
+        AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+                                     flag);
+    } else {
+        AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+        AMediaCodec_queueSecureInputBuffer(codec, bufferIndex, 0 /* offset */, cryptoInfo,
+                                           0 /* time */, flag);
+        AMediaCodecCryptoInfo_delete(cryptoInfo);
+    }
+}
+
+void NdkMediaCodecFuzzerBase::invokeOutputBufferOperationAPI(AMediaCodec* codec) {
+    ssize_t bufferIndex = 0;
+    int64_t timeOutUs = mFdp->ConsumeIntegralInRange<size_t>(kMinTimeOutUs, kMaxTimeOutUs);
+    if (mFdp->ConsumeBool()) {
+        AMediaCodecBufferInfo info;
+        bufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &info, timeOutUs);
+    } else {
+        bufferIndex = mFdp->ConsumeIntegralInRange<size_t>(kMinBufferIndex, kMaxBufferIndex);
+    }
+
+    if (mFdp->ConsumeBool()) {
+        size_t bufferSize = 0;
+        (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+    }
+
+    if (mFdp->ConsumeBool()) {
+        AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp->ConsumeBool());
+    } else {
+        AMediaCodec_releaseOutputBufferAtTime(codec, bufferIndex, timeOutUs);
+    }
+}
+
+AMediaCodecCryptoInfo* NdkMediaCodecFuzzerBase::getAMediaCodecCryptoInfo() {
+    uint8_t key[kMaxCryptoKey];
+    uint8_t iv[kMaxCryptoKey];
+    size_t clearBytes[kMaxCryptoKey];
+    size_t encryptedBytes[kMaxCryptoKey];
+
+    for (int32_t i = 0; i < kMaxCryptoKey; ++i) {
+        key[i] = mFdp->ConsumeIntegral<uint8_t>();
+        iv[i] = mFdp->ConsumeIntegral<uint8_t>();
+        clearBytes[i] = mFdp->ConsumeIntegral<size_t>();
+        encryptedBytes[i] = mFdp->ConsumeIntegral<size_t>();
+    }
+
+    return AMediaCodecCryptoInfo_new(kMaxCryptoKey, key, iv, AMEDIACODECRYPTOINFO_MODE_CLEAR,
+                                     clearBytes, encryptedBytes);
+}
diff --git a/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
new file mode 100644
index 0000000..2875f9f
--- /dev/null
+++ b/media/ndk/fuzzer/NdkMediaCodecFuzzerBase.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include <android/native_window.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaCodecPlatform.h>
+#include <media/NdkMediaFormat.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+constexpr int32_t kMinBytes = 1;
+constexpr int32_t kMaxBytes = 256;
+constexpr int32_t kMinIntKeyValue = 0;
+constexpr int32_t kMaxIntKeyValue = 6000000;
+constexpr float kMinFloatKeyValue = 1.0f;
+constexpr float kMaxFloatKeyValue = 500.f;
+constexpr int32_t kMinTimeOutUs = 0;
+constexpr int32_t kMaxTimeOutUs = 5000;
+constexpr int32_t kMinAPICase = 0;
+constexpr int32_t kMaxCodecFormatAPIs = 2;
+constexpr int32_t kMaxCryptoKey = 16;
+constexpr int32_t kMinIterations = 10;
+constexpr int32_t kMaxIterations = 100;
+constexpr size_t kMinBufferIndex = 1;
+constexpr size_t kMaxBufferIndex = 128;
+
+class NdkMediaCodecFuzzerBase {
+  public:
+    NdkMediaCodecFuzzerBase() { mFormat = AMediaFormat_new(); }
+    void invokeCodecFormatAPI(AMediaCodec* codec);
+    void invokeInputBufferOperationAPI(AMediaCodec* codec);
+    void invokeOutputBufferOperationAPI(AMediaCodec* codec);
+    AMediaCodecCryptoInfo* getAMediaCodecCryptoInfo();
+    AMediaCodec* createCodec(bool isEncoder, bool isCodecForClient);
+    AMediaFormat* getCodecFormat() { return mFormat; };
+    void setFdp(FuzzedDataProvider* fdp) { mFdp = fdp; }
+    ~NdkMediaCodecFuzzerBase() {
+        if (mFormat) {
+            AMediaFormat_delete(mFormat);
+        }
+    }
+
+  private:
+    AMediaCodec* createAMediaCodecByname(bool isEncoder, bool isCodecForClient);
+    AMediaCodec* createAMediaCodecByType(bool isEncoder, bool isCodecForClient);
+    AMediaFormat* getSampleAudioFormat();
+    AMediaFormat* getSampleVideoFormat();
+    void setCodecFormat();
+    AMediaFormat* mFormat = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
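
For orientation only (this sketch is not part of the change): a concrete fuzz target built on this base class just wires in a FuzzedDataProvider and loops over the invoke helpers. The libFuzzer entry point, the configure/start ordering, and the flag choice below are assumptions for illustration.

```
// Minimal sketch, not part of this patch: a hypothetical target reusing
// NdkMediaCodecFuzzerBase. Ordering and flags are illustrative assumptions.
#include <fuzzer/FuzzedDataProvider.h>
#include "NdkMediaCodecFuzzerBase.h"

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);
    NdkMediaCodecFuzzerBase base;
    base.setFdp(&fdp);
    bool isEncoder = fdp.ConsumeBool();
    // createCodec() randomizes the format and picks creation by name or by type.
    AMediaCodec* codec = base.createCodec(isEncoder, fdp.ConsumeBool() /* isCodecForClient */);
    if (!codec) {
        return 0;
    }
    AMediaCodec_configure(codec, base.getCodecFormat(), nullptr /* surface */,
                          nullptr /* crypto */,
                          isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0);
    AMediaCodec_start(codec);
    while (fdp.remaining_bytes()) {
        base.invokeCodecFormatAPI(codec);
        base.invokeInputBufferOperationAPI(codec);
        base.invokeOutputBufferOperationAPI(codec);
    }
    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
    return 0;
}
```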
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
new file mode 100644
index 0000000..0fd08b0
--- /dev/null
+++ b/media/ndk/fuzzer/README.md
@@ -0,0 +1,158 @@
+# Fuzzers for libmediandk
+
+## Table of contents
++ [ndk_crypto_fuzzer](#NdkCrypto)
++ [ndk_image_reader_fuzzer](#NdkImageReader)
++ [ndk_extractor_fuzzer](#NdkExtractor)
++ [ndk_mediaformat_fuzzer](#NdkMediaFormat)
++ [ndk_drm_fuzzer](#NdkDrm)
++ [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
++ [ndk_sync_codec_fuzzer](#NdkSyncCodec)
+
+# <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
+
+NdkCrypto supports the following parameters:
+1. UniversalIdentifier (parameter name: "uuid")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `uuid`| `Array`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_crypto_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_crypto_fuzzer/ndk_crypto_fuzzer
+```
+
+# <a name="NdkImageReader"></a> Fuzzer for NdkImageReader
+
+NdkImageReader supports the following parameters:
+1. Width (parameter name: "imageWidth")
+2. Height (parameter name: "imageHeight")
+3. Format (parameter name: "imageFormat")
+4. Usage (parameter name: "imageUsage")
+5. Max images (parameter name: "imageMaxCount")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+| `imageWidth`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `imageHeight`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `imageFormat`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `imageUsage`| `1 to INT_MAX`| Value obtained from FuzzedDataProvider|
+| `imageMaxCount`| `1 to android::BufferQueue::MAX_MAX_ACQUIRED_BUFFERS`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_image_reader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_image_reader_fuzzer/ndk_image_reader_fuzzer
+```
+
+# <a name="NdkExtractor"></a>Fuzzer for NdkExtractor
+
+NdkExtractor supports the following parameters:
+1. SeekMode (parameter name: "mode")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`mode`|0.`AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC`,<br/>1.`AMEDIAEXTRACTOR_SEEK_NEXT_SYNC`,<br/>2.`AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_extractor_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/ndk_extractor_fuzzer/ndk_extractor_fuzzer /data/fuzz/${TARGET_ARCH}/ndk_extractor_fuzzer/corpus
+```
+
+
+# <a name="NdkMediaFormat"></a>Fuzzer for NdkMediaFormat
+
+NdkMediaFormat supports the following parameters:
+1. Name (parameter name: "name")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`name`|1.`AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR`, 2.`AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR`, 3.`AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION`, 4.`AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL`, 5.`AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL`, 6.`AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT`, 7.`AMEDIAFORMAT_KEY_AAC_PROFILE`, 8.`AMEDIAFORMAT_KEY_AAC_SBR_MODE`, 9.`AMEDIAFORMAT_KEY_ALBUM`, 10.`AMEDIAFORMAT_KEY_ALBUMART`, 11.`AMEDIAFORMAT_KEY_ALBUMARTIST`, 12.`AMEDIAFORMAT_KEY_ARTIST`, 13.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO`, 14.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID`, 15.`AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID`, 16.`AMEDIAFORMAT_KEY_AUDIO_SESSION_ID`, 17.`AMEDIAFORMAT_KEY_AUTHOR`, 18.`AMEDIAFORMAT_KEY_BITRATE_MODE`, 19.`AMEDIAFORMAT_KEY_BIT_RATE`, 20.`AMEDIAFORMAT_KEY_BITS_PER_SAMPLE`, 21.`AMEDIAFORMAT_KEY_CAPTURE_RATE`, 22.`AMEDIAFORMAT_KEY_CDTRACKNUMBER`, 23.`AMEDIAFORMAT_KEY_CHANNEL_COUNT`, 24.`AMEDIAFORMAT_KEY_CHANNEL_MASK`, 25.`AMEDIAFORMAT_KEY_COLOR_FORMAT`, 26.`AMEDIAFORMAT_KEY_COLOR_RANGE`, 27.`AMEDIAFORMAT_KEY_COLOR_STANDARD`, 28.`AMEDIAFORMAT_KEY_COLOR_TRANSFER`, 29.`AMEDIAFORMAT_KEY_COMPILATION`, 30.`AMEDIAFORMAT_KEY_COMPLEXITY`, 31.`AMEDIAFORMAT_KEY_COMPOSER`, 32.`AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED`, 33.`AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE`, 34.`AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK`, 35.`AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES`, 36.`AMEDIAFORMAT_KEY_CRYPTO_IV`, 37.`AMEDIAFORMAT_KEY_CRYPTO_KEY`, 38.`AMEDIAFORMAT_KEY_CRYPTO_MODE`, 39.`AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES`, 40.`AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK`, 41.`AMEDIAFORMAT_KEY_CSD`, 42.`AMEDIAFORMAT_KEY_CSD_0`, 43.`AMEDIAFORMAT_KEY_CSD_1`, 44.`AMEDIAFORMAT_KEY_CSD_2`, 45.`AMEDIAFORMAT_KEY_CSD_AVC`, 46.`AMEDIAFORMAT_KEY_CSD_HEVC`, 47.`AMEDIAFORMAT_KEY_D263`, 48.`AMEDIAFORMAT_KEY_DATE`, 49.`AMEDIAFORMAT_KEY_DISCNUMBER`, 50.`AMEDIAFORMAT_KEY_DISPLAY_CROP`, 51.`AMEDIAFORMAT_KEY_DISPLAY_HEIGHT`, 52.`AMEDIAFORMAT_KEY_DISPLAY_WIDTH`, 53.`AMEDIAFORMAT_KEY_DURATION`, 54.`AMEDIAFORMAT_KEY_ENCODER_DELAY`, 55.`AMEDIAFORMAT_KEY_ENCODER_PADDING`, 56.`AMEDIAFORMAT_KEY_ESDS`, 57.`AMEDIAFORMAT_KEY_EXIF_OFFSET`, 58.`AMEDIAFORMAT_KEY_EXIF_SIZE`, 59.`AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL`, 60.`AMEDIAFORMAT_KEY_FRAME_COUNT`, 61.`AMEDIAFORMAT_KEY_FRAME_RATE`, 62.`AMEDIAFORMAT_KEY_GENRE`, 63.`AMEDIAFORMAT_KEY_GRID_COLUMNS`, 64.`AMEDIAFORMAT_KEY_GRID_ROWS`, 65.`AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT`, 66.`AMEDIAFORMAT_KEY_HDR_STATIC_INFO`, 67.`AMEDIAFORMAT_KEY_HDR10_PLUS_INFO`, 68.`AMEDIAFORMAT_KEY_HEIGHT`, 69.`AMEDIAFORMAT_KEY_ICC_PROFILE`, 70.`AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD`, 71.`AMEDIAFORMAT_KEY_IS_ADTS`, 72.`AMEDIAFORMAT_KEY_IS_AUTOSELECT`, 73.`AMEDIAFORMAT_KEY_IS_DEFAULT`, 74.`AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE`, 75.`AMEDIAFORMAT_KEY_IS_SYNC_FRAME`, 76.`AMEDIAFORMAT_KEY_I_FRAME_INTERVAL`, 77.`AMEDIAFORMAT_KEY_LANGUAGE`, 78.`AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK`, 79.`AMEDIAFORMAT_KEY_LATENCY`, 80.`AMEDIAFORMAT_KEY_LEVEL`, 81.`AMEDIAFORMAT_KEY_LOCATION`, 82.`AMEDIAFORMAT_KEY_LOOP`, 83.`AMEDIAFORMAT_KEY_LOW_LATENCY`, 84.`AMEDIAFORMAT_KEY_LYRICIST`, 85.`AMEDIAFORMAT_KEY_MANUFACTURER`, 86.`AMEDIAFORMAT_KEY_MAX_BIT_RATE`, 87.`AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER`, 88.`AMEDIAFORMAT_KEY_MAX_HEIGHT`, 89.`AMEDIAFORMAT_KEY_MAX_INPUT_SIZE`, 90.`AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER`, 91.`AMEDIAFORMAT_KEY_MAX_WIDTH`, 92.`AMEDIAFORMAT_KEY_MIME`, 93.`AMEDIAFORMAT_KEY_MPEG_USER_DATA`, 94.`AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER`, 95.`AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS`, 96.`AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION`, 97.`AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT`, 98.`AMEDIAFORMAT_KEY_OPERATING_RATE`, 99.`AMEDIAFORMAT_KEY_PCM_ENCODING`, 100.`AMEDIAFORMAT_KEY_PICTURE_TYPE`, 101.`AMEDIAFORMAT_KEY_PRIORITY`, 102.`AMEDIAFORMAT_KEY_PROFILE`, 103.`AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN`, 104.`AMEDIAFORMAT_KEY_PSSH`, 105.`AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP`, 106.`AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER`, 107.`AMEDIAFORMAT_KEY_ROTATION`, 108.`AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET`, 109.`AMEDIAFORMAT_KEY_SAMPLE_RATE`, 110.`AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND`, 111.`AMEDIAFORMAT_KEY_SAR_HEIGHT`, 112.`AMEDIAFORMAT_KEY_SAR_WIDTH`, 113.`AMEDIAFORMAT_KEY_SEI`, 114.`AMEDIAFORMAT_KEY_SLICE_HEIGHT`, 115.`AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS`, 116.`AMEDIAFORMAT_KEY_STRIDE`, 117.`AMEDIAFORMAT_KEY_TARGET_TIME`, 118.`AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT`, 119.`AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID`, 120.`AMEDIAFORMAT_KEY_TEMPORAL_LAYERING`, 121.`AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA`, 122.`AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C`, 123.`AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC`, 124.`AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT`, 125.`AMEDIAFORMAT_KEY_THUMBNAIL_TIME`, 126.`AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH`, 127.`AMEDIAFORMAT_KEY_TILE_HEIGHT`, 128.`AMEDIAFORMAT_KEY_TILE_WIDTH`, 129.`AMEDIAFORMAT_KEY_TIME_US`, 130.`AMEDIAFORMAT_KEY_TITLE`, 131.`AMEDIAFORMAT_KEY_TRACK_ID`, 132.`AMEDIAFORMAT_KEY_TRACK_INDEX`, 133.`AMEDIAFORMAT_KEY_VALID_SAMPLES`, 134.`AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL`, 135.`AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE`, 136.`AMEDIAFORMAT_VIDEO_QP_B_MAX`, 137.`AMEDIAFORMAT_VIDEO_QP_B_MIN`, 138.`AMEDIAFORMAT_VIDEO_QP_I_MAX`, 139.`AMEDIAFORMAT_VIDEO_QP_I_MIN`, 140.`AMEDIAFORMAT_VIDEO_QP_MAX`, 141.`AMEDIAFORMAT_VIDEO_QP_MIN`, 142.`AMEDIAFORMAT_VIDEO_QP_P_MAX`, 143.`AMEDIAFORMAT_VIDEO_QP_P_MIN`, 144.`AMEDIAFORMAT_KEY_WIDTH`, 145.`AMEDIAFORMAT_KEY_XMP_OFFSET`, 146.`AMEDIAFORMAT_KEY_XMP_SIZE`, 147.`AMEDIAFORMAT_KEY_YEAR`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_mediaformat_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/ndk_mediaformat_fuzzer/ndk_mediaformat_fuzzer /data/fuzz/${TARGET_ARCH}/ndk_mediaformat_fuzzer/corpus
+```
+
+# <a name="NdkDrm"></a> Fuzzer for NdkDrm
+
+NdkDrm supports the following parameters:
+1. ValidUUID (parameter name: "kCommonPsshBoxUUID" and "kClearKeyUUID")
+2. MimeType (parameter name: "kMimeType")
+3. MediaUUID (parameter name: "MediaUUID")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`ValidUUID`| 0.`kCommonPsshBoxUUID`,<br/> 1.`kClearKeyUUID`,<br/> 2.`kInvalidUUID`|Value obtained from FuzzedDataProvider|
+|`kMimeType`| 0.`video/mp4`,<br/> 1.`audio/mp4`|Value obtained from FuzzedDataProvider|
+|`MediaUUID`| 0.`INVALID_UUID`,<br/> 1.`PSSH_BOX_UUID`,<br/> 2.`CLEARKEY_UUID`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_drm_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_drm_fuzzer/ndk_drm_fuzzer
+```
+
+# <a name="NdkMediaMuxer"></a>Fuzzer for NdkMediaMuxer
+
+NdkMediaMuxer supports the following parameters:
+1. OutputFormat (parameter name: "outputFormat")
+2. AppendMode (parameter name: "appendMode")
+
+| Parameter| Valid Values |Configured Value|
+|-------------|----------|----- |
+|`outputFormat`|0.`AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4`,<br/>1.`AMEDIAMUXER_OUTPUT_FORMAT_WEBM`,<br/>2.`AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP`| Value obtained from FuzzedDataProvider|
+|`appendMode`|0.`AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP`,<br/>1.`AMEDIAMUXER_APPEND_TO_EXISTING_DATA`| Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_mediamuxer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_mediamuxer_fuzzer/ndk_mediamuxer_fuzzer
+```
+
+# <a name="NdkSyncCodec"></a>Fuzzer for NdkSyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_sync_codec_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738 b/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738
new file mode 100755
index 0000000..47d79c8
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/2822a2c3bcf57f46cb2bf142448d5baeead4d738
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6 b/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6
new file mode 100755
index 0000000..17c934c
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/58833c3691292c199fa601fd51339d85c1f11ca6
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce b/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce
new file mode 100755
index 0000000..00a32e2
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/8556a97764e65bf337b5593058fa92adb68074ce
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950 b/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950
new file mode 100755
index 0000000..86d4001
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/8f76e2e87f79fe213f5cc8c71e5f91d1dcfc5950
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d b/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d
new file mode 100755
index 0000000..496c7f3
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/d702878cb53fb474230fb7b1a5c035bbb7c21c8d
Binary files differ
diff --git a/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a b/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a
new file mode 100755
index 0000000..55437ac
--- /dev/null
+++ b/media/ndk/fuzzer/corpus/edc2485f3927e07d7ab705337f16f0b978c57d0a
Binary files differ
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
new file mode 100644
index 0000000..2b22f0f
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaCrypto.h>
+
+constexpr size_t kMaxString = 256;
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    AMediaUUID uuid = {};
+    size_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, sizeof(AMediaUUID));
+    for (size_t idx = 0; idx < maxLen; ++idx) {
+        uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
+    }
+    std::vector<uint8_t> initData =
+            fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+    AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
+    while (fdp.remaining_bytes()) {
+        auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    AMediaCrypto_requiresSecureDecoderComponent(
+                            fdp.ConsumeRandomLengthString(kMaxString).c_str());
+                },
+                [&]() { AMediaCrypto_isCryptoSchemeSupported(uuid); },
+        });
+        invokeNdkCryptoFuzzer();
+    }
+    AMediaCrypto_delete(crypto);
+    return 0;
+}
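
For context (illustrative only, not part of the change): the fuzzer above deliberately scrambles the inputs to these NdkMediaCrypto calls. Their nominal usage, with the clearkey scheme UUID that also appears in ndk_drm_fuzzer.cpp below, looks roughly like this sketch; the "video/avc" mime string is just an example choice.

```
// Sketch only, not part of this patch: nominal NdkMediaCrypto usage with a
// known scheme UUID (clearkey), checking support before creating the object.
#include <media/NdkMediaCrypto.h>

bool tryCreateClearKeyCrypto(const uint8_t* pssh, size_t psshSize) {
    static const AMediaUUID kClearKey = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85, 0xB3, 0xC9,
                                         0x78, 0x1A, 0xB0, 0x30, 0xAF, 0x78, 0xD3, 0x0E};
    if (!AMediaCrypto_isCryptoSchemeSupported(kClearKey)) {
        return false;
    }
    AMediaCrypto* crypto = AMediaCrypto_new(kClearKey, pssh, psshSize);
    if (!crypto) {
        return false;
    }
    // Example query; the mime type here is an arbitrary illustration.
    bool secure = AMediaCrypto_requiresSecureDecoderComponent("video/avc");
    AMediaCrypto_delete(crypto);
    return secure;
}
```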
diff --git a/media/ndk/fuzzer/ndk_drm_fuzzer.cpp b/media/ndk/fuzzer/ndk_drm_fuzzer.cpp
new file mode 100644
index 0000000..8c11c9d
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_drm_fuzzer.cpp
@@ -0,0 +1,355 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <media/NdkMediaCrypto.h>
+#include <media/NdkMediaDrm.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr int32_t kMinBytes = 1;
+constexpr int32_t kMaxBytes = 256;
+constexpr int32_t kMinParamVal = 0;
+constexpr int32_t kMaxParamVal = 3;
+constexpr int32_t kMediaUUIdSize = sizeof(AMediaUUID);
+constexpr int32_t kMinProvisionResponseSize = 0;
+constexpr int32_t kMaxProvisionResponseSize = 16;
+constexpr int32_t kMessageSize = 16;
+constexpr int32_t kMinAPIcase = 0;
+constexpr int32_t kMaxdecryptEncryptAPIs = 10;
+constexpr int32_t kMaxpropertyAPIs = 3;
+constexpr int32_t kMaxsetListenerAPIs = 2;
+constexpr int32_t kMaxndkDrmAPIs = 3;
+uint8_t signature[kMessageSize];
+
+enum MediaUUID { INVALID_UUID = 0, PSSH_BOX_UUID, CLEARKEY_UUID, kMaxValue = CLEARKEY_UUID };
+
+constexpr uint8_t kCommonPsshBoxUUID[] = {0x10, 0x77, 0xEF, 0xEC, 0xC0, 0xB2, 0x4D, 0x02,
+                                          0xAC, 0xE3, 0x3C, 0x1E, 0x52, 0xE2, 0xFB, 0x4B};
+
+constexpr uint8_t kClearKeyUUID[] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85, 0xB3, 0xC9,
+                                     0x78, 0x1A, 0xB0, 0x30, 0xAF, 0x78, 0xD3, 0x0E};
+
+constexpr uint8_t kInvalidUUID[] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80,
+                                    0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80};
+
+uint8_t kClearkeyPssh[] = {
+        // BMFF box header (4 bytes size + 'pssh')
+        0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+        // full box header (version = 1 flags = 0)
+        0x01, 0x00, 0x00, 0x00,
+        // system id
+        0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02,
+        0xac, 0xe3, 0x3c, 0x1e, 0x52, 0xe2, 0xfb, 0x4b,
+        // number of key ids
+        0x00, 0x00, 0x00, 0x01,
+        // key id
+        0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+        0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+        // size of data, must be zero
+        0x00, 0x00, 0x00, 0x00};
+
+std::string kPropertyName = "clientId";
+std::string kMimeType[] = {"video/mp4", "audio/mp4"};
+std::string kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
+std::string kMacAlgorithm[] = {"HmacSHA256", ""};
+
+class NdkMediaDrmFuzzer {
+  public:
+    NdkMediaDrmFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void invokeNdkDrm();
+    static void KeysChangeListener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                                   const AMediaDrmKeyStatus* keysStatus, size_t numKeys,
+                                   bool hasNewUsableKey) {
+        (void)drm;
+        (void)sessionId;
+        (void)keysStatus;
+        (void)numKeys;
+        (void)hasNewUsableKey;
+    };
+
+    static void ExpirationUpdateListener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                                         int64_t expiryTimeInMS) {
+        (void)drm;
+        (void)sessionId;
+        (void)expiryTimeInMS;
+    };
+
+    static void listener(AMediaDrm* drm, const AMediaDrmSessionId* sessionId,
+                         AMediaDrmEventType eventType, int extra, const uint8_t* data,
+                         size_t dataSize) {
+        (void)drm;
+        (void)sessionId;
+        (void)eventType;
+        (void)extra;
+        (void)data;
+        (void)dataSize;
+    }
+
+  private:
+    FuzzedDataProvider mFdp;
+    void invokeDrmCreatePlugin();
+    void invokeDrmSetListener();
+    void invokeDrmPropertyAPI();
+    void invokeDrmDecryptEncryptAPI();
+    void invokeDrmSecureStopAPI();
+    AMediaDrmSessionId mSessionId = {};
+    AMediaDrm* mDrm = nullptr;
+};
+
+void NdkMediaDrmFuzzer::invokeDrmCreatePlugin() {
+    const uint8_t* mediaUUID = nullptr;
+    uint32_t uuidEnum = mFdp.ConsumeEnum<MediaUUID>();
+    switch (uuidEnum) {
+        case INVALID_UUID: {
+            mediaUUID = kInvalidUUID;
+            break;
+        }
+        case PSSH_BOX_UUID: {
+            mediaUUID = kCommonPsshBoxUUID;
+            break;
+        }
+        case CLEARKEY_UUID:
+        default: {
+            mediaUUID = kClearKeyUUID;
+            break;
+        }
+    }
+    mDrm = AMediaDrm_createByUUID(mediaUUID);
+}
+
+void NdkMediaDrmFuzzer::invokeDrmSecureStopAPI() {
+    // Retrieve up to kMaxParamVal secure stops and then release them. The exact
+    // behavior can differ based on the DRM object (clearkey or psshbox).
+    AMediaDrmSecureStop secureStops;
+    size_t numSecureStops = kMaxParamVal;
+    AMediaDrm_getSecureStops(mDrm, &secureStops, &numSecureStops);
+    AMediaDrm_releaseSecureStops(mDrm, &secureStops);
+}
+
+void NdkMediaDrmFuzzer::invokeDrmSetListener() {
+    int32_t setListenerAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxsetListenerAPIs);
+    switch (setListenerAPI) {
+        case 0: {  // set on key change listener
+            AMediaDrm_setOnKeysChangeListener(mDrm, KeysChangeListener);
+            break;
+        }
+        case 1: {  // set on expiration on update listener
+            AMediaDrm_setOnExpirationUpdateListener(mDrm, ExpirationUpdateListener);
+            break;
+        }
+        case 2:
+        default: {  // set on event listener
+            AMediaDrm_setOnEventListener(mDrm, listener);
+            break;
+        }
+    }
+}
+
+void NdkMediaDrmFuzzer::invokeDrmPropertyAPI() {
+    int32_t propertyAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxpropertyAPIs);
+    switch (propertyAPI) {
+        case 0: {  // set property byte array
+            uint8_t value[kMediaUUIdSize];
+            std::string name =
+                    mFdp.ConsumeBool() ? kPropertyName : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* propertyName = name.c_str();
+            AMediaDrm_setPropertyByteArray(mDrm, propertyName, value, sizeof(value));
+            break;
+        }
+        case 1: {  // get property in byte array
+            AMediaDrmByteArray array;
+            std::string name =
+                    mFdp.ConsumeBool() ? kPropertyName : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* propertyName = name.c_str();
+            AMediaDrm_getPropertyByteArray(mDrm, propertyName, &array);
+            break;
+        }
+        case 2: {  // set string type property
+            std::string propertyName = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            std::string value = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_setPropertyString(mDrm, propertyName.c_str(), value.c_str());
+            break;
+        }
+        case 3:
+        default: {  //  get property in string
+            const char* stringValue = nullptr;
+            std::string propertyName = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_getPropertyString(mDrm, propertyName.c_str(), &stringValue);
+            break;
+        }
+    }
+}
+
+void NdkMediaDrmFuzzer::invokeDrmDecryptEncryptAPI() {
+    int32_t decryptEncryptAPI =
+            mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxdecryptEncryptAPIs);
+    switch (decryptEncryptAPI) {
+        case 0: {  // Check if crypto scheme is supported
+            std::string mimeType = mFdp.ConsumeBool() ? mFdp.PickValueInArray(kMimeType)
+                                                      : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            AMediaDrm_isCryptoSchemeSupported(kClearKeyUUID, mimeType.c_str());
+            break;
+        }
+        case 1: {  // get a provision request byte array
+            const uint8_t* legacyRequest;
+            size_t legacyRequestSize = 1;
+            const char* legacyDefaultUrl;
+            AMediaDrm_getProvisionRequest(mDrm, &legacyRequest, &legacyRequestSize,
+                                          &legacyDefaultUrl);
+            break;
+        }
+        case 2: {  // provide a response to the DRM engine plugin
+            const int32_t provisionresponseSize = mFdp.ConsumeIntegralInRange<size_t>(
+                    kMinProvisionResponseSize, kMaxProvisionResponseSize);
+            uint8_t provisionResponse[provisionresponseSize];
+            AMediaDrm_provideProvisionResponse(mDrm, provisionResponse, sizeof(provisionResponse));
+            break;
+        }
+        case 3: {  // get key request
+            const uint8_t* keyRequest = nullptr;
+            size_t keyRequestSize = 0;
+            std::string mimeType = mFdp.ConsumeBool() ? mFdp.PickValueInArray(kMimeType)
+                                                      : mFdp.ConsumeRandomLengthString(kMaxBytes);
+            size_t numOptionalParameters =
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinParamVal, kMaxParamVal);
+            AMediaDrmKeyValue optionalParameters[numOptionalParameters];
+            std::string keys[numOptionalParameters];
+            std::string values[numOptionalParameters];
+            for (int i = 0; i < numOptionalParameters; ++i) {
+                keys[i] = mFdp.ConsumeRandomLengthString(kMaxBytes);
+                values[i] = mFdp.ConsumeRandomLengthString(kMaxBytes);
+                optionalParameters[i].mKey = keys[i].c_str();
+                optionalParameters[i].mValue = values[i].c_str();
+            }
+            AMediaDrmKeyType keyType = (AMediaDrmKeyType)mFdp.ConsumeIntegralInRange<int>(
+                    KEY_TYPE_STREAMING, KEY_TYPE_RELEASE);
+            AMediaDrm_getKeyRequest(mDrm, &mSessionId, kClearkeyPssh, sizeof(kClearkeyPssh),
+                                    mimeType.c_str(), keyType, optionalParameters,
+                                    numOptionalParameters, &keyRequest, &keyRequestSize);
+            break;
+        }
+        case 4: {  // query key status
+            size_t numPairs = mFdp.ConsumeIntegralInRange<size_t>(kMinParamVal, kMaxParamVal);
+            AMediaDrmKeyValue keyStatus[numPairs];
+            AMediaDrm_queryKeyStatus(mDrm, &mSessionId, keyStatus, &numPairs);
+            break;
+        }
+        case 5: {  // provide key response
+            std::string key = mFdp.ConsumeRandomLengthString(kMaxBytes);
+            const char* keyResponse = key.c_str();
+            AMediaDrmKeySetId keySetId;
+            AMediaDrm_provideKeyResponse(mDrm, &mSessionId,
+                                         reinterpret_cast<const uint8_t*>(keyResponse),
+                                         key.size(), &keySetId);
+            break;
+        }
+        case 6: {  // restore key
+            AMediaDrmKeySetId keySetId;
+            AMediaDrm_restoreKeys(mDrm, &mSessionId, &keySetId);
+            break;
+        }
+
+        case 7: {  // Check signature verification using the specified Algorithm
+            std::string algorithm = kMacAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> message = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            AMediaDrm_verify(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), message.data(),
+                             message.size(), signature, sizeof(signature));
+            break;
+        }
+        case 8: {  // Generate a signature using the specified Algorithm
+            std::string algorithm = kMacAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> message = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            size_t signatureSize = sizeof(signature);
+            AMediaDrm_sign(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), message.data(),
+                           message.size(), signature, &signatureSize);
+            break;
+        }
+        case 9: {  // Decrypt the data using algorithm
+            std::string algorithm = kCipherAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> iv = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> input = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            // Size the output to the input so the decrypt cannot write past it.
+            std::vector<uint8_t> output(input.size());
+            AMediaDrm_decrypt(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), iv.data(),
+                              input.data(), output.data(), input.size());
+            break;
+        }
+        case 10:
+        default: {  // Encrypt the data using algorithm
+            std::string algorithm = kCipherAlgorithm[mFdp.ConsumeBool()];
+            std::vector<uint8_t> keyId = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> iv = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            std::vector<uint8_t> input = mFdp.ConsumeBytes<uint8_t>(
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            // Size the output to the input so the encrypt cannot write past it.
+            std::vector<uint8_t> output(input.size());
+            AMediaDrm_encrypt(mDrm, &mSessionId, algorithm.c_str(), keyId.data(), iv.data(),
+                              input.data(), output.data(), input.size());
+            break;
+        }
+    }
+    AMediaDrm_removeKeys(mDrm, &mSessionId);
+}
+
+void NdkMediaDrmFuzzer::invokeNdkDrm() {
+    while (mFdp.remaining_bytes() > 0) {
+        // Called first because it creates the AMediaDrm object (mDrm)
+        // that the APIs below operate on.
+        invokeDrmCreatePlugin();
+        if (mDrm) {
+            // Opens a session and fills in mSessionId, which the
+            // APIs below require.
+            AMediaDrm_openSession(mDrm, &mSessionId);
+            int32_t ndkDrmAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPIcase, kMaxndkDrmAPIs);
+            switch (ndkDrmAPI) {
+                case 0: {
+                    invokeDrmDecryptEncryptAPI();
+                    break;
+                }
+                case 1: {
+                    invokeDrmPropertyAPI();
+                    break;
+                }
+                case 2: {
+                    invokeDrmSetListener();
+                    break;
+                }
+                case 3:
+                default: {
+                    invokeDrmSecureStopAPI();
+                    break;
+                }
+            }
+            AMediaDrm_closeSession(mDrm, &mSessionId);
+            AMediaDrm_release(mDrm);
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkMediaDrmFuzzer ndkMediaDrmFuzzer(data, size);
+    ndkMediaDrmFuzzer.invokeNdkDrm();
+    return 0;
+}
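
For reference (illustrative only, not part of the change): the nominal session flow that invokeNdkDrm() exercises out of order and with randomized inputs. The sketch reuses kClearKeyUUID and kClearkeyPssh from this file; the mime type and key-request arguments are placeholder choices.

```
// Sketch only, not part of this patch: nominal clearkey session flow using
// the same NdkMediaDrm entry points the fuzzer above drives randomly.
void nominalClearKeyFlow() {
    if (!AMediaDrm_isCryptoSchemeSupported(kClearKeyUUID, "video/mp4")) {
        return;
    }
    AMediaDrm* drm = AMediaDrm_createByUUID(kClearKeyUUID);
    if (!drm) {
        return;
    }
    AMediaDrmSessionId sessionId;
    if (AMediaDrm_openSession(drm, &sessionId) == AMEDIA_OK) {
        const uint8_t* request = nullptr;
        size_t requestSize = 0;
        // Ask for a streaming key using the clearkey pssh blob defined above.
        AMediaDrm_getKeyRequest(drm, &sessionId, kClearkeyPssh, sizeof(kClearkeyPssh),
                                "video/mp4", KEY_TYPE_STREAMING,
                                nullptr /* optionalParameters */, 0 /* numOptionalParameters */,
                                &request, &requestSize);
        // A real client would send `request` to a license server and feed the
        // reply to AMediaDrm_provideKeyResponse() here.
        AMediaDrm_closeSession(drm, &sessionId);
    }
    AMediaDrm_release(drm);
}
```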
diff --git a/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp b/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp
new file mode 100644
index 0000000..9bbb79c
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_extractor_fuzzer.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_process.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaExtractor.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+constexpr int32_t kCaseStart = 0;
+constexpr int32_t kCaseEnd = 8;
+constexpr float kMinDataSizeFactor = 0.5;
+constexpr int32_t kMaxIterations = 1000;
+const std::string kPathPrefix = "file://";
+
+constexpr SeekMode kSeekMode[] = {AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC,
+                                  AMEDIAEXTRACTOR_SEEK_NEXT_SYNC,
+                                  AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC};
+
+class NdkExtractorFuzzer {
+  public:
+    NdkExtractorFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {
+        mDataSourceFd = mkstemp(mTestPath);
+        std::vector<char> dataBuffer = mFdp.ConsumeBytes<char>(
+                mFdp.ConsumeIntegralInRange<int32_t>(kMinDataSizeFactor * size, size));
+        mDataSize = dataBuffer.size();
+        write(mDataSourceFd, dataBuffer.data(), dataBuffer.size());
+    };
+
+    ~NdkExtractorFuzzer() {
+        close(mDataSourceFd);
+        remove(mTestPath);
+    };
+
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    int32_t mDataSourceFd = 0;
+    int32_t mDataSize = 0;
+
+    // Mutable TestSource file path template for mkstemp().
+    char mTestPath[64] = "/data/local/tmp/TestSource_XXXXXX";
+};
+
+void NdkExtractorFuzzer::process() {
+    AMediaExtractor* mMediaExtractor = AMediaExtractor_new();
+    AMediaDataSource* mDataSource = nullptr;
+
+    if (mFdp.ConsumeBool()) {
+        AMediaExtractor_setDataSourceFd(mMediaExtractor, mDataSourceFd, 0, mDataSize);
+    } else {
+        mDataSource = AMediaDataSource_newUri((kPathPrefix + mTestPath).c_str(), 0 /* numkeys */,
+                                              nullptr /* keyvalues */);
+        AMediaExtractor_setDataSourceCustom(mMediaExtractor, mDataSource);
+    }
+
+    /**
+     * Limit the number of iterations of the while loop
+     * to prevent a possible timeout.
+     */
+    int32_t count = 0;
+    while (mFdp.remaining_bytes() && count++ < kMaxIterations) {
+        switch (mFdp.ConsumeIntegralInRange<int32_t>(kCaseStart, kCaseEnd)) {
+            case 0:{
+                AMediaExtractor_selectTrack(mMediaExtractor,
+                                            mFdp.ConsumeIntegral<size_t>() /* idx */);
+                break;
+            }
+            case 1:{
+                AMediaExtractor_unselectTrack(mMediaExtractor,
+                                              mFdp.ConsumeIntegral<size_t>() /* idx */);
+                break;
+            }
+            case 2:{
+                int32_t sampleSize = AMediaExtractor_getSampleSize(mMediaExtractor);
+                if (sampleSize > 0) {
+                    std::vector<uint8_t> buffer(sampleSize);
+                    AMediaExtractor_readSampleData(
+                            mMediaExtractor, buffer.data(),
+                            mFdp.ConsumeIntegralInRange<size_t>(0, sampleSize) /* capacity */);
+                }
+                break;
+            }
+            case 3:{
+                AMediaExtractor_getSampleFlags(mMediaExtractor);
+                break;
+            }
+            case 4:{
+                AMediaExtractor_getSampleCryptoInfo(mMediaExtractor);
+                break;
+            }
+            case 5:{
+                AMediaExtractor_getPsshInfo(mMediaExtractor);
+                break;
+            }
+            case 6:{
+                AMediaExtractor_advance(mMediaExtractor);
+                break;
+            }
+            case 7:{
+                AMediaFormat* mediaFormat = mFdp.ConsumeBool() ? AMediaFormat_new() : nullptr;
+                AMediaExtractor_getSampleFormat(mMediaExtractor, mediaFormat);
+                AMediaFormat_delete(mediaFormat);
+                break;
+            }
+            case 8:{
+                AMediaExtractor_seekTo(mMediaExtractor,
+                                       mFdp.ConsumeIntegral<int64_t>() /* seekPosUs */,
+                                       mFdp.PickValueInArray(kSeekMode) /* mode */);
+                break;
+            }
+        };
+    }
+
+    AMediaDataSource_delete(mDataSource);
+    AMediaExtractor_delete(mMediaExtractor);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    /**
+     * Create a threadpool for incoming binder transactions;
+     * without it, the extractor results in a DoS after a few instances.
+     */
+    ABinderProcess_startThreadPool();
+
+    NdkExtractorFuzzer ndkExtractorFuzzer(data, size);
+    ndkExtractorFuzzer.process();
+    return 0;
+}
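
For context (illustrative only, not part of the change): the straightforward extraction loop that this fuzzer perturbs would look roughly like the sketch below, using the same NdkMediaExtractor calls exercised above plus AMediaExtractor_getTrackCount, on an extractor whose data source has already been set.

```
// Sketch only, not part of this patch: a plain extraction loop that selects
// every track and reads every sample until the stream is exhausted.
#include <media/NdkMediaExtractor.h>
#include <vector>

void drainAllSamples(AMediaExtractor* extractor) {
    size_t trackCount = AMediaExtractor_getTrackCount(extractor);
    for (size_t i = 0; i < trackCount; ++i) {
        AMediaExtractor_selectTrack(extractor, i);
    }
    std::vector<uint8_t> buffer;
    while (true) {
        ssize_t sampleSize = AMediaExtractor_getSampleSize(extractor);
        if (sampleSize < 0) {
            break;  // no sample available
        }
        buffer.resize(sampleSize);
        AMediaExtractor_readSampleData(extractor, buffer.data(), buffer.size());
        if (!AMediaExtractor_advance(extractor)) {
            break;  // end of stream
        }
    }
}
```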
diff --git a/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
new file mode 100644
index 0000000..6c11798
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_image_reader_fuzzer.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/native_handle.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/BufferQueue.h>
+#include <media/NdkImageReader.h>
+
+constexpr int32_t kMaxSize = INT_MAX;
+constexpr int32_t kMinSize = 1;
+constexpr int32_t kMinImages = 1;
+
+class NdkImageReaderFuzzer {
+  public:
+    NdkImageReaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+    static void onImageAvailable(void*, AImageReader*){};
+    static void onBufferRemoved(void*, AImageReader*, AHardwareBuffer*){};
+};
+
+void NdkImageReaderFuzzer::process() {
+    AImageReader* reader = nullptr;
+    AImage* img = nullptr;
+    native_handle_t* handle = nullptr;
+    int32_t* acquireFenceFd = nullptr;
+    int32_t imageWidth = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageHeight = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageFormat = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageUsage = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+    int32_t imageMaxCount = mFdp.ConsumeIntegralInRange<int32_t>(
+            kMinImages, android::BufferQueue::MAX_MAX_ACQUIRED_BUFFERS);
+    AImageReader_ImageListener readerAvailableCb{this, NdkImageReaderFuzzer::onImageAvailable};
+    AImageReader_BufferRemovedListener readerDetachedCb{this, onBufferRemoved};
+
+    if (mFdp.ConsumeBool()) {
+        AImageReader_new(imageWidth, imageHeight, imageFormat, imageMaxCount, &reader);
+    } else {
+        AImageReader_newWithUsage(imageWidth, imageHeight, imageFormat, imageUsage, imageMaxCount,
+                                  &reader);
+    }
+    while (mFdp.remaining_bytes()) {
+        auto ndkImageFunction = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { AImageReader_acquireNextImage(reader, &img); },
+                [&]() { AImageReader_acquireLatestImage(reader, &img); },
+                [&]() { AImageReader_setImageListener(reader, &readerAvailableCb); },
+                [&]() { AImageReader_acquireNextImageAsync(reader, &img, acquireFenceFd); },
+                [&]() { AImageReader_acquireLatestImageAsync(reader, &img, acquireFenceFd); },
+                [&]() { AImageReader_setBufferRemovedListener(reader, &readerDetachedCb); },
+                [&]() { AImageReader_getWindowNativeHandle(reader, &handle); },
+        });
+        ndkImageFunction();
+    }
+    AImageReader_delete(reader);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkImageReaderFuzzer ndkImageReaderFuzzer(data, size);
+    ndkImageReaderFuzzer.process();
+    return 0;
+}
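
As a point of comparison (illustrative only, not part of the change): a well-behaved AImageReader consumer releases every image it acquires with AImage_delete, which the fuzzer above omits. A minimal sketch of that pattern:

```
// Sketch only, not part of this patch: acquire-and-release loop for an
// already-created AImageReader.
#include <media/NdkImage.h>
#include <media/NdkImageReader.h>

void drainReader(AImageReader* reader) {
    AImage* image = nullptr;
    while (AImageReader_acquireNextImage(reader, &image) == AMEDIA_OK) {
        int32_t width = 0;
        int32_t height = 0;
        AImage_getWidth(image, &width);
        AImage_getHeight(image, &height);
        // ... consume the image planes here ...
        AImage_delete(image);  // return the buffer to the reader
        image = nullptr;
    }
}
```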
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
new file mode 100644
index 0000000..c19ea13
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -0,0 +1,257 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <datasource/FileSource.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaFormat.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include <fstream>
+
+const char* kValidKeys[] = {
+        AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR,
+        AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR,
+        AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION,
+        AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL,
+        AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL,
+        AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_AAC_PROFILE,
+        AMEDIAFORMAT_KEY_AAC_SBR_MODE,
+        AMEDIAFORMAT_KEY_ALBUM,
+        AMEDIAFORMAT_KEY_ALBUMART,
+        AMEDIAFORMAT_KEY_ALBUMARTIST,
+        AMEDIAFORMAT_KEY_ARTIST,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID,
+        AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PROGRAM_ID,
+        AMEDIAFORMAT_KEY_AUDIO_SESSION_ID,
+        AMEDIAFORMAT_KEY_AUTHOR,
+        AMEDIAFORMAT_KEY_BITRATE_MODE,
+        AMEDIAFORMAT_KEY_BIT_RATE,
+        AMEDIAFORMAT_KEY_BITS_PER_SAMPLE,
+        AMEDIAFORMAT_KEY_CAPTURE_RATE,
+        AMEDIAFORMAT_KEY_CDTRACKNUMBER,
+        AMEDIAFORMAT_KEY_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_CHANNEL_MASK,
+        AMEDIAFORMAT_KEY_COLOR_FORMAT,
+        AMEDIAFORMAT_KEY_COLOR_RANGE,
+        AMEDIAFORMAT_KEY_COLOR_STANDARD,
+        AMEDIAFORMAT_KEY_COLOR_TRANSFER,
+        AMEDIAFORMAT_KEY_COMPILATION,
+        AMEDIAFORMAT_KEY_COMPLEXITY,
+        AMEDIAFORMAT_KEY_COMPOSER,
+        AMEDIAFORMAT_KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+        AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK,
+        AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_IV,
+        AMEDIAFORMAT_KEY_CRYPTO_KEY,
+        AMEDIAFORMAT_KEY_CRYPTO_MODE,
+        AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES,
+        AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK,
+        AMEDIAFORMAT_KEY_CSD,
+        AMEDIAFORMAT_KEY_CSD_0,
+        AMEDIAFORMAT_KEY_CSD_1,
+        AMEDIAFORMAT_KEY_CSD_2,
+        AMEDIAFORMAT_KEY_CSD_AVC,
+        AMEDIAFORMAT_KEY_CSD_HEVC,
+        AMEDIAFORMAT_KEY_D263,
+        AMEDIAFORMAT_KEY_DATE,
+        AMEDIAFORMAT_KEY_DISCNUMBER,
+        AMEDIAFORMAT_KEY_DISPLAY_CROP,
+        AMEDIAFORMAT_KEY_DISPLAY_HEIGHT,
+        AMEDIAFORMAT_KEY_DISPLAY_WIDTH,
+        AMEDIAFORMAT_KEY_DURATION,
+        AMEDIAFORMAT_KEY_ENCODER_DELAY,
+        AMEDIAFORMAT_KEY_ENCODER_PADDING,
+        AMEDIAFORMAT_KEY_ESDS,
+        AMEDIAFORMAT_KEY_EXIF_OFFSET,
+        AMEDIAFORMAT_KEY_EXIF_SIZE,
+        AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
+        AMEDIAFORMAT_KEY_FRAME_COUNT,
+        AMEDIAFORMAT_KEY_FRAME_RATE,
+        AMEDIAFORMAT_KEY_GENRE,
+        AMEDIAFORMAT_KEY_GRID_COLUMNS,
+        AMEDIAFORMAT_KEY_GRID_ROWS,
+        AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT,
+        AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
+        AMEDIAFORMAT_KEY_HDR10_PLUS_INFO,
+        AMEDIAFORMAT_KEY_HEIGHT,
+        AMEDIAFORMAT_KEY_ICC_PROFILE,
+        AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
+        AMEDIAFORMAT_KEY_IS_ADTS,
+        AMEDIAFORMAT_KEY_IS_AUTOSELECT,
+        AMEDIAFORMAT_KEY_IS_DEFAULT,
+        AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE,
+        AMEDIAFORMAT_KEY_IS_SYNC_FRAME,
+        AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+        AMEDIAFORMAT_KEY_LANGUAGE,
+        AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK,
+        AMEDIAFORMAT_KEY_LATENCY,
+        AMEDIAFORMAT_KEY_LEVEL,
+        AMEDIAFORMAT_KEY_LOCATION,
+        AMEDIAFORMAT_KEY_LOOP,
+        AMEDIAFORMAT_KEY_LOW_LATENCY,
+        AMEDIAFORMAT_KEY_LYRICIST,
+        AMEDIAFORMAT_KEY_MANUFACTURER,
+        AMEDIAFORMAT_KEY_MAX_BIT_RATE,
+        AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER,
+        AMEDIAFORMAT_KEY_MAX_HEIGHT,
+        AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
+        AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER,
+        AMEDIAFORMAT_KEY_MAX_WIDTH,
+        AMEDIAFORMAT_KEY_MIME,
+        AMEDIAFORMAT_KEY_MPEG_USER_DATA,
+        AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER,
+        AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS,
+        AMEDIAFORMAT_KEY_MPEGH_PROFILE_LEVEL_INDICATION,
+        AMEDIAFORMAT_KEY_MPEGH_REFERENCE_CHANNEL_LAYOUT,
+        AMEDIAFORMAT_KEY_OPERATING_RATE,
+        AMEDIAFORMAT_KEY_PCM_ENCODING,
+        AMEDIAFORMAT_KEY_PICTURE_TYPE,
+        AMEDIAFORMAT_KEY_PRIORITY,
+        AMEDIAFORMAT_KEY_PROFILE,
+        AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN,
+        AMEDIAFORMAT_KEY_PSSH,
+        AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP,
+        AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
+        AMEDIAFORMAT_KEY_ROTATION,
+        AMEDIAFORMAT_KEY_SAMPLE_FILE_OFFSET,
+        AMEDIAFORMAT_KEY_SAMPLE_RATE,
+        AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND,
+        AMEDIAFORMAT_KEY_SAR_HEIGHT,
+        AMEDIAFORMAT_KEY_SAR_WIDTH,
+        AMEDIAFORMAT_KEY_SEI,
+        AMEDIAFORMAT_KEY_SLICE_HEIGHT,
+        AMEDIAFORMAT_KEY_SLOW_MOTION_MARKERS,
+        AMEDIAFORMAT_KEY_STRIDE,
+        AMEDIAFORMAT_KEY_TARGET_TIME,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYER_COUNT,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID,
+        AMEDIAFORMAT_KEY_TEMPORAL_LAYERING,
+        AMEDIAFORMAT_KEY_TEXT_FORMAT_DATA,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_AV1C,
+        AMEDIAFORMAT_KEY_THUMBNAIL_CSD_HEVC,
+        AMEDIAFORMAT_KEY_THUMBNAIL_HEIGHT,
+        AMEDIAFORMAT_KEY_THUMBNAIL_TIME,
+        AMEDIAFORMAT_KEY_THUMBNAIL_WIDTH,
+        AMEDIAFORMAT_KEY_TILE_HEIGHT,
+        AMEDIAFORMAT_KEY_TILE_WIDTH,
+        AMEDIAFORMAT_KEY_TIME_US,
+        AMEDIAFORMAT_KEY_TITLE,
+        AMEDIAFORMAT_KEY_TRACK_ID,
+        AMEDIAFORMAT_KEY_TRACK_INDEX,
+        AMEDIAFORMAT_KEY_VALID_SAMPLES,
+        AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
+        AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE,
+        AMEDIAFORMAT_VIDEO_QP_B_MAX,
+        AMEDIAFORMAT_VIDEO_QP_B_MIN,
+        AMEDIAFORMAT_VIDEO_QP_I_MAX,
+        AMEDIAFORMAT_VIDEO_QP_I_MIN,
+        AMEDIAFORMAT_VIDEO_QP_MAX,
+        AMEDIAFORMAT_VIDEO_QP_MIN,
+        AMEDIAFORMAT_VIDEO_QP_P_MAX,
+        AMEDIAFORMAT_VIDEO_QP_P_MIN,
+        AMEDIAFORMAT_KEY_WIDTH,
+        AMEDIAFORMAT_KEY_XMP_OFFSET,
+        AMEDIAFORMAT_KEY_XMP_SIZE,
+        AMEDIAFORMAT_KEY_YEAR,
+};
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMinChoice = 0;
+constexpr size_t kMaxChoice = 9;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+    AMediaFormat* mediaFormat = AMediaFormat_new();
+    while (fdp.remaining_bytes()) {
+        const char* name = nullptr;
+        std::string nameString;
+        if (fdp.ConsumeBool()) {
+            nameString =
+                    fdp.ConsumeBool()
+                            ? fdp.PickValueInArray(kValidKeys)
+                            : fdp.ConsumeRandomLengthString(
+                                      fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+            name = nameString.c_str();
+        }
+        switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
+            case 0: {
+                AMediaFormat_setInt32(mediaFormat, name,
+                                      fdp.ConsumeIntegral<int32_t>() /* value */);
+                break;
+            }
+            case 1: {
+                AMediaFormat_setInt64(mediaFormat, name,
+                                      fdp.ConsumeIntegral<int64_t>() /* value */);
+                break;
+            }
+            case 2: {
+                AMediaFormat_setFloat(mediaFormat, name,
+                                      fdp.ConsumeFloatingPoint<float>() /* value */);
+                break;
+            }
+            case 3: {
+                AMediaFormat_setDouble(mediaFormat, name,
+                                       fdp.ConsumeFloatingPoint<double>() /* value */);
+                break;
+            }
+            case 4: {
+                AMediaFormat_setSize(mediaFormat, name, fdp.ConsumeIntegral<size_t>() /* value */);
+                break;
+            }
+            case 5: {
+                std::string value;
+                if (fdp.ConsumeBool()) {
+                    value = fdp.ConsumeRandomLengthString(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                }
+                AMediaFormat_setString(mediaFormat, name,
+                                       fdp.ConsumeBool() ? nullptr : value.c_str());
+                break;
+            }
+            case 6: {
+                AMediaFormat_setRect(mediaFormat, name, fdp.ConsumeIntegral<int32_t>() /* left */,
+                                     fdp.ConsumeIntegral<int32_t>() /* top */,
+                                     fdp.ConsumeIntegral<int32_t>() /* right */,
+                                     fdp.ConsumeIntegral<int32_t>() /* bottom */);
+                break;
+            }
+            case 7: {
+                std::vector<uint8_t> bufferData = fdp.ConsumeBytes<uint8_t>(
+                        fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaFormat_setBuffer(mediaFormat, name, bufferData.data(), bufferData.size());
+                break;
+            }
+            case 8: {
+                AMediaFormat_toString(mediaFormat);
+                break;
+            }
+            default: {
+                AMediaFormat* format = fdp.ConsumeBool() ? nullptr : AMediaFormat_new();
+                AMediaFormat_copy(format, mediaFormat);
+                AMediaFormat_delete(format);
+                break;
+            }
+        }
+    }
+    AMediaFormat_clear(mediaFormat);
+    AMediaFormat_delete(mediaFormat);
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp
new file mode 100644
index 0000000..8c49d28
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_mediamuxer_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android/binder_process.h>
+#include <fcntl.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/NdkMediaMuxer.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+const std::string kMuxerFile = "mediaMuxer";
+const std::string kAppendFile = "mediaAppend";
+constexpr size_t kMinBytes = 0;
+constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMinChoice = 0;
+constexpr size_t kMaxChoice = 7;
+constexpr size_t kMaxStringLength = 20;
+constexpr size_t kOffset = 0;
+
+constexpr OutputFormat kOutputFormat[] = {AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4,
+                                          AMEDIAMUXER_OUTPUT_FORMAT_WEBM,
+                                          AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP};
+constexpr AppendMode kAppendMode[] = {AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP,
+                                      AMEDIAMUXER_APPEND_TO_EXISTING_DATA};
+
+const std::string kAudioMimeType[] = {"audio/3gpp", "audio/amr-wb", "audio/mp4a-latm",
+                                      "audio/flac", "audio/vorbis", "audio/opus"};
+
+const std::string kVideoMimeType[] = {"video/x-vnd.on2.vp8", "video/x-vnd.on2.vp9", "video/av01",
+                                      "video/avc",           "video/hevc",          "video/mp4v-es",
+                                      "video/3gpp"};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMediaFormat* format) {
+    std::string mimeType = fdp.ConsumeBool() ? fdp.ConsumeRandomLengthString(kMaxStringLength)
+                                             : fdp.PickValueInArray(kAudioMimeType);
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mimeType.c_str());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt64(format, AMEDIAFORMAT_KEY_DURATION, fdp.ConsumeIntegral<int64_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMediaFormat* format) {
+    std::string mimeType = fdp.ConsumeBool() ? fdp.ConsumeRandomLengthString(kMaxStringLength)
+                                             : fdp.PickValueInArray(kVideoMimeType);
+    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mimeType.c_str());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, fdp.ConsumeIntegral<int32_t>());
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+                          fdp.ConsumeFloatingPoint<float>());
+    AMediaFormat_setFloat(format, AMEDIAFORMAT_KEY_CAPTURE_RATE, fdp.ConsumeFloatingPoint<float>());
+    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, fdp.ConsumeIntegral<int32_t>());
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    /**
+     * Create a threadpool for incoming binder transactions;
+     * without it, the muxer results in a DoS after a few instances.
+     */
+    ABinderProcess_startThreadPool();
+    FuzzedDataProvider fdp(data, size);
+    /**
+     * memfd_create() creates an anonymous file and returns a file
+     * descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+     * operations on this file.
+     */
+    int32_t fd = -1;
+    AMediaMuxer* muxer = nullptr;
+    if (fdp.ConsumeBool()) {
+        fd = memfd_create(kMuxerFile.c_str(), MFD_ALLOW_SEALING);
+        muxer = AMediaMuxer_new(fd, fdp.ConsumeBool()
+                                            ? fdp.PickValueInArray(kOutputFormat)
+                                            : (OutputFormat)fdp.ConsumeIntegral<int32_t>());
+    } else {
+        fd = memfd_create(kAppendFile.c_str(), MFD_ALLOW_SEALING);
+        std::vector<uint8_t> appendData =
+                fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+        write(fd, appendData.data(), appendData.size());
+        muxer = AMediaMuxer_append(fd, fdp.PickValueInArray(kAppendMode) /* mode */);
+    }
+    if (!muxer) {
+        close(fd);
+        return 0;
+    }
+    AMediaFormat* mediaFormat = nullptr;
+    ssize_t trackIdx = 0;
+    while (fdp.remaining_bytes()) {
+        int32_t kSwitchChoice = fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice);
+        switch (kSwitchChoice) {
+            case 0: {
+                AMediaMuxer_setLocation(muxer, fdp.ConsumeFloatingPoint<float>() /* latitude */,
+                                        fdp.ConsumeFloatingPoint<float>() /* longitude */);
+                break;
+            }
+            case 1: {
+                AMediaMuxer_setOrientationHint(muxer, fdp.ConsumeIntegral<int32_t>() /* degrees */);
+                break;
+            }
+            case 2: {
+                AMediaMuxer_start(muxer);
+                break;
+            }
+            case 3: {
+                AMediaMuxer_stop(muxer);
+                break;
+            }
+            case 4: {
+                AMediaMuxer_getTrackCount(muxer);
+                break;
+            }
+            case 5: {
+                AMediaFormat* getFormat =
+                        AMediaMuxer_getTrackFormat(muxer, fdp.ConsumeIntegral<size_t>() /* idx */);
+                AMediaFormat_delete(getFormat);
+                break;
+            }
+            case 6: {
+                mediaFormat = AMediaFormat_new();
+                fdp.ConsumeBool() ? getSampleAudioFormat(fdp, mediaFormat)
+                                  : getSampleVideoFormat(fdp, mediaFormat);
+                trackIdx = AMediaMuxer_addTrack(muxer, mediaFormat);
+                AMediaFormat_delete(mediaFormat);
+                break;
+            }
+            default: {
+                std::vector<uint8_t> sampleData = fdp.ConsumeBytes<uint8_t>(
+                        fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaCodecBufferInfo codecBuffer;
+                codecBuffer.size = sampleData.size();
+                codecBuffer.offset = kOffset;
+                codecBuffer.presentationTimeUs = fdp.ConsumeIntegral<int64_t>();
+                codecBuffer.flags = fdp.ConsumeIntegral<uint32_t>();
+                AMediaMuxer_writeSampleData(
+                        muxer,
+                        fdp.ConsumeBool() ? trackIdx : fdp.ConsumeIntegral<size_t>() /* trackIdx */,
+                        sampleData.data(), &codecBuffer);
+                break;
+            }
+        }
+    }
+    AMediaMuxer_delete(muxer);
+    close(fd);
+    return 0;
+}
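
The memfd_create() pattern described in the comment above reduces to a few lines in normal (non-fuzzing) use: an anonymous, seekable in-memory file is created, seeded with existing container data, and handed to AMediaMuxer_append(). A minimal sketch, with error handling elided and the append mode chosen arbitrarily:

    #include <cstdint>
    #include <media/NdkMediaMuxer.h>
    #include <sys/mman.h>
    #include <unistd.h>

    void appendToMemoryFile(const uint8_t* containerBytes, size_t len) {
        // Anonymous in-memory file; MFD_ALLOW_SEALING permits later sealing operations.
        int fd = memfd_create("mediaAppend", MFD_ALLOW_SEALING);
        if (fd < 0) return;
        (void)write(fd, containerBytes, len);  // seed the file with existing muxed data
        AMediaMuxer* muxer = AMediaMuxer_append(fd, AMEDIAMUXER_APPEND_IGNORE_LAST_VIDEO_GOP);
        if (muxer != nullptr) {
            // ... add tracks, start, write samples, stop ...
            AMediaMuxer_delete(muxer);
        }
        close(fd);
    }
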
diff --git a/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
new file mode 100644
index 0000000..d348f66
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_sync_codec_fuzzer.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+
+constexpr int32_t kMaxNdkCodecAPIs = 12;
+
+class NdkSyncCodecFuzzer : public NdkMediaCodecFuzzerBase {
+  public:
+    NdkSyncCodecFuzzer(const uint8_t* data, size_t size)
+        : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+        setFdp(&mFdp);
+    };
+    void invokeSyncCodeConfigAPI();
+
+    static void CodecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+                                     int64_t systemNano) {
+        (void)codec;
+        (void)userdata;
+        (void)mediaTimeUs;
+        (void)systemNano;
+    };
+
+  private:
+    FuzzedDataProvider mFdp;
+    AMediaCodec* mCodec = nullptr;
+    void invokekSyncCodecAPIs(bool isEncoder);
+};
+
+void NdkSyncCodecFuzzer::invokekSyncCodecAPIs(bool isEncoder) {
+    ANativeWindow* nativeWindow = nullptr;
+    int32_t numOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+    int32_t count = 0;
+    while (++count <= numOfFrames) {
+        int32_t ndkcodecAPI = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+        switch (ndkcodecAPI) {
+            case 0: {  // configure the codec
+                AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow, nullptr /* crypto */,
+                                      (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0));
+                break;
+            }
+            case 1: {  // start codec
+                AMediaCodec_start(mCodec);
+                break;
+            }
+            case 2: {  // stop codec
+                AMediaCodec_stop(mCodec);
+                break;
+            }
+            case 3: {  // create persistent input surface
+                AMediaCodec_createPersistentInputSurface(&nativeWindow);
+                break;
+            }
+            case 4: {  // buffer operation APIs
+                invokeInputBufferOperationAPI(mCodec);
+                break;
+            }
+            case 5: {
+                invokeOutputBufferOperationAPI(mCodec);
+                break;
+            }
+            case 6: {  // get input and output Format
+                invokeCodecFormatAPI(mCodec);
+                break;
+            }
+            case 7: {
+                AMediaCodec_signalEndOfInputStream(mCodec);
+                break;
+            }
+            case 8: {  // set parameters
+                // Create a new format holding the parameter and set it on the codec
+                AMediaFormat* params = AMediaFormat_new();
+                AMediaFormat_setInt32(
+                        params, "video-bitrate",
+                        mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+                AMediaCodec_setParameters(mCodec, params);
+                AMediaFormat_delete(params);
+                break;
+            }
+            case 9: {  // flush codec
+                AMediaCodec_flush(mCodec);
+                if (mFdp.ConsumeBool()) {
+                    AMediaCodec_start(mCodec);
+                }
+                break;
+            }
+            case 10: {  // get the codec name
+                char* name = nullptr;
+                AMediaCodec_getName(mCodec, &name);
+                AMediaCodec_releaseName(mCodec, name);
+                break;
+            }
+            case 11: {  // set callback API for frame render output
+                std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+                        mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                AMediaCodecOnFrameRendered callback = CodecOnFrameRendered;
+                AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+                break;
+            }
+            case 12:
+            default: {  // set persistent input surface
+                AMediaCodec_setInputSurface(mCodec, nativeWindow);
+            }
+        }
+    }
+    if (nativeWindow) {
+        ANativeWindow_release(nativeWindow);
+    }
+}
+
+void NdkSyncCodecFuzzer::invokeSyncCodeConfigAPI() {
+    while (mFdp.remaining_bytes() > 0) {
+        bool isEncoder = mFdp.ConsumeBool();
+        mCodec = createCodec(isEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+        if (mCodec) {
+            invokekSyncCodecAPIs(isEncoder);
+            AMediaCodec_delete(mCodec);
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkSyncCodecFuzzer ndkSyncCodecFuzzer(data, size);
+    ndkSyncCodecFuzzer.invokeSyncCodeConfigAPI();
+    return 0;
+}
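
For context, the synchronous AMediaCodec calls that the fuzzer drives in random order follow a fixed configure/start/queue/dequeue sequence in normal use. A minimal sketch of that sequence for a single, already-extracted access unit (hypothetical helper; error handling and the full draining loop are elided):

    #include <cstring>
    #include <media/NdkMediaCodec.h>
    #include <media/NdkMediaFormat.h>

    void decodeOneAccessUnit(const char* mime, AMediaFormat* format,
                             const uint8_t* accessUnit, size_t auSize) {
        AMediaCodec* codec = AMediaCodec_createDecoderByType(mime);
        if (codec == nullptr) return;
        AMediaCodec_configure(codec, format, nullptr /* surface */, nullptr /* crypto */, 0);
        AMediaCodec_start(codec);

        ssize_t inIdx = AMediaCodec_dequeueInputBuffer(codec, 10000 /* timeoutUs */);
        if (inIdx >= 0) {
            size_t capacity = 0;
            uint8_t* in = AMediaCodec_getInputBuffer(codec, static_cast<size_t>(inIdx), &capacity);
            if (in != nullptr) {
                size_t copySize = auSize < capacity ? auSize : capacity;
                memcpy(in, accessUnit, copySize);
                AMediaCodec_queueInputBuffer(codec, static_cast<size_t>(inIdx), 0 /* offset */,
                                             copySize, 0 /* presentationTimeUs */,
                                             AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
            }
        }

        AMediaCodecBufferInfo info;
        ssize_t outIdx = AMediaCodec_dequeueOutputBuffer(codec, &info, 10000 /* timeoutUs */);
        if (outIdx >= 0) {
            AMediaCodec_releaseOutputBuffer(codec, static_cast<size_t>(outIdx), false /* render */);
        }

        AMediaCodec_stop(codec);
        AMediaCodec_delete(codec);
    }
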
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 71bc6d9..76270d3 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -583,7 +583,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param width the width of the image will be filled here if the method call succeeeds.
+ * @param width the width of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -599,7 +599,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param height the height of the image will be filled here if the method call succeeeds.
+ * @param height the height of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -617,7 +617,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param format the format of the image will be filled here if the method call succeeeds.
+ * @param format the format of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -636,7 +636,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param rect the cropped rectangle of the image will be filled here if the method call succeeeds.
+ * @param rect the cropped rectangle of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -662,7 +662,7 @@
  * Available since API level 24.
  *
  * @param image the {@link AImage} of interest.
- * @param timestampNs the timestamp of the image will be filled here if the method call succeeeds.
+ * @param timestampNs the timestamp of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -682,7 +682,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param numPlanes the number of planes of the image will be filled here if the method call
- *         succeeeds.
+ *         succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -706,7 +706,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param pixelStride the pixel stride of the image will be filled here if the method call succeeeds.
+ * @param pixelStride the pixel stride of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -735,7 +735,7 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param rowStride the row stride of the image will be filled here if the method call succeeeds.
+ * @param rowStride the row stride of the image will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -762,8 +762,8 @@
  *
  * @param image the {@link AImage} of interest.
  * @param planeIdx the index of the plane. Must be less than the number of planes of input image.
- * @param data the data pointer of the image will be filled here if the method call succeeeds.
- * @param dataLength the valid length of data will be filled here if the method call succeeeds.
+ * @param data the data pointer of the image will be filled here if the method call succeeds.
+ * @param dataLength the valid length of data will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -826,6 +826,25 @@
  */
 media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer) __INTRODUCED_IN(26);
 
+/**
+ * Query the dataspace of the input {@link AImage}.
+ *
+ * Available since API level 34.
+ *
+ * @param image the {@link AImage} of interest.
+ * @param dataSpace the dataspace of the image will be filled here if the method call succeeds.
+ *                  This must be one of the ADATASPACE_* enum value defined in
+ *                  {@link ADataSpace}.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if image or dataSpace is NULL.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
+ *                 image has been deleted.</li></ul>
+ */
+media_status_t AImage_getDataSpace(const AImage* image,
+                                   /*out*/int32_t* dataSpace) __INTRODUCED_IN(34);
+
 __END_DECLS
 
 #endif //_NDK_IMAGE_H
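
A minimal usage sketch for the AImage_getDataSpace() entry point documented above (the image is assumed to have been acquired from an AImageReader; the fallback value is illustrative):

    #include <media/NdkImage.h>

    int32_t imageDataSpaceOrUnknown(const AImage* image) {
        int32_t dataSpace = 0;
        media_status_t status = AImage_getDataSpace(image, &dataSpace);
        // On success dataSpace holds one of the ADATASPACE_* values; otherwise report 0 (unknown).
        return (status == AMEDIA_OK) ? dataSpace : 0;
    }
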
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 4bd7f2a..b6dcaae 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -79,7 +79,7 @@
  *            by the user, one of them has to be released before a new {@link AImage} will become
  *            available for access through {@link AImageReader_acquireLatestImage} or
  *            {@link AImageReader_acquireNextImage}. Must be greater than 0.
- * @param reader The created image reader will be filled here if the method call succeeeds.
+ * @param reader The created image reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -133,7 +133,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param width the default width of the reader will be filled here if the method call succeeeds.
+ * @param width the default width of the reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -151,7 +151,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param height the default height of the reader will be filled here if the method call succeeeds.
+ * @param height the default height of the reader will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -165,7 +165,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param format the fromat of the reader will be filled here if the method call succeeeds. The
+ * @param format the format of the reader will be filled here if the method call succeeds. The
  *                value will be one of the AIMAGE_FORMAT_* enum values defined in {@link NdkImage.h}.
  *
  * @return <ul>
@@ -181,7 +181,7 @@
  *
  * @param reader The image reader of interest.
  * @param maxImages the maximum number of concurrently acquired images of the reader will be filled
- *                here if the method call succeeeds.
+ *                here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -212,7 +212,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param image the acquired {@link AImage} will be filled here if the method call succeeeds.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -257,7 +257,7 @@
  * Available since API level 24.
  *
  * @param reader The image reader of interest.
- * @param image the acquired {@link AImage} will be filled here if the method call succeeeds.
+ * @param image the acquired {@link AImage} will be filled here if the method call succeeds.
  *
  * @return <ul>
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -387,6 +387,44 @@
         /*out*/ AImageReader** reader) __INTRODUCED_IN(26);
 
 /**
+ * AImageReader constructor similar to {@link AImageReader_newWithUsage} that takes
+ * two additional parameters to build the format of the Image. All other parameters
+ * and the return values are identical to those passed to {@link AImageReader_newWithUsage}.
+ *
+ * <p>Instead of passing {@code format} parameter, this constructor accepts
+ * the combination of {@code hardwareBufferFormat} and {@code dataSpace} for the
+ * format of the Image that the reader will produce.</p>
+ *
+ * Available since API level 34.
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param usage specifies how the consumer will access the AImage.
+ *              See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @param maxImages The maximum number of images the user will want to access simultaneously.
+ *                  See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @param hardwareBufferFormat The hardware buffer format passed by the producer.
+ *                             This must be one of the AHARDWAREBUFFER_FORMAT_* enum values defined
+ *                             in {@link hardware_buffer.h}.
+ * @param dataSpace The dataspace of the Image passed by the producer.
+ *                  This must be one of the ADATASPACE_* enum values defined in
+ *                  {@link ADataSpace}.
+ * @param reader The created image reader will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ *                 height, maxImages, hardwareBufferFormat or dataSpace arguments
+ *                 is not supported.</li>
+ *         <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImageReader_newWithUsage
+ */
+media_status_t AImageReader_newWithDataSpace(int32_t width, int32_t height, uint64_t usage,
+        int32_t maxImages, uint32_t hardwareBufferFormat, int32_t dataSpace,
+        /*out*/ AImageReader** reader) __INTRODUCED_IN(34);
+
+/**
  * Acquire the next {@link AImage} from the image reader's queue asynchronously.
  *
  * <p>AImageReader acquire method similar to {@link AImageReader_acquireNextImage} that takes an
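
An illustrative sketch of the AImageReader_newWithDataSpace() constructor documented above; the RGBA hardware-buffer format, sRGB dataspace, and CPU-read usage are arbitrary example values, not requirements of the API:

    #include <android/data_space.h>
    #include <android/hardware_buffer.h>
    #include <media/NdkImageReader.h>

    AImageReader* createSrgbRgbaReader(int32_t width, int32_t height) {
        AImageReader* reader = nullptr;
        media_status_t status = AImageReader_newWithDataSpace(
                width, height, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, 4 /* maxImages */,
                AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM, ADATASPACE_SRGB, &reader);
        return (status == AMEDIA_OK) ? reader : nullptr;
    }
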
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 4938f76..598beb7 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -63,11 +63,41 @@
 typedef struct AMediaCodecBufferInfo AMediaCodecBufferInfo;
 typedef struct AMediaCodecCryptoInfo AMediaCodecCryptoInfo;
 
+
+/**
+ * Definitions of per-buffer flags for operation with NdkMediaCodec.
+ *
+ * The semantics of these enums match those of the same name
+ * in {@link android.media.MediaCodec}.
+ */
 enum {
+    /**
+     * This indicates that the (encoded) buffer marked as such contains
+     * the data for a key frame.
+     *
+     * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_KEY_FRAME}
+     */
+    AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 1, // introduced in API 34
     AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG = 2,
     AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM = 4,
     AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME = 8,
+    /**
+     * This indicates that the buffer contains non-media data for the
+     * muxer to process.
+     *
+     * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_MUXER_DATA}
+     */
+    AMEDIACODEC_BUFFER_FLAG_MUXER_DATA = 16,  // introduced in API 34
+    /**
+     * This indicates that the buffer is decoded and updates the internal state of the decoder,
+     * but does not produce any output buffer.
+     *
+     * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY}
+     */
+    AMEDIACODEC_BUFFER_FLAG_DECODE_ONLY = 32,  // introduced in API 34
+};
 
+enum {
     AMEDIACODEC_CONFIGURE_FLAG_ENCODE = 1,
     AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED = -3,
     AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED = -2,
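
A small sketch of how a client might inspect the per-buffer flags above on a dequeued output buffer (hypothetical helper; the reactions are only sketched in comments):

    #include <media/NdkMediaCodec.h>

    void handleOutputBuffer(AMediaCodec* codec, ssize_t idx, const AMediaCodecBufferInfo& info) {
        if (idx < 0) return;
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) {
            // Codec-specific data; a muxer client would typically not write this as a sample.
        }
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_KEY_FRAME) {
            // Encoded key frame: a safe point for seeking or for cutting/appending a stream.
        }
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
            // Last buffer of the stream; stop dequeueing after releasing it.
        }
        AMediaCodec_releaseOutputBuffer(codec, static_cast<size_t>(idx), false /* render */);
    }
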
diff --git a/media/ndk/include/media/NdkMediaDrm.h b/media/ndk/include/media/NdkMediaDrm.h
index 4eca3d7..8044140 100644
--- a/media/ndk/include/media/NdkMediaDrm.h
+++ b/media/ndk/include/media/NdkMediaDrm.h
@@ -261,8 +261,8 @@
 /**
  * Open a new session with the MediaDrm object.  A session ID is returned.
  *
- * Returns MEDIADRM_NOT_PROVISIONED_ERROR if provisioning is needed.
- * Returns MEDIADRM_RESOURCE_BUSY_ERROR if required resources are in use.
+ * Returns AMEDIA_DRM_NOT_PROVISIONED if provisioning is needed.
+ * Returns AMEDIA_DRM_RESOURCE_BUSY if required resources are in use.
  *
  * Available since API level 21.
  */
@@ -327,7 +327,7 @@
  *   2. keyRequestSize will be set to the size of the request
  *   If this does not return AMEDIA_OK, value of these parameters should not be used.
  *
- * Returns MEDIADRM_NOT_PROVISIONED_ERROR if reprovisioning is needed, due to a
+ * Returns AMEDIA_DRM_NOT_PROVISIONED if reprovisioning is needed, due to a
  * problem with the device certificate.
  *
  * Available since API level 21.
@@ -390,7 +390,7 @@
  *   4. keyRequestType will be set to the key request type. Passing in NULL means
 *       you don't need it to be reported.
  *
- * Returns MEDIADRM_NOT_PROVISIONED_ERROR if reprovisioning is needed, due to a
+ * Returns AMEDIA_DRM_NOT_PROVISIONED if reprovisioning is needed, due to a
  * problem with the device certificate.
  *
  * Available since API level 33.
@@ -457,7 +457,7 @@
  * On entry, numPairs should be set by the caller to the maximum number of pairs
  * that can be returned (the size of the array).  On exit, numPairs will be set
  * to the number of entries written to the array.  If the number of {key, value} pairs
- * to be returned is greater than *numPairs, MEDIADRM_SHORT_BUFFER will be returned
+ * to be returned is greater than *numPairs, AMEDIA_DRM_SHORT_BUFFER will be returned
  * and numPairs will be set to the number of pairs available.
  *
  * Available since API level 21.
@@ -495,7 +495,7 @@
  *   DRM engine plugin.
  * responseSize is the length of the provisioning response in bytes.
  *
- * Returns MEDIADRM_DEVICE_REVOKED_ERROR if the response indicates that the
+ * Returns AMEDIA_DRM_DEVICE_REVOKED if the response indicates that the
  * server rejected the request
  *
  * Available since API level 21.
@@ -522,7 +522,7 @@
  * numSecureStops is set by the caller to the maximum number of secure stops to
  * return.  On exit, *numSecureStops will be set to the number actually returned.
  * If *numSecureStops is too small for the number of secure stops available,
- * MEDIADRM_SHORT_BUFFER will be returned and *numSecureStops will be set to the
+ * AMEDIA_DRM_SHORT_BUFFER will be returned and *numSecureStops will be set to the
  * number required.
  *
  * Available since API level 21.
@@ -657,7 +657,7 @@
  * Generate a signature using the specified macAlgorithm over the message data
  * referenced by message of size messageSize and store the signature in the
  * buffer referenced signature of max size *signatureSize.  If the buffer is not
- * large enough to hold the signature, MEDIADRM_SHORT_BUFFER is returned and
+ * large enough to hold the signature, AMEDIA_DRM_SHORT_BUFFER is returned and
  * *signatureSize is set to the buffer size required.  The key to use is identified
  * by the 16 byte keyId.  The key must have been loaded into the session using
  * provideKeyResponse.
@@ -670,7 +670,7 @@
 
 /*
  * Perform a signature verification using the specified macAlgorithm over the message
- * data referenced by the message parameter of size messageSize. Returns MEDIADRM_OK
+ * data referenced by the message parameter of size messageSize. Returns AMEDIA_OK
  * if the signature matches, otherwise MEDIADRM_VERIFY_FAILED is returned. The key to
  * use is identified by the 16 byte keyId.  The key must have been loaded into the
  * session using provideKeyResponse.
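
For reference, a minimal sketch of the session-open flow using the error-code names the documentation above now refers to (the provisioning round-trip and retry logic are elided):

    #include <media/NdkMediaDrm.h>

    bool openSessionOrRequestProvisioning(AMediaDrm* drm, AMediaDrmSessionId* sessionId) {
        media_status_t status = AMediaDrm_openSession(drm, sessionId);
        if (status == AMEDIA_DRM_NOT_PROVISIONED) {
            const uint8_t* request = nullptr;
            size_t requestSize = 0;
            const char* serverUrl = nullptr;
            // The request would be posted to serverUrl and the reply passed to
            // AMediaDrm_provideProvisionResponse() before retrying openSession().
            AMediaDrm_getProvisionRequest(drm, &request, &requestSize, &serverUrl);
            return false;
        }
        if (status == AMEDIA_DRM_RESOURCE_BUSY) {
            return false;  // required resources are in use; try again later
        }
        return status == AMEDIA_OK;
    }
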
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 2195657..b2cdf8d 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -135,6 +135,14 @@
 extern const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_AAC_PROFILE __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE __INTRODUCED_IN(28);
+/**
+ * A key for applications to opt out of allowing
+ * a Surface to discard undisplayed/unconsumed frames
+ * as means to catch up after falling behind.
+ *
+ * Semantics match those of {@link android.media.MediaFormat#KEY_ALLOW_FRAME_DROP}
+ */
+extern const char* AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP __INTRODUCED_IN(34);
 extern const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_BITRATE_MODE __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_BIT_RATE __INTRODUCED_IN(21);
@@ -169,6 +177,13 @@
 extern const char* AMEDIAFORMAT_KEY_LANGUAGE __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_LATENCY __INTRODUCED_IN(28);
 extern const char* AMEDIAFORMAT_KEY_LEVEL __INTRODUCED_IN(28);
+/**
+ * A key describing the maximum number of B frames between I or P frames,
+ * to be used by a video encoder.
+ *
+ * Semantics match those of {@link android.media.MediaFormat#KEY_MAX_B_FRAMES}
+ */
+extern const char* AMEDIAFORMAT_KEY_MAX_B_FRAMES __INTRODUCED_IN(34);
 extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE __INTRODUCED_IN(21);
 extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH __INTRODUCED_IN(21);
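
Both new keys documented above are plain integer format entries; a minimal sketch of setting them (the values 0 and 2 are arbitrary examples):

    #include <media/NdkMediaFormat.h>

    void applyNewFormatKeys(AMediaFormat* decoderFormat, AMediaFormat* encoderFormat) {
        // Decoder client opting out of Surface frame dropping.
        AMediaFormat_setInt32(decoderFormat, AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP, 0);
        // Encoder client allowing up to two B frames between I/P frames.
        AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_MAX_B_FRAMES, 2);
    }
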
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index d7eccb8..1674ffa 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -48,10 +48,22 @@
 struct AMediaMuxer;
 typedef struct AMediaMuxer AMediaMuxer;
 
+/**
+ * Defines the output format. These constants are used with the constructor.
+ *
+ * These enums match the ones used in {@link android.media.MediaMuxer.OutputFormat}
+ */
 typedef enum {
+    /** MPEG4 media file format*/
     AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 = 0,
-    AMEDIAMUXER_OUTPUT_FORMAT_WEBM   = 1,
-    AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP   = 2,
+    /** WEBM media file format*/
+    AMEDIAMUXER_OUTPUT_FORMAT_WEBM = 1,
+    /** 3GPP media file format*/
+    AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP = 2,
+    /** HEIF media file format*/
+    AMEDIAMUXER_OUTPUT_FORMAT_HEIF = 3,  // introduced in API 34
+    /** Ogg media file format*/
+    AMEDIAMUXER_OUTPUT_FORMAT_OGG = 4,  // introduced in API 34
 } OutputFormat;
 
 typedef enum {
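
A small sketch of selecting among the OutputFormat values above by container name; the string names are illustrative (they mirror the benchmark's Java map later in this change) and are not part of the NDK API:

    #include <media/NdkMediaMuxer.h>
    #include <string>

    OutputFormat outputFormatForName(const std::string& name) {
        if (name == "webm") return AMEDIAMUXER_OUTPUT_FORMAT_WEBM;
        if (name == "3gpp") return AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP;
        if (name == "heif") return AMEDIAMUXER_OUTPUT_FORMAT_HEIF;  // available from API 34
        if (name == "ogg")  return AMEDIAMUXER_OUTPUT_FORMAT_OGG;   // available from API 34
        return AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4;                    // default to MPEG-4
    }
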
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index c8faced..4f045fd 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -13,11 +13,13 @@
     AImageReader_getWindow; # introduced=24
     AImageReader_new; # introduced=24
     AImageReader_newWithUsage; # introduced=26
+    AImageReader_newWithDataSpace; # introduced=UpsideDownCake
     AImageReader_setBufferRemovedListener; # introduced=26
     AImageReader_setImageListener; # introduced=24
     AImage_delete; # introduced=24
     AImage_deleteAsync; # introduced=26
     AImage_getCropRect; # introduced=24
+    AImage_getDataSpace; # introduced=UpsideDownCake
     AImage_getFormat; # introduced=24
     AImage_getHardwareBuffer; # introduced=26
     AImage_getHeight; # introduced=24
@@ -45,6 +47,7 @@
     AMEDIAFORMAT_KEY_ALBUM; # var introduced=29
     AMEDIAFORMAT_KEY_ALBUMART; # var introduced=29
     AMEDIAFORMAT_KEY_ALBUMARTIST; # var introduced=29
+    AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP; # var introduced=34
     AMEDIAFORMAT_KEY_ARTIST; # var introduced=29
     AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO; # var introduced=29
     AMEDIAFORMAT_KEY_AUDIO_SESSION_ID; # var introduced=28
@@ -117,6 +120,7 @@
     AMEDIAFORMAT_KEY_LOW_LATENCY; # var introduced=30
     AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
     AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
+    AMEDIAFORMAT_KEY_MAX_B_FRAMES; # var introduced=34
     AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
     AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER; # var introduced=29
     AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
@@ -181,11 +185,11 @@
     AMediaCodecCryptoInfo_setPattern; # introduced=24
     AMediaCodec_configure;
     AMediaCodec_createCodecByName;
-    AMediaCodec_createCodecByNameForClient; # apex # introduced=31
+    AMediaCodec_createCodecByNameForClient; # systemapi # introduced=31
     AMediaCodec_createDecoderByType;
-    AMediaCodec_createDecoderByTypeForClient; # apex # introduced=31
+    AMediaCodec_createDecoderByTypeForClient; # systemapi # introduced=31
     AMediaCodec_createEncoderByType;
-    AMediaCodec_createEncoderByTypeForClient; # apex # introduced=31
+    AMediaCodec_createEncoderByTypeForClient; # systemapi # introduced=31
     AMediaCodec_delete;
     AMediaCodec_dequeueInputBuffer;
     AMediaCodec_dequeueOutputBuffer;
diff --git a/media/ndk/tests/NdkMediaFormat_test.cpp b/media/ndk/tests/NdkMediaFormat_test.cpp
index 668d0a4..18690b8 100644
--- a/media/ndk/tests/NdkMediaFormat_test.cpp
+++ b/media/ndk/tests/NdkMediaFormat_test.cpp
@@ -51,6 +51,13 @@
    EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
    EXPECT_EQ(i32, 5);
 
+   // verify detecting some bad parameters.
+   AMediaFormat_setInt32(nullptr, "whatever", 6);
+   AMediaFormat_setInt32(fmt1, nullptr, 6);
+
+   EXPECT_FALSE(AMediaFormat_getInt32(nullptr, "whatever", &i32));
+   EXPECT_FALSE(AMediaFormat_getInt32(fmt1, nullptr, &i32));
+
    AMediaFormat_delete(fmt1);
 }
 
@@ -67,6 +74,13 @@
    EXPECT_FALSE(AMediaFormat_getInt64(fmt1, "five", &i64));
    EXPECT_EQ(i64, -1);
 
+   // verify detecting some bad parameters.
+   AMediaFormat_setInt64(nullptr, "whatever", 6);
+   AMediaFormat_setInt64(fmt1, nullptr, 6);
+
+   EXPECT_FALSE(AMediaFormat_getInt64(nullptr, "whatever", &i64));
+   EXPECT_FALSE(AMediaFormat_getInt64(fmt1, nullptr, &i64));
+
    AMediaFormat_delete(fmt1);
 }
 
@@ -80,6 +94,13 @@
    EXPECT_TRUE(AMediaFormat_getSize(fmt1, "medium", &size));
    EXPECT_EQ(size, 10);
 
+   // verify detecting some bad parameters.
+   AMediaFormat_setSize(nullptr, "whatever", 6);
+   AMediaFormat_setSize(fmt1, nullptr, 6);
+
+   EXPECT_FALSE(AMediaFormat_getSize(nullptr, "whatever", &size));
+   EXPECT_FALSE(AMediaFormat_getSize(fmt1, nullptr, &size));
+
    AMediaFormat_delete(fmt1);
 }
 
@@ -90,6 +111,14 @@
    AMediaFormat_setFloat(fmt1, "ship", 0.5);
    EXPECT_TRUE(AMediaFormat_getFloat(fmt1, "boat", &f));
    EXPECT_EQ(f, 1.5);
+
+   // verify detecting some bad parameters.
+   AMediaFormat_setFloat(nullptr, "whatever", 1.5);
+   AMediaFormat_setFloat(fmt1, nullptr, 1.5);
+
+   EXPECT_FALSE(AMediaFormat_getFloat(nullptr, "whatever", &f));
+   EXPECT_FALSE(AMediaFormat_getFloat(fmt1, nullptr, &f));
+
    AMediaFormat_delete(fmt1);
 }
 
@@ -100,6 +129,14 @@
    AMediaFormat_setDouble(fmt1, "dip", 0.5);
    EXPECT_TRUE(AMediaFormat_getDouble(fmt1, "trouble", &d));
    EXPECT_EQ(d, 100.5);
+
+   // verify detecting some bad parameters.
+   AMediaFormat_setDouble(nullptr, "whatever", 1.5);
+   AMediaFormat_setDouble(fmt1, nullptr, 1.5);
+
+   EXPECT_FALSE(AMediaFormat_getDouble(nullptr, "whatever", &d));
+   EXPECT_FALSE(AMediaFormat_getDouble(fmt1, nullptr, &d));
+
    AMediaFormat_delete(fmt1);
 }
 
@@ -111,8 +148,16 @@
    AMediaFormat_setString(fmt1, "stringtheory", content);
    EXPECT_TRUE(AMediaFormat_getString(fmt1, "stringtheory", &out));
    EXPECT_NE(out, nullptr);
+   EXPECT_NE(out, content);     // should not be the original
    EXPECT_EQ(strcmp(out,content), 0);
 
+   // verify detecting some bad parameters.
+   AMediaFormat_setString(nullptr, "whatever", content);
+   AMediaFormat_setString(fmt1, nullptr, content);
+
+   EXPECT_FALSE(AMediaFormat_getString(nullptr, "whatever", &out));
+   EXPECT_FALSE(AMediaFormat_getString(fmt1, nullptr, &out));
+
    AMediaFormat_delete(fmt1);
 }
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 4b44dcf..d41a7f9 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,6 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    min_sdk_version: "28",
+    min_sdk_version: "29",
     target_sdk_version: "30",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
index eea9914..772a29b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
@@ -20,8 +20,6 @@
     package="com.android.media.benchmark">
     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
-    <uses-permission android:name="android.permission.READ_INTERNAL_STORAGE" />
-    <uses-permission android:name="android.permission.WRITE_INTERNAL_STORAGE" />
 
     <application
         tools:ignore="AllowBackup,GoogleAppIndexingWarning,MissingApplicationIcon"
@@ -31,4 +29,4 @@
     <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
             android:targetPackage="com.android.media.benchmark"
             android:label="Benchmark Media Test"/>
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b222d47..a2af701 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -30,7 +30,7 @@
     compileSdkVersion 30
     defaultConfig {
         applicationId "com.android.media.benchmark"
-        minSdkVersion 28
+        minSdkVersion 29
         targetSdkVersion 30
         versionCode 1
         versionName "1.0"
@@ -73,4 +73,4 @@
     testImplementation 'junit:junit:4.13.2'
     androidTestImplementation 'androidx.test:runner:1.3.0'
     androidTestImplementation 'androidx.test.ext:junit:1.1.2'
-}
\ No newline at end of file
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index 21ba957..7fd2752 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -60,14 +60,12 @@
     private static final String mStatsFile =
             mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "MuxerTest";
-    private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
-        {
-            put("mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
-            put("webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM);
-            put("3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP);
-            put("ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
-        }
-    };
+    private static final Map<String, Integer> mMapFormat = Map.of(
+            "mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
+            "webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM,
+            "3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP,
+            "ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
+
     private String mInputFileName;
     private String mFormat;
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
index 043bc9e..0b8e7b2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeDecoder.cpp
@@ -19,6 +19,7 @@
 
 #include <jni.h>
 #include <fstream>
+#include <memory>
 #include <stdio.h>
 #include <string.h>
 #include <sys/stat.h>
@@ -42,7 +43,7 @@
         return -1;
     }
 
-    Decoder *decoder = new Decoder();
+    std::unique_ptr<Decoder> decoder(new (std::nothrow) Decoder());
     Extractor *extractor = decoder->getExtractor();
     if (!extractor) {
         ALOGE("Extractor creation failed");
@@ -79,7 +80,6 @@
         vector<AMediaCodecBufferInfo> frameInfo;
         AMediaCodecBufferInfo info;
         uint32_t inputBufferOffset = 0;
-
         // Get frame data
         while (1) {
             status = extractor->getFrameSample(info);
@@ -110,7 +110,7 @@
         const char *statsFile = env->GetStringUTFChars(jStatsFile, nullptr);
         string sInputReference = string(inputReference);
         decoder->dumpStatistics(sInputReference, sCodecName, (asyncMode ? "async" : "sync"),
-                                statsFile);
+                                (statsFile == nullptr ? "" : statsFile));
         env->ReleaseStringUTFChars(jCodecName, codecName);
         env->ReleaseStringUTFChars(jStatsFile, statsFile);
         env->ReleaseStringUTFChars(jFileName, inputReference);
@@ -125,6 +125,5 @@
         inputFp = nullptr;
     }
     extractor->deInitExtractor();
-    delete decoder;
     return 0;
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp
index a5ef5b8..a297526 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/NativeMuxer.cpp
@@ -19,6 +19,7 @@
 
 #include <jni.h>
 #include <fstream>
+#include <memory>
 #include <string>
 #include <sys/stat.h>
 
@@ -47,7 +48,7 @@
         return MUXER_OUTPUT_FORMAT_INVALID;
     }
 
-    Muxer *muxerObj = new Muxer();
+    std::unique_ptr<Muxer> muxerObj(new (std::nothrow) Muxer());
     Extractor *extractor = muxerObj->getExtractor();
     if (!extractor) {
         ALOGE("Extractor creation failed");
@@ -159,7 +160,6 @@
     }
     env->ReleaseStringUTFChars(jFormat, fmt);
     extractor->deInitExtractor();
-    delete muxerObj;
 
     return 0;
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
index 08035c9..1e10b37 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
@@ -2,13 +2,12 @@
 
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
+import android.media.MediaFormat;
 import android.os.Build;
-
 import java.util.ArrayList;
 
 public class CodecUtils {
     private CodecUtils() {}
-
     /**
      * Queries the MediaCodecList and returns codec names of supported codecs.
      *
@@ -36,4 +35,46 @@
         }
         return supportedCodecs;
     }
+    /**
+     * Returns the name of a codec that supports the given MediaFormat along with the
+     * requested features.
+     *
+     * @param format MediaFormat that the codec should support.
+     * @param isSoftware Specifies whether a software-only codec is requested.
+     * @param features Features the codec must support; may be null.
+     * @param isEncoder Specifies whether an encoder (true) or a decoder (false) is requested.
+     * @return name of the first matching codec, or null if none is found.
+     */
+    public static String getMediaCodec(MediaFormat format, boolean isSoftware,
+                                  String[] features, boolean isEncoder) {
+        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
+        MediaCodecInfo[] codecInfos = mcl.getCodecInfos();
+        String mime = format.getString(MediaFormat.KEY_MIME);
+        for (MediaCodecInfo codecInfo : codecInfos) {
+            if (codecInfo.isEncoder() != isEncoder) continue;
+            if (isSoftware != codecInfo.isSoftwareOnly()) continue;
+            String[] types = codecInfo.getSupportedTypes();
+            for (String type : types) {
+                if (type.equalsIgnoreCase(mime)) {
+                    boolean isOk = true;
+                    MediaCodecInfo.CodecCapabilities codecCapabilities =
+                        codecInfo.getCapabilitiesForType(type);
+                    if (!codecCapabilities.isFormatSupported(format)) {
+                        isOk = false;
+                    }
+                    if (features != null) {
+                        for (String feature : features) {
+                            if (!codecCapabilities.isFeatureSupported(feature)) {
+                                isOk = false;
+                                break;
+                            }
+                        }
+                    }
+                    if (isOk) {
+                        return codecInfo.getName();
+                    }
+                }
+            }
+        }
+        return null;
+    }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 66fee33..9e0d5e4 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -16,6 +16,8 @@
 
 package com.android.media.benchmark.library;
 
+import android.view.Surface;
+
 import android.media.MediaCodec;
 import android.media.MediaCodec.BufferInfo;
 import android.media.MediaFormat;
@@ -28,13 +30,17 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 
-public class Decoder {
+import com.android.media.benchmark.library.IBufferXfer;
+
+public class Decoder implements IBufferXfer.IReceiveBuffer {
     private static final String TAG = "Decoder";
     private static final boolean DEBUG = false;
     private static final int kQueueDequeueTimeoutUs = 1000;
 
     private final Object mLock = new Object();
     private MediaCodec mCodec;
+    private Surface mSurface = null;
+    private boolean mRender = false;
     private ArrayList<BufferInfo> mInputBufferInfo;
     private Stats mStats;
 
@@ -42,14 +48,37 @@
     private boolean mSawOutputEOS;
     private boolean mSignalledError;
 
+    private int mNumInFramesProvided;
+    private int mNumInFramesRequired;
+
     private int mNumOutputFrame;
     private int mIndex;
 
     private ArrayList<ByteBuffer> mInputBuffer;
     private FileOutputStream mOutputStream;
+    private FrameReleaseQueue mFrameReleaseQueue = null;
+    private IBufferXfer.ISendBuffer mIBufferSend = null;
 
+    /* success for decoder */
+    public static final int DECODE_SUCCESS = 0;
+    /* some error happened during decoding */
+    public static final int DECODE_DECODER_ERROR = -1;
+    /* error while creating a decoder */
+    public static final int DECODE_CREATE_ERROR = -2;
     public Decoder() { mStats = new Stats(); }
-
+    public Stats getStats() { return mStats; };
+    @Override
+    public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
+        MediaCodec codec = (MediaCodec)info.obj;
+        codec.releaseOutputBuffer(info.idx, mRender);
+        return true;
+    }
+    @Override
+    public boolean connect(IBufferXfer.ISendBuffer receiver) {
+        Log.d(TAG,"Setting interface of the sender");
+        mIBufferSend = receiver;
+        return true;
+    }
     /**
      * Setup of decoder
      *
@@ -59,6 +88,23 @@
         mSignalledError = false;
         mOutputStream = outputStream;
     }
+    public void setupDecoder(Surface surface, boolean render,
+            boolean useFrameReleaseQueue, int frameRate) {
+        setupDecoder(surface, render, useFrameReleaseQueue, frameRate, -1);
+    }
+    public void setupDecoder(Surface surface, boolean render,
+            boolean useFrameReleaseQueue, int frameRate, int numInFramesRequired) {
+        mSignalledError = false;
+        mOutputStream = null;
+        mSurface = surface;
+        mRender = render;
+        if (useFrameReleaseQueue) {
+            Log.i(TAG, "Using FrameReleaseQueue with frameRate " + frameRate);
+            mFrameReleaseQueue = new FrameReleaseQueue(mRender, frameRate);
+        }
+        mNumInFramesRequired = numInFramesRequired;
+        Log.i(TAG, "Decoding " + mNumInFramesRequired + " frames");
+    }
 
     private MediaCodec createCodec(String codecName, MediaFormat format) throws IOException {
         String mime = format.getString(MediaFormat.KEY_MIME);
@@ -95,7 +141,8 @@
      * @param asyncMode       Will run on async implementation if true
      * @param format          For creating the decoder if codec name is empty and configuring it
      * @param codecName       Will create the decoder with codecName
-     * @return 0 if decode was successful , -1 for fail, -2 for decoder not created
+     * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+     *         DECODE_CREATE_ERROR for decoder not created
      * @throws IOException if the codec cannot be created.
      */
     public int decode(@NonNull ArrayList<ByteBuffer> inputBuffer,
@@ -109,10 +156,17 @@
         mSawOutputEOS = false;
         mNumOutputFrame = 0;
         mIndex = 0;
+        mNumInFramesProvided = 0;
+        if (mNumInFramesRequired < 0) {
+            mNumInFramesRequired = mInputBuffer.size();
+        }
         long sTime = mStats.getCurTime();
         mCodec = createCodec(codecName, format);
         if (mCodec == null) {
-            return -2;
+            return DECODE_CREATE_ERROR;
+        }
+        if (mFrameReleaseQueue != null) {
+            mFrameReleaseQueue.setMediaCodec(mCodec);
         }
         if (asyncMode) {
             mCodec.setCallback(new MediaCodec.Callback() {
@@ -158,7 +212,7 @@
         if (DEBUG) {
             Log.d(TAG, "Media Format : " + format.toString());
         }
-        mCodec.configure(format, null, null, isEncoder);
+        mCodec.configure(format, mSurface, null, isEncoder);
         mCodec.start();
         Log.i(TAG, "Codec started ");
         long eTime = mStats.getCurTime();
@@ -168,7 +222,7 @@
             try {
                 synchronized (mLock) { mLock.wait(); }
                 if (mSignalledError) {
-                    return -1;
+                    return DECODE_DECODER_ERROR;
                 }
             } catch (InterruptedException e) {
                 e.printStackTrace();
@@ -201,7 +255,7 @@
                         Log.e(TAG,
                                 "MediaCodec.dequeueOutputBuffer"
                                         + " returned invalid index " + outputBufferId);
-                        return -1;
+                        return DECODE_DECODER_ERROR;
                     }
                 } else {
                     mStats.addOutputTime();
@@ -212,9 +266,13 @@
                 }
             }
         }
+        if (mFrameReleaseQueue != null) {
+            Log.i(TAG, "Ending FrameReleaseQueue");
+            mFrameReleaseQueue.stopFrameRelease();
+        }
         mInputBuffer.clear();
         mInputBufferInfo.clear();
-        return 0;
+        return DECODE_SUCCESS;
     }
 
     /**
@@ -260,12 +318,22 @@
     }
 
     private void onInputAvailable(int inputBufferId, MediaCodec mediaCodec) {
-        if ((inputBufferId >= 0) && !mSawInputEOS) {
+        if (inputBufferId >= 0) {
             ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
-            BufferInfo bufInfo = mInputBufferInfo.get(mIndex);
-            inputCodecBuffer.put(mInputBuffer.get(mIndex).array());
-            mIndex++;
+            BufferInfo bufInfo;
+            if (mNumInFramesProvided >= mNumInFramesRequired) {
+                Log.i(TAG, "Input frame limit reached");
+                mIndex = mInputBufferInfo.size() - 1;
+                bufInfo = mInputBufferInfo.get(mIndex);
+                if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
+                    Log.e(TAG, "Error in EOS flag for Decoder");
+                }
+            }
+            bufInfo = mInputBufferInfo.get(mIndex);
             mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+            inputCodecBuffer.put(mInputBuffer.get(mIndex).array());
+            mNumInFramesProvided++;
+            mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
             if (mSawInputEOS) {
                 Log.i(TAG, "Saw input EOS");
             }
@@ -290,7 +358,9 @@
         if (DEBUG) {
             Log.d(TAG,
                     "In OutputBufferAvailable ,"
-                            + " output frame number = " + mNumOutputFrame);
+                            + " output frame number = " + mNumOutputFrame
+                            + " timestamp = " + outputBufferInfo.presentationTimeUs
+                            + " size = " + outputBufferInfo.size);
         }
         if (mOutputStream != null) {
             try {
@@ -303,7 +373,21 @@
                 Log.d(TAG, "Error Dumping File: Exception " + e.toString());
             }
         }
-        mediaCodec.releaseOutputBuffer(outputBufferId, false);
+        if (mFrameReleaseQueue != null) {
+            mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+                                            outputBufferInfo.presentationTimeUs);
+        } else if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            info.buf = mediaCodec.getOutputBuffer(outputBufferId);
+            info.idx = outputBufferId;
+            info.obj = mediaCodec;
+            info.bytesRead = outputBufferInfo.size;
+            info.presentationTimeUs = outputBufferInfo.presentationTimeUs;
+            info.flag = outputBufferInfo.flags;
+            mIBufferSend.sendBuffer(this, info);
+        } else {
+            mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+        }
         mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
         if (mSawOutputEOS) {
             Log.i(TAG, "Saw output EOS");
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 754cd8e..63d17ee 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -19,6 +19,7 @@
 import android.media.MediaCodec;
 import android.media.MediaCodec.CodecException;
 import android.media.MediaFormat;
+import android.view.Surface;
 import android.util.Log;
 
 import androidx.annotation.NonNull;
@@ -28,34 +29,43 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-public class Encoder {
+public class Encoder implements IBufferXfer.IReceiveBuffer {
     // Change in AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE should also be taken to
     // kDefaultAudioEncodeFrameSize present in BenchmarkCommon.h
     private static final int AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE = 4096;
     private static final String TAG = "Encoder";
     private static final boolean DEBUG = false;
     private static final int kQueueDequeueTimeoutUs = 1000;
-
     private final Object mLock = new Object();
-    private MediaCodec mCodec;
+    private MediaCodec mCodec = null;
     private String mMime;
     private Stats mStats;
 
     private int mOffset;
     private int mFrameSize;
     private int mNumInputFrame;
-    private int mNumFrames;
+    private int mNumFrames = 0;
     private int mFrameRate;
     private int mSampleRate;
     private long mInputBufferSize;
 
+    private int mMinOutputBuffers = 0;
+    private int mNumOutputBuffers = 0;
+    private boolean mUseSurface = false;
+
     private boolean mSawInputEOS;
     private boolean mSawOutputEOS;
     private boolean mSignalledError;
 
-    private FileInputStream mInputStream;
-    private FileOutputStream mOutputStream;
-
+    private FileInputStream mInputStream = null;
+    private FileOutputStream mOutputStream = null;
+    private IBufferXfer.ISendBuffer mIBufferSend = null;
+    /* success for encoder */
+    public static final int ENCODE_SUCCESS = 0;
+    /* some error happened during encoding */
+    public static final int ENCODE_ENCODER_ERROR = -1;
+    /* error while creating an encoder */
+    public static final int ENCODE_CREATE_ERROR = -2;
     public Encoder() {
         mStats = new Stats();
         mNumInputFrame = 0;
@@ -63,6 +73,25 @@
         mSawOutputEOS = false;
         mSignalledError = false;
     }
+    @Override
+    public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
+        if (DEBUG) {
+            Log.d(TAG,"Encoder Getting buffers from external: "
+                + " Bytes Read: " + info.bytesRead
+                + " PresentationUs " + info.presentationTimeUs
+                + " flags: " + info.flag);
+        }
+        MediaCodec codec = (MediaCodec)info.obj;
+        codec.queueInputBuffer(info.idx, 0, info.bytesRead,
+            info.presentationTimeUs, info.flag);
+        return true;
+    }
+    @Override
+    public boolean connect(IBufferXfer.ISendBuffer receiver) {
+        mIBufferSend = receiver;
+        return true;
+    }
+    public Stats getStats() { return mStats; }
 
     /**
      * Setup of encoder
@@ -75,6 +104,17 @@
         this.mInputStream = fileInputStream;
         this.mOutputStream = encoderOutputStream;
     }
+    /**
+     * Setup of encoder for surface-based input
+     *
+     * @param useSurface indicates that the application provides input through a surface
+     * @param numOutputBuffers minimum number of output buffers to receive before
+     * signalling output end of stream
+     */
+    public void setupEncoder(boolean useSurface, int numOutputBuffers) {
+        this.mUseSurface = useSurface;
+        this.mMinOutputBuffers = numOutputBuffers;
+    }
 
     private MediaCodec createCodec(String codecName, String mime) throws IOException {
         try {
@@ -100,7 +140,52 @@
             return null;
         }
     }
+    /**
+     * Creates and configures the encoder with the given name, format and mime,
+     * provided valid parameters are passed as input. This allows the codec to be
+     * configured first so that, for example, an input surface can be obtained
+     * before encoding starts.
+     *
+     * @param codecName    Name of the encoder to create
+     * @param encodeFormat Format of the output data
+     * @param mime         MIME type used for the encode format
+     * @return ENCODE_SUCCESS if the encoder was created and configured successfully,
+     *         ENCODE_CREATE_ERROR if the encoder could not be created or configured
+     * @throws IOException If the codec cannot be created.
+     */
 
+    public int createAndConfigure(String codecName, MediaFormat encodeFormat,
+                                  String mime) throws IOException {
+        if (mCodec == null) {
+            mMime = mime;
+            mCodec = createCodec(codecName, mime);
+            if (mCodec == null) {
+                return ENCODE_CREATE_ERROR;
+            }
+            /*Configure Codec*/
+            try {
+                mCodec.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            } catch(IllegalArgumentException
+                  | IllegalStateException
+                  | MediaCodec.CryptoException e) {
+                Log.e(TAG, "Failed to configure " + mCodec.getName() + " encoder.");
+                e.printStackTrace();
+                return ENCODE_CREATE_ERROR;
+            }
+        }
+        return ENCODE_SUCCESS;
+    }
+    /**
+     * Requests the surface to be used as input to the encoder.
+     * @return a valid surface, or null if the encoder has not been configured.
+     */
+    public Surface getInputSurface() {
+        Surface inputSurface = null;
+        if (mCodec != null) {
+            inputSurface = mCodec.createInputSurface();
+        }
+        return inputSurface;
+    }
     /**
      * Encodes the given raw input file and measures the performance of encode operation,
      * provided a valid list of parameters are passed as inputs.
@@ -110,43 +195,39 @@
      * @param encodeFormat Format of the output data
      * @param frameSize    Size of the frame
      * @param asyncMode    Will run on async implementation if true
-     * @return 0 if encode was successful , -1 for fail, -2 for encoder not created
+     * @return ENCODE_SUCCESS if encode was successful, ENCODE_ENCODER_ERROR on failure,
+     *         ENCODE_CREATE_ERROR if the encoder could not be created
      * @throws IOException If the codec cannot be created.
      */
     public int encode(String codecName, MediaFormat encodeFormat, String mime, int frameRate,
                       int sampleRate, int frameSize, boolean asyncMode) throws IOException {
-        mInputBufferSize = mInputStream.getChannel().size();
-        mMime = mime;
+        mInputBufferSize = (mInputStream != null) ? mInputStream.getChannel().size() : 0;
         mOffset = 0;
         mFrameRate = frameRate;
         mSampleRate = sampleRate;
         long sTime = mStats.getCurTime();
-        mCodec = createCodec(codecName, mime);
         if (mCodec == null) {
-            return -2;
-        }
-        /*Configure Codec*/
-        try {
-            mCodec.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
-        } catch (IllegalArgumentException | IllegalStateException | MediaCodec.CryptoException e) {
-            Log.e(TAG, "Failed to configure " + mCodec.getName() + " encoder.");
-            e.printStackTrace();
-            return -2;
-        }
-        if (mMime.startsWith("video/")) {
-            mFrameSize = frameSize;
-        } else {
-            int maxInputSize = AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE;
-            MediaFormat format = mCodec.getInputFormat();
-            if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
-                maxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
-            }
-            mFrameSize = frameSize;
-            if (mFrameSize > maxInputSize && maxInputSize > 0) {
-                mFrameSize = maxInputSize;
+            int status = createAndConfigure(codecName, encodeFormat, mime);
+            if (status != ENCODE_SUCCESS) {
+                return status;
             }
         }
-        mNumFrames = (int) ((mInputBufferSize + mFrameSize - 1) / mFrameSize);
+        if (!mUseSurface) {
+            if (mMime.startsWith("video/")) {
+                mFrameSize = frameSize;
+            } else {
+                int maxInputSize = AUDIO_ENCODE_DEFAULT_MAX_INPUT_SIZE;
+                MediaFormat format = mCodec.getInputFormat();
+                if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
+                    maxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
+                }
+                mFrameSize = frameSize;
+                if (mFrameSize > maxInputSize && maxInputSize > 0) {
+                    mFrameSize = maxInputSize;
+                }
+            }
+            mNumFrames = (int) ((mInputBufferSize + mFrameSize - 1) / mFrameSize);
+        }
         if (asyncMode) {
             mCodec.setCallback(new MediaCodec.Callback() {
                 @Override
@@ -196,7 +277,7 @@
             try {
                 synchronized (mLock) { mLock.wait(); }
                 if (mSignalledError) {
-                    return -1;
+                    return ENCODE_ENCODER_ERROR;
                 }
             } catch (InterruptedException e) {
                 e.printStackTrace();
@@ -204,12 +285,12 @@
         } else {
             while (!mSawOutputEOS && !mSignalledError) {
                 /* Queue input data */
-                if (!mSawInputEOS) {
+                if (!mSawInputEOS && !mUseSurface) {
                     int inputBufferId = mCodec.dequeueInputBuffer(kQueueDequeueTimeoutUs);
                     if (inputBufferId < 0 && inputBufferId != MediaCodec.INFO_TRY_AGAIN_LATER) {
                         Log.e(TAG, "MediaCodec.dequeueInputBuffer " + "returned invalid index : " +
                                 inputBufferId);
-                        return -1;
+                        return ENCODE_ENCODER_ERROR;
                     }
                     mStats.addInputTime();
                     onInputAvailable(mCodec, inputBufferId);
@@ -225,7 +306,7 @@
                     } else if (outputBufferId != MediaCodec.INFO_TRY_AGAIN_LATER) {
                         Log.e(TAG, "MediaCodec.dequeueOutputBuffer" + " returned invalid index " +
                                 outputBufferId);
-                        return -1;
+                        return ENCODE_ENCODER_ERROR;
                     }
                 } else {
                     mStats.addOutputTime();
@@ -236,7 +317,7 @@
                 }
             }
         }
-        return 0;
+        return ENCODE_SUCCESS;
     }
 
     private void onOutputAvailable(MediaCodec mediaCodec, int outputBufferId,
@@ -260,13 +341,25 @@
                 return;
             }
         }
+        mNumOutputBuffers++;
+        if (DEBUG) {
+            Log.d(TAG,
+                "In OutputBufferAvailable ,"
+                + " timestamp = " + outputBufferInfo.presentationTimeUs
+                + " size = " + outputBufferInfo.size
+                + " flags = " + outputBufferInfo.flags);
+        }
+
         mStats.addFrameSize(outputBuffer.remaining());
         mediaCodec.releaseOutputBuffer(outputBufferId, false);
         mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+        if (mUseSurface && !mSawOutputEOS) {
+            mSawOutputEOS = (mNumOutputBuffers >= mMinOutputBuffers);
+        }
     }
 
     private void onInputAvailable(MediaCodec mediaCodec, int inputBufferId) throws IOException {
-        if (mSawInputEOS || inputBufferId < 0) {
+        if (mSawInputEOS || inputBufferId < 0 || this.mUseSurface) {
             if (mSawInputEOS) {
                 Log.i(TAG, "Saw input EOS");
             }
@@ -282,6 +375,14 @@
             mSignalledError = true;
             return;
         }
+        if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            info.buf = inputBuffer;
+            info.idx = inputBufferId;
+            info.obj = mediaCodec;
+            mIBufferSend.sendBuffer(this, info);
+            return;
+        }
         int bufSize = inputBuffer.capacity();
         int bytesToRead = mFrameSize;
         if (mInputBufferSize - mOffset < mFrameSize) {
@@ -356,9 +457,11 @@
         mOffset = 0;
         mInputBufferSize = 0;
         mNumInputFrame = 0;
+        mMinOutputBuffers = 0;
         mSawInputEOS = false;
         mSawOutputEOS = false;
         mSignalledError = false;
+        mUseSurface = false;
         mStats.reset();
     }
 }
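
For reference, a minimal sketch (not part of this change) of how the new surface-input path in Encoder is meant to be driven: configure first with createAndConfigure(), fetch the input surface, switch to surface mode with setupEncoder(useSurface, numOutputBuffers), then call encode(). The codec name, resolution, bitrate and buffer count below are illustrative assumptions only.

import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.view.Surface;

import com.android.media.benchmark.library.Encoder;

import java.io.IOException;

final class SurfaceEncodeSketch {
    // Hypothetical helper; codec name, format values and buffer count are assumptions.
    static int encodeFromSurface(Encoder encoder) throws IOException {
        String mime = MediaFormat.MIMETYPE_VIDEO_AVC;
        MediaFormat format = MediaFormat.createVideoFormat(mime, 1920, 1080);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 4000000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        if (encoder.createAndConfigure("c2.android.avc.encoder", format, mime)
                != Encoder.ENCODE_SUCCESS) {
            return Encoder.ENCODE_CREATE_ERROR;
        }
        Surface inputSurface = encoder.getInputSurface(); // valid only after configure
        encoder.setupEncoder(true /* useSurface */, 300 /* min output buffers before EOS */);
        // The caller renders frames into inputSurface (e.g. a decoder configured with
        // this surface) while encode() drains the encoder output.
        return encoder.encode("c2.android.avc.encoder", format, mime, 30 /* frameRate */,
                0 /* sampleRate */, 0 /* frameSize */, false /* asyncMode */);
    }
}

The buffer-count threshold maps to the new mMinOutputBuffers check in onOutputAvailable(), which is what ends a surface-mode encode, since no input EOS buffer is queued on that path.
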
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
new file mode 100644
index 0000000..84554d3
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+import android.media.MediaCodec;
+import android.util.Log;
+import androidx.annotation.NonNull;
+import java.nio.ByteBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+
+public class FrameReleaseQueue {
+    private static final String TAG = "FrameReleaseQueue";
+
+    private MediaCodec mCodec;
+    private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
+    private ReleaseThread mReleaseThread;
+    private boolean doFrameRelease = false;
+    private boolean mRender = false;
+    private int mWaitTime = 40; // milliseconds per frame
+    private int mWaitTimeCorrection = 0;
+    private int mCorrectionLoopCount;
+    private int firstReleaseTime = -1;
+    private static final int THRESHOLD_TIME = 5; // threshold in milliseconds
+
+    private static class FrameInfo {
+        private int number;
+        private int bufferId;
+        private int displayTime;
+        public FrameInfo(int frameNumber, int frameBufferId, int frameDisplayTime) {
+            this.number = frameNumber;
+            this.bufferId = frameBufferId;
+            this.displayTime = frameDisplayTime;
+        }
+    }
+
+    private class ReleaseThread extends Thread {
+        public void run() {
+            int nextReleaseTime = 0;
+            int loopCount = 0;
+            while (doFrameRelease || mFrameInfoQueue.size() > 0) {
+                FrameInfo curFrameInfo = mFrameInfoQueue.peek();
+                if (curFrameInfo == null) {
+                    nextReleaseTime += mWaitTime;
+                } else {
+                    if (curFrameInfo.displayTime == 0) {
+                        // first frame of loop
+                        firstReleaseTime = getCurSysTime();
+                        nextReleaseTime = firstReleaseTime + mWaitTime;
+                        popAndRelease(curFrameInfo, true);
+                    } else if (!doFrameRelease && mFrameInfoQueue.size() == 1) {
+                        // EOS
+                        Log.i(TAG, "EOS");
+                        popAndRelease(curFrameInfo, false);
+                    } else {
+                        nextReleaseTime += mWaitTime;
+                        int curSysTime = getCurSysTime();
+                        int curMediaTime = curSysTime - firstReleaseTime;
+                        while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
+                                curFrameInfo.displayTime <= curMediaTime) {
+                            if (!((curMediaTime - curFrameInfo.displayTime) < THRESHOLD_TIME)) {
+                                Log.d(TAG, "Dropping expired frame " + curFrameInfo.number +
+                                    " display time " + curFrameInfo.displayTime +
+                                    " current time " + curMediaTime);
+                                popAndRelease(curFrameInfo, false);
+                            } else {
+                                popAndRelease(curFrameInfo, true);
+                            }
+                            curFrameInfo = mFrameInfoQueue.peek();
+                        }
+                        if (curFrameInfo != null && curFrameInfo.displayTime > curMediaTime) {
+                            if ((curFrameInfo.displayTime - curMediaTime) < THRESHOLD_TIME) {
+                                popAndRelease(curFrameInfo, true);
+                            }
+                        }
+                    }
+                }
+                int sleepTime = nextReleaseTime - getCurSysTime();
+                if (sleepTime > 0) {
+                    try {
+                        Thread.sleep(sleepTime);
+                    } catch (InterruptedException e) {
+                        Log.e(TAG, "Threw InterruptedException on sleep");
+                    }
+                } else {
+                    Log.d(TAG, "Thread sleep time less than 1");
+                }
+                if (loopCount % mCorrectionLoopCount == 0) {
+                    nextReleaseTime += mWaitTimeCorrection;
+                }
+                loopCount += 1;
+            }
+        }
+    }
+
+    public FrameReleaseQueue(boolean render, int frameRate) {
+        this.mFrameInfoQueue = new LinkedBlockingQueue<>();
+        this.mReleaseThread = new ReleaseThread();
+        this.doFrameRelease = true;
+        this.mRender = render;
+        this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
+        int waitTimeRemainder = 1000 % frameRate;
+        int gcd = gcd(frameRate, waitTimeRemainder);
+        this.mCorrectionLoopCount = frameRate / gcd;
+        this.mWaitTimeCorrection = waitTimeRemainder / gcd;
+        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+    }
+
+    private static int gcd(int a, int b) {
+        return b == 0 ? a : gcd(b, a % b);
+    }
+
+    public void setMediaCodec(MediaCodec mediaCodec) {
+        this.mCodec = mediaCodec;
+    }
+
+    public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
+        int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
+        FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
+        boolean pushSuccess = mFrameInfoQueue.offer(curFrameInfo);
+        if (!pushSuccess) {
+            Log.e(TAG, "Failed to push frame with buffer id " + curFrameInfo.bufferId);
+            return false;
+        }
+        if (!mReleaseThread.isAlive()) {
+            mReleaseThread.start();
+            Log.i(TAG, "Started frame release thread");
+        }
+        return true;
+    }
+
+    private int getCurSysTime() {
+        return (int)(System.nanoTime()/1000000);
+    }
+
+    private void popAndRelease(FrameInfo curFrameInfo, boolean renderThisFrame) {
+        try {
+            curFrameInfo = mFrameInfoQueue.take();
+        } catch (InterruptedException e) {
+            Log.e(TAG, "Threw InterruptedException on take");
+        }
+        boolean actualRender = (renderThisFrame && mRender);
+        try {
+            mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
+        } catch (IllegalStateException e) {
+            Log.e(TAG,
+                    "Threw IllegalStateException on releaseOutputBuffer for frame "
+                            + curFrameInfo.number);
+        }
+    }
+
+    public void stopFrameRelease() {
+        doFrameRelease = false;
+        try {
+            mReleaseThread.join();
+            Log.i(TAG, "Joined frame release thread");
+        } catch (InterruptedException e) {
+            Log.e(TAG, "Threw InterruptedException on thread join");
+        }
+    }
+}
+
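
As a usage sketch of the new FrameReleaseQueue (assumptions: a MediaCodec decoder already configured with an output Surface and started, and a known frame rate), the queue is handed the codec once, fed one entry per dequeued output buffer, and stopped after output EOS. This mirrors the calls added to Decoder.java above.

import android.media.MediaCodec;

import com.android.media.benchmark.library.FrameReleaseQueue;

final class FrameReleaseQueueSketch {
    // Hypothetical helper showing the call order; 'codec' must already be configured
    // with an output Surface and started by the caller.
    static FrameReleaseQueue startQueue(MediaCodec codec, int frameRate) {
        FrameReleaseQueue queue = new FrameReleaseQueue(true /* render */, frameRate);
        queue.setMediaCodec(codec);
        return queue;
    }

    // Called from the output-buffer path for every dequeued buffer; the release
    // thread is started lazily on the first pushFrame().
    static void onOutput(FrameReleaseQueue queue, int frameNumber, int bufferId,
            MediaCodec.BufferInfo info) {
        queue.pushFrame(frameNumber, bufferId, info.presentationTimeUs);
    }

    // Called once output EOS has been observed; drains the queue and joins the thread.
    static void finish(FrameReleaseQueue queue) {
        queue.stopFrameRelease();
    }
}
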
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
new file mode 100644
index 0000000..a75962c
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+import android.media.MediaCodec;
+import java.nio.ByteBuffer;
+/**
+ * Interfaces that can be used to send buffers to an external
+ * entity and receive them back through callbacks.
+ */
+public class IBufferXfer {
+  static class BufferXferInfo {
+      public ByteBuffer buf;
+      public int idx;
+      public Object obj;
+      int flag;
+      int bytesRead;
+      long presentationTimeUs;
+  }
+
+  public interface IReceiveBuffer {
+      // Implemented by the sender to get its buffers back.
+      boolean receiveBuffer(BufferXferInfo info);
+      // Establishes a connection between the buffer sender and the receiver.
+      // Implemented by the entity that sends buffers to the receiver;
+      // receiverInterface is the receiver's interface, which the sender
+      // uses to send buffers.
+      boolean connect(IBufferXfer.ISendBuffer receiverInterface);
+  }
+  // Implemented by an entity that does not own the buffers and only
+  // manages them (usually the receiver).
+  // The receiver uses returnIface to return the buffers to the sender.
+  public interface ISendBuffer {
+      boolean sendBuffer(IBufferXfer.IReceiveBuffer returnIface,
+                              BufferXferInfo info);
+  }
+}
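
To make the contract concrete, here is a hypothetical producer-side endpoint (a sketch, not part of this change): when the broker hands one of its MediaCodec output buffers back through receiveBuffer(), it releases the buffer to the codec without rendering, and it remembers the ISendBuffer passed to connect() so it can later call sendBuffer(this, info). It lives in the library package because BufferXferInfo is package-private.

package com.android.media.benchmark.library;

import android.media.MediaCodec;

// Hypothetical producer-side IReceiveBuffer; mirrors how a decoder-style owner of
// output buffers is expected to use the interfaces above.
final class ReleasingProducer implements IBufferXfer.IReceiveBuffer {
    private IBufferXfer.ISendBuffer mSender;

    @Override
    public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
        // The buffer has been consumed by the other side; return it to the codec.
        ((MediaCodec) info.obj).releaseOutputBuffer(info.idx, false /* render */);
        return true;
    }

    @Override
    public boolean connect(IBufferXfer.ISendBuffer sender) {
        mSender = sender; // used later as mSender.sendBuffer(this, info)
        return true;
    }
}
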
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
new file mode 100644
index 0000000..ab55df5
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+/**
+ * Class that manages the buffer senders
+*/
+import com.android.media.benchmark.library.IBufferXfer;
+import java.util.ArrayDeque;
+import android.util.Log;
+public class IBufferXferImpl implements IBufferXfer.ISendBuffer {
+
+  private static class BufferInfo {
+      public IBufferXfer.IReceiveBuffer rIface;
+      public IBufferXfer.BufferXferInfo info;
+  }
+  private final String TAG = "IBufferXferImpl";
+  private final ArrayDeque<BufferInfo> mProducerQueue = new ArrayDeque<>();
+  private final ArrayDeque<BufferInfo> mConsumerQueue = new ArrayDeque<>();
+  private IBufferXfer.IReceiveBuffer mProducer = null;
+  private IBufferXfer.IReceiveBuffer mConsumer = null;
+  private final Object mLock = new Object();
+
+  public IBufferXferImpl(IBufferXfer.IReceiveBuffer producer,
+      IBufferXfer.IReceiveBuffer consumer) {
+      mProducer = producer;
+      mConsumer = consumer;
+      // Attach this to be their receiver
+      mProducer.connect(this);
+      mConsumer.connect(this);
+  }
+  @Override
+  public boolean sendBuffer(IBufferXfer.IReceiveBuffer rIface,
+                     IBufferXfer.BufferXferInfo bufferInfo) {
+      if (rIface != mProducer && rIface != mConsumer) {
+         Log.e(TAG, "Interfaces does not match");
+        return false;
+      }
+      boolean status = true;
+      BufferInfo pBuf = null, cBuf = null;
+      synchronized(mLock) {
+          // see which interface this buffer belongs to
+          // producer has a filled buffer and the consumer
+          // buffer needs to be filled.
+          if (rIface == mProducer) {
+              if (mConsumerQueue.size() > 0) {
+                  cBuf = mConsumerQueue.remove();
+                  pBuf = new BufferInfo();
+                  pBuf.rIface = rIface;
+                  pBuf.info = bufferInfo;
+              } else {
+                  BufferInfo info = new BufferInfo();
+                  info.rIface = rIface;
+                  info.info = bufferInfo;
+                  mProducerQueue.add(info);
+              }
+          } else if (rIface == mConsumer) {
+              if (mProducerQueue.size() > 0) {
+                  pBuf = mProducerQueue.remove();
+                  cBuf = new BufferInfo();
+                  cBuf.rIface = rIface;
+                  cBuf.info = bufferInfo;
+              } else {
+                  BufferInfo info = new BufferInfo();
+                  info.rIface = rIface;
+                  info.info = bufferInfo;
+                  mConsumerQueue.add(info);
+              }
+          } else {
+              status = false;
+          }
+      }
+
+      if (pBuf != null && cBuf != null) {
+          int bytesRead = 0;
+          if (cBuf.info.buf != null && pBuf.info.buf != null) {
+              if (cBuf.info.buf.remaining() >= pBuf.info.buf.remaining()) {
+                  bytesRead = pBuf.info.buf.remaining();
+                  cBuf.info.buf.put(pBuf.info.buf);
+              } else {
+                  Log.e(TAG, "Something is wrong with the sizes P:" +
+                      pBuf.info.buf.remaining() +" C:" + cBuf.info.buf.remaining());
+              }
+          }
+          cBuf.info.bytesRead = bytesRead;
+          cBuf.info.presentationTimeUs = pBuf.info.presentationTimeUs;
+          cBuf.info.flag = pBuf.info.flag;
+
+          if (pBuf.rIface != null) {
+              pBuf.rIface.receiveBuffer(pBuf.info);
+          }
+          if (cBuf.rIface != null) {
+              cBuf.rIface.receiveBuffer(cBuf.info);
+          }
+      }
+      return status;
+  }
+  public boolean resetAll() {
+      synchronized(mLock) {
+          while (mProducerQueue.size() > 0) {
+              BufferInfo info = mProducerQueue.remove();
+              info.rIface.receiveBuffer(info.info);
+          }
+          while (mConsumerQueue.size() > 0) {
+              BufferInfo info = mConsumerQueue.remove();
+              info.rIface.receiveBuffer(info.info);
+          }
+          mProducer = null;
+          mConsumer = null;
+      }
+      return true;
+  }
+}
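
A sketch of how the broker is expected to be wired, based on the constructor above and on the Encoder.connect() and Decoder sendBuffer(this, ...) calls earlier in this change (the Decoder is assumed to implement IBufferXfer.IReceiveBuffer, which its sendBuffer(this, info) call implies): the decoder offers filled output buffers, the encoder offers empty input buffers, and IBufferXferImpl pairs them, copies the data, and returns each buffer to its owner via receiveBuffer().

import com.android.media.benchmark.library.Decoder;
import com.android.media.benchmark.library.Encoder;
import com.android.media.benchmark.library.IBufferXferImpl;

final class TranscodeWiringSketch {
    // Hypothetical wiring; decode() and encode() would run on their own threads.
    static IBufferXferImpl wire(Decoder decoder, Encoder encoder) {
        // The constructor calls connect(this) on both endpoints, so every
        // sendBuffer() from either side is routed through this broker.
        return new IBufferXferImpl(decoder, encoder);
    }

    static void tearDown(IBufferXferImpl broker) {
        // Returns any queued buffers to their owners and drops both endpoints.
        broker.resetAll();
    }
}
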
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
index 7245a3a..0ebf798 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
@@ -33,7 +33,17 @@
     private long mDeInitTimeNs;
     private long mStartTimeNs;
     private ArrayList<Integer> mFrameSizes;
+    /*
+     * Array for holding the wallclock time
+     * for each input buffer available.
+     */
     private ArrayList<Long> mInputTimer;
+    /*
+     * Array for holding the wallclock time
+     * for each output buffer available.
+     * This is used for determining the decoded
+     * frame intervals.
+     */
     private ArrayList<Long> mOutputTimer;
 
     public Stats() {
@@ -76,9 +86,15 @@
 
     public long getDeInitTime() { return mDeInitTimeNs; }
 
+    public long getStartTime() { return mStartTimeNs; }
+
+    public ArrayList<Long> getOutputTimers() { return mOutputTimer; }
+
+    public ArrayList<Long> getInputTimers() { return mInputTimer; }
+
     public long getTimeDiff(long sTime, long eTime) { return (eTime - sTime); }
 
-    private long getTotalTime() {
+    public long getTotalTime() {
         if (mOutputTimer.size() == 0) {
             return -1;
         }
@@ -86,7 +102,7 @@
         return lastTime - mStartTimeNs;
     }
 
-    private long getTotalSize() {
+    public long getTotalSize() {
         long totalSize = 0;
         for (long size : mFrameSizes) {
             totalSize += size;
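
With getTotalTime() and getTotalSize() now public and the per-buffer timers exposed, callers can derive the same figures the native side reports; a small sketch, assuming the Stats object has already recorded a completed decode or encode run:

import com.android.media.benchmark.library.Stats;

import java.util.ArrayList;

final class StatsSummarySketch {
    // Hypothetical helper: derives throughput and time-to-first-frame from a Stats
    // object after a run has completed.
    static void logSummary(Stats stats) {
        long totalTimeNs = stats.getTotalTime();     // start time to last output buffer
        long totalSizeBytes = stats.getTotalSize();  // sum of recorded frame sizes
        ArrayList<Long> outputTimesNs = stats.getOutputTimers();
        if (totalTimeNs <= 0 || outputTimesNs.isEmpty()) {
            return; // no output was produced
        }
        long timeToFirstFrameNs = outputTimesNs.get(0) - stats.getStartTime();
        long bytesPerSec = (totalSizeBytes * 1000000000L) / totalTimeNs;
        System.out.println("timeToFirstFrameNs=" + timeToFirstFrameNs
                + " bytesPerSec=" + bytesPerSec);
    }
}
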
diff --git a/media/tests/benchmark/src/native/common/BenchmarkCommon.h b/media/tests/benchmark/src/native/common/BenchmarkCommon.h
index 40a8c9e..ba3e81a 100644
--- a/media/tests/benchmark/src/native/common/BenchmarkCommon.h
+++ b/media/tests/benchmark/src/native/common/BenchmarkCommon.h
@@ -50,7 +50,7 @@
         {
             lock_guard<mutex> lock(mMutex);
             needsNotify = mQueue.empty();
-            mQueue.push(move(elem));
+            mQueue.push(std::move(elem));
         }
         if (needsNotify) mQueueNotEmptyCondition.notify_one();
     }
diff --git a/media/tests/benchmark/src/native/common/Stats.cpp b/media/tests/benchmark/src/native/common/Stats.cpp
index bfde125..d55a22d 100644
--- a/media/tests/benchmark/src/native/common/Stats.cpp
+++ b/media/tests/benchmark/src/native/common/Stats.cpp
@@ -35,13 +35,18 @@
  * \param mode           the operating mode: sync/async.
  * \param statsFile      the file where the stats data is to be written.
  */
-void Stats::dumpStatistics(string operation, string inputReference, int64_t durationUs,
-                           string componentName, string mode, string statsFile) {
+void Stats::dumpStatistics(const string& operation, const string& inputReference,
+                           int64_t durationUs, const string& componentName,
+                           const string& mode, const string& statsFile) {
     ALOGV("In %s", __func__);
     if (!mOutputTimer.size()) {
         ALOGE("No output produced");
         return;
     }
+    if (statsFile.empty()) {
+        return uploadMetrics(operation, inputReference, durationUs, componentName,
+                              mode);
+    }
     nsecs_t totalTimeTakenNs = getTotalTime();
     nsecs_t timeTakenPerSec = (totalTimeTakenNs * 1000000) / durationUs;
     nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
@@ -87,3 +92,67 @@
     out << rowData;
     out.close();
 }
+
+/**
+ * Logs the stats of the operation for a given input media so they can be collected by a metrics listener.
+ *
+ * \param operation      describes the operation performed on the input media
+ *                       (i.e. extract/mux/decode/encode)
+ * \param inputReference input media
+ * \param durationUs     is a duration of the input media in microseconds.
+ * \param componentName  describes the codecName/muxFormat/mimeType.
+ * \param mode           the operating mode: sync/async.
+ *
+ */
+
+#define LOG_METRIC(...) \
+    __android_log_print(ANDROID_LOG_INFO, "ForTimingCollector", __VA_ARGS__)
+
+void Stats::uploadMetrics(const string& operation, const string& inputReference,
+                          const int64_t& durationUs, const string& componentName,
+                          const string& mode) {
+
+    ALOGV("In %s", __func__);
+    (void)durationUs;
+    (void)componentName;
+    if (!mOutputTimer.size()) {
+        ALOGE("No output produced");
+        return;
+    }
+    nsecs_t totalTimeTakenNs = getTotalTime();
+    nsecs_t timeToFirstFrameNs = *mOutputTimer.begin() - mStartTimeNs;
+    int32_t size = std::accumulate(mFrameSizes.begin(), mFrameSizes.end(), 0);
+    // get min and max output intervals.
+    nsecs_t intervalNs;
+    nsecs_t minTimeTakenNs = INT64_MAX;
+    nsecs_t maxTimeTakenNs = 0;
+    nsecs_t prevIntervalNs = mStartTimeNs;
+    for (int32_t idx = 0; idx < mOutputTimer.size() - 1; idx++) {
+        intervalNs = mOutputTimer.at(idx) - prevIntervalNs;
+        prevIntervalNs = mOutputTimer.at(idx);
+        if (minTimeTakenNs > intervalNs) minTimeTakenNs = intervalNs;
+        else if (maxTimeTakenNs < intervalNs) maxTimeTakenNs = intervalNs;
+    }
+
+    // Compute the derived metrics to be reported.
+    int64_t dataSize = size;
+    int64_t bytesPerSec = ((int64_t)dataSize * 1000000000) / totalTimeTakenNs;
+    (void)mode;
+    (void)operation;
+    (void)inputReference;
+    string prefix = "CodecStats_NativeDec";
+    prefix.append("_").append(componentName);
+    // Reports the time taken to initialize the codec.
+    LOG_METRIC("%s_CodecInitTimeNs:%lld", prefix.c_str(), (long long)mInitTimeNs);
+    // Reports the time taken to free the codec.
+    LOG_METRIC("%s_CodecDeInitTimeNs:%lld", prefix.c_str(), (long long)mDeInitTimeNs);
+    // Reports the min time taken between output frames from the codec
+    LOG_METRIC("%s_CodecMinTimeNs:%lld", prefix.c_str(), (long long)minTimeTakenNs);
+    // Reports the max time between the output frames from the codec
+    LOG_METRIC("%s_CodecMaxTimeNs:%lld", prefix.c_str(), (long long)maxTimeTakenNs);
+    // Reports the raw throughput (bytes/sec) of the codec for the entire media
+    LOG_METRIC("%s_ProcessedBytesPerSec:%lld", prefix.c_str(), (long long)bytesPerSec);
+    // Reports the time taken to get the first frame from the codec
+    LOG_METRIC("%s_TimeforFirstFrame:%lld", prefix.c_str(), (long long)timeToFirstFrameNs);
+
+}
diff --git a/media/tests/benchmark/src/native/common/Stats.h b/media/tests/benchmark/src/native/common/Stats.h
index 18e4b06..0ba511f 100644
--- a/media/tests/benchmark/src/native/common/Stats.h
+++ b/media/tests/benchmark/src/native/common/Stats.h
@@ -102,8 +102,12 @@
         return (*(mOutputTimer.end() - 1) - mStartTimeNs);
     }
 
-    void dumpStatistics(string operation, string inputReference, int64_t duarationUs,
-                        string codecName = "", string mode = "", string statsFile = "");
-};
+    void dumpStatistics(const string& operation, const string& inputReference,
+                        int64_t durationUs, const string& componentName = "",
+                        const string& mode = "", const string& statsFile = "");
 
+    void uploadMetrics(const string& operation, const string& inputReference,
+                      const int64_t& durationUs, const string& componentName = "",
+                      const string& mode = "");
+};
 #endif  // __STATS_H__
diff --git a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
index ca79881..5b7a471 100644
--- a/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/C2Encoder.cpp
@@ -17,11 +17,13 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2Encoder"
 
+#include <memory>
+
 #include "C2Encoder.h"
 
 int32_t C2Encoder::createCodec2Component(string compName, AMediaFormat *format) {
     ALOGV("In %s", __func__);
-    mListener.reset(new CodecListener(
+    mListener.reset(new (std::nothrow) CodecListener(
             [this](std::list<std::unique_ptr<C2Work>> &workItems) { handleWorkDone(workItems); }));
     if (!mListener) return -1;
 
@@ -125,7 +127,7 @@
     }
     int32_t numFrames = (inputBufferSize + frameSize - 1) / frameSize;
     // Temporary buffer to read data from the input file
-    char *data = (char *)malloc(frameSize);
+    std::unique_ptr<char[]> data(new (std::nothrow) char[frameSize]);
     if (!data) {
         ALOGE("Insufficient memory to read from input file");
         return -1;
@@ -169,7 +171,7 @@
         if (inputBufferSize - offset < frameSize) {
             frameSize = inputBufferSize - offset;
         }
-        eleStream.read(data, frameSize);
+        eleStream.read(data.get(), frameSize);
         if (eleStream.gcount() != frameSize) {
             ALOGE("read() from file failed. Incorrect bytes read");
             return -1;
@@ -191,8 +193,8 @@
                     return view.error();
                 }
 
-                memcpy(view.base(), data, frameSize);
-                work->input.buffers.emplace_back(new LinearBuffer(block));
+                memcpy(view.base(), data.get(), frameSize);
+                work->input.buffers.emplace_back(new (std::nothrow) LinearBuffer(block));
             } else {
                 std::shared_ptr<C2GraphicBlock> block;
                 status = mGraphicPool->fetchGraphicBlock(
@@ -211,16 +213,16 @@
                 uint8_t *pY = view.data()[C2PlanarLayout::PLANE_Y];
                 uint8_t *pU = view.data()[C2PlanarLayout::PLANE_U];
                 uint8_t *pV = view.data()[C2PlanarLayout::PLANE_V];
-                memcpy(pY, data, mWidth * mHeight);
-                memcpy(pU, data + mWidth * mHeight, (mWidth * mHeight >> 2));
-                memcpy(pV, data + (mWidth * mHeight * 5 >> 2), mWidth * mHeight >> 2);
-                work->input.buffers.emplace_back(new GraphicBuffer(block));
+                memcpy(pY, data.get(), mWidth * mHeight);
+                memcpy(pU, data.get() + mWidth * mHeight, (mWidth * mHeight >> 2));
+                memcpy(pV, data.get() + (mWidth * mHeight * 5 >> 2), mWidth * mHeight >> 2);
+                work->input.buffers.emplace_back(new (std::nothrow) GraphicBuffer(block));
             }
             mStats->addFrameSize(frameSize);
         }
 
         work->worklets.clear();
-        work->worklets.emplace_back(new C2Worklet);
+        work->worklets.emplace_back(new (std::nothrow) C2Worklet);
 
         std::list<std::unique_ptr<C2Work>> items;
         items.push_back(std::move(work));
@@ -234,7 +236,6 @@
         numFrames--;
         mNumInputFrame++;
     }
-    free(data);
     return status;
 }
 
diff --git a/media/tests/benchmark/tests/C2DecoderTest.cpp b/media/tests/benchmark/tests/C2DecoderTest.cpp
index 85dcbc1..6e4d9e1 100644
--- a/media/tests/benchmark/tests/C2DecoderTest.cpp
+++ b/media/tests/benchmark/tests/C2DecoderTest.cpp
@@ -20,6 +20,7 @@
 #include <fstream>
 #include <iostream>
 #include <limits>
+#include <memory>
 
 #include "BenchmarkTestEnvironment.h"
 #include "C2Decoder.h"
@@ -50,7 +51,7 @@
 };
 
 void C2DecoderTest::setupC2DecoderTest() {
-    mDecoder = new C2Decoder();
+    mDecoder = new (std::nothrow) C2Decoder();
     ASSERT_NE(mDecoder, nullptr) << "C2Decoder creation failed";
 
     int32_t status = mDecoder->setupCodec2();
@@ -66,7 +67,7 @@
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
-    Extractor *extractor = new Extractor();
+    std::unique_ptr<Extractor> extractor(new (std::nothrow) Extractor());
     ASSERT_NE(extractor, nullptr) << "Extractor creation failed";
 
     // Read file properties
@@ -85,7 +86,7 @@
         int32_t status = extractor->setupTrackFormat(curTrack);
         ASSERT_EQ(status, 0) << "Track Format invalid";
 
-        uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
+        std::unique_ptr<uint8_t[]> inputBuffer(new (std::nothrow) uint8_t[fileSize]);
         ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         vector<AMediaCodecBufferInfo> frameInfo;
@@ -100,7 +101,7 @@
             // copy the meta data and buffer to be passed to decoder
             ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, csdBuffer, info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, csdBuffer, info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
             idx++;
@@ -113,7 +114,7 @@
             // copy the meta data and buffer to be passed to decoder
             ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
         }
@@ -127,7 +128,7 @@
                 ASSERT_EQ(status, 0) << "Create component failed for " << codecName;
 
                 // Send the inputs to C2 Decoder and wait till all buffers are returned.
-                status = mDecoder->decodeFrames(inputBuffer, frameInfo);
+                status = mDecoder->decodeFrames(inputBuffer.get(), frameInfo);
                 ASSERT_EQ(status, 0) << "Decoder failed for " << codecName;
 
                 mDecoder->waitOnInputConsumption();
@@ -141,10 +142,8 @@
                 mDecoder->resetDecoder();
             }
         }
-        free(inputBuffer);
         fclose(inputFp);
         extractor->deInitExtractor();
-        delete extractor;
         delete mDecoder;
         mDecoder = nullptr;
     }
@@ -174,7 +173,7 @@
                           make_pair("crowd_1920x1080_25fps_4000kbps_h265.mkv", "hevc")));
 
 int main(int argc, char **argv) {
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
diff --git a/media/tests/benchmark/tests/C2EncoderTest.cpp b/media/tests/benchmark/tests/C2EncoderTest.cpp
index b18d856..dfbe496 100644
--- a/media/tests/benchmark/tests/C2EncoderTest.cpp
+++ b/media/tests/benchmark/tests/C2EncoderTest.cpp
@@ -20,6 +20,7 @@
 #include <fstream>
 #include <iostream>
 #include <limits>
+#include <memory>
 
 #include "BenchmarkTestEnvironment.h"
 #include "C2Encoder.h"
@@ -50,7 +51,7 @@
 };
 
 void C2EncoderTest::setupC2EncoderTest() {
-    mEncoder = new C2Encoder();
+    mEncoder = new (std::nothrow) C2Encoder();
     ASSERT_NE(mEncoder, nullptr) << "C2Encoder creation failed";
 
     int32_t status = mEncoder->setupCodec2();
@@ -66,7 +67,7 @@
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(inputFp, nullptr) << "Unable to open input file for reading";
 
-    Decoder *decoder = new Decoder();
+    std::unique_ptr<Decoder> decoder(new (std::nothrow) Decoder());
     ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
 
     Extractor *extractor = decoder->getExtractor();
@@ -88,7 +89,7 @@
         int32_t status = extractor->setupTrackFormat(curTrack);
         ASSERT_EQ(status, 0) << "Track Format invalid";
 
-        uint8_t *inputBuffer = (uint8_t *)malloc(fileSize);
+        std::unique_ptr<uint8_t[]> inputBuffer(new (std::nothrow) uint8_t[fileSize]);
         ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         vector<AMediaCodecBufferInfo> frameInfo;
@@ -102,7 +103,7 @@
             // copy the meta data and buffer to be passed to decoder
             ASSERT_LE(inputBufferOffset + info.size, fileSize) << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
         }
@@ -114,7 +115,8 @@
                                   << " for dumping decoder's output";
 
         decoder->setupDecoder();
-        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        status = decoder->decode(inputBuffer.get(), frameInfo, decName,
+                                 false /*asyncMode */, outFp);
         ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
 
         // Encode the given input stream for all C2 codecs supported by device
@@ -149,11 +151,9 @@
         // Destroy the decoder for the given input
         decoder->deInitCodec();
         decoder->resetDecoder();
-        free(inputBuffer);
     }
     fclose(inputFp);
     extractor->deInitExtractor();
-    delete decoder;
     delete mEncoder;
     mEncoder = nullptr;
 }
@@ -176,7 +176,7 @@
                           make_pair("crowd_1920x1080_25fps_4000kbps_h265.mkv", "hevc")));
 
 int main(int argc, char **argv) {
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 3666724..b363df3 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -20,6 +20,7 @@
 #include <fstream>
 #include <iostream>
 #include <limits>
+#include <memory>
 
 #include <android/binder_process.h>
 
@@ -38,7 +39,7 @@
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
-    Decoder *decoder = new Decoder();
+    std::unique_ptr<Decoder> decoder(new (std::nothrow) Decoder());
     ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
 
     Extractor *extractor = decoder->getExtractor();
@@ -57,7 +58,7 @@
         int32_t status = extractor->setupTrackFormat(curTrack);
         ASSERT_EQ(status, 0) << "Track Format invalid";
 
-        uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
+        std::unique_ptr<uint8_t[]> inputBuffer(new (std::nothrow) uint8_t[kMaxBufferSize]);
         ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         vector<AMediaCodecBufferInfo> frameInfo;
@@ -72,7 +73,7 @@
             ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
                     << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
         }
@@ -80,7 +81,7 @@
         string codecName = get<1>(params);
         bool asyncMode = get<2>(params);
         decoder->setupDecoder();
-        status = decoder->decode(inputBuffer, frameInfo, codecName, asyncMode);
+        status = decoder->decode(inputBuffer.get(), frameInfo, codecName, asyncMode);
         ASSERT_EQ(status, AMEDIA_OK) << "Decoder failed for " << codecName;
 
         decoder->deInitCodec();
@@ -88,12 +89,10 @@
         string inputReference = get<0>(params);
         decoder->dumpStatistics(inputReference, codecName, (asyncMode ? "async" : "sync"),
                                 gEnv->getStatsFile());
-        free(inputBuffer);
         decoder->resetDecoder();
     }
     fclose(inputFp);
     extractor->deInitExtractor();
-    delete decoder;
 }
 
 // TODO: (b/140549596)
@@ -178,7 +177,7 @@
 
 int main(int argc, char **argv) {
     ABinderProcess_startThreadPool();
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
@@ -190,4 +189,4 @@
         ALOGV("Decoder Test result = %d\n", status);
     }
     return status;
-}
\ No newline at end of file
+}
diff --git a/media/tests/benchmark/tests/EncoderTest.cpp b/media/tests/benchmark/tests/EncoderTest.cpp
index 7e1681d..6703b99 100644
--- a/media/tests/benchmark/tests/EncoderTest.cpp
+++ b/media/tests/benchmark/tests/EncoderTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "encoderTest"
 
 #include <fstream>
+#include <memory>
 
 #include "BenchmarkTestEnvironment.h"
 #include "Decoder.h"
@@ -39,7 +40,7 @@
     FILE *inputFp = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(inputFp, nullptr) << "Unable to open " << inputFile << " file for reading";
 
-    Decoder *decoder = new Decoder();
+    std::unique_ptr<Decoder> decoder(new (std::nothrow) Decoder());
     ASSERT_NE(decoder, nullptr) << "Decoder creation failed";
 
     Extractor *extractor = decoder->getExtractor();
@@ -54,14 +55,14 @@
     int32_t trackCount = extractor->initExtractor(fd, fileSize);
     ASSERT_GT(trackCount, 0) << "initExtractor failed";
 
-    Encoder *encoder = new Encoder();
+    std::unique_ptr<Encoder> encoder(new (std::nothrow) Encoder());
     ASSERT_NE(encoder, nullptr) << "Decoder creation failed";
 
     for (int curTrack = 0; curTrack < trackCount; curTrack++) {
         int32_t status = extractor->setupTrackFormat(curTrack);
         ASSERT_EQ(status, 0) << "Track Format invalid";
 
-        uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
+        std::unique_ptr<uint8_t[]> inputBuffer(new (std::nothrow) uint8_t[kMaxBufferSize]);
         ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         vector<AMediaCodecBufferInfo> frameInfo;
@@ -76,7 +77,7 @@
             ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
                     << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, extractor->getFrameBuf(), info.size);
             frameInfo.push_back(info);
             inputBufferOffset += info.size;
         }
@@ -88,7 +89,7 @@
                                   << " for dumping decoder's output";
 
         decoder->setupDecoder();
-        status = decoder->decode(inputBuffer, frameInfo, decName, false /*asyncMode */, outFp);
+        status = decoder->decode(inputBuffer.get(), frameInfo, decName, false /*asyncMode */, outFp);
         ASSERT_EQ(status, AMEDIA_OK) << "Decode returned error : " << status;
         AMediaFormat *decoderFormat = decoder->getFormat();
 
@@ -154,13 +155,10 @@
         }
         encoder->resetEncoder();
         decoder->deInitCodec();
-        free(inputBuffer);
         decoder->resetDecoder();
     }
-    delete encoder;
     fclose(inputFp);
     extractor->deInitExtractor();
-    delete decoder;
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -220,7 +218,7 @@
                                             "c2.android.hevc.encoder", true)));
 
 int main(int argc, char **argv) {
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index 27ee9ba..35fb465 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -17,6 +17,8 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "extractorTest"
 
+#include <memory>
+
 #include <gtest/gtest.h>
 
 #include <android/binder_process.h>
@@ -29,7 +31,7 @@
 class ExtractorTest : public ::testing::TestWithParam<pair<string, int32_t>> {};
 
 TEST_P(ExtractorTest, Extract) {
-    Extractor *extractObj = new Extractor();
+    std::unique_ptr<Extractor> extractObj(new (std::nothrow) Extractor());
     ASSERT_NE(extractObj, nullptr) << "Extractor creation failed";
 
     string inputFile = gEnv->getRes() + GetParam().first;
@@ -53,7 +55,6 @@
     extractObj->dumpStatistics(GetParam().first, "", gEnv->getStatsFile());
 
     fclose(inputFp);
-    delete extractObj;
 }
 
 INSTANTIATE_TEST_SUITE_P(ExtractorTestAll, ExtractorTest,
@@ -76,7 +77,7 @@
 
 int main(int argc, char **argv) {
     ABinderProcess_startThreadPool();
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
diff --git a/media/tests/benchmark/tests/MuxerTest.cpp b/media/tests/benchmark/tests/MuxerTest.cpp
index 991644b..65a3df4 100644
--- a/media/tests/benchmark/tests/MuxerTest.cpp
+++ b/media/tests/benchmark/tests/MuxerTest.cpp
@@ -20,6 +20,7 @@
 
 #include <fstream>
 #include <iostream>
+#include <memory>
 
 #include "BenchmarkTestEnvironment.h"
 #include "Muxer.h"
@@ -59,7 +60,7 @@
     MUXER_OUTPUT_T outputFormat = getMuxerOutFormat(fmt);
     ASSERT_NE(outputFormat, MUXER_OUTPUT_FORMAT_INVALID) << "Invalid muxer output format";
 
-    Muxer *muxerObj = new Muxer();
+    std::unique_ptr<Muxer> muxerObj(new (std::nothrow) Muxer());
     ASSERT_NE(muxerObj, nullptr) << "Muxer creation failed";
 
     Extractor *extractor = muxerObj->getExtractor();
@@ -78,7 +79,7 @@
         int32_t status = extractor->setupTrackFormat(curTrack);
         ASSERT_EQ(status, 0) << "Track Format invalid";
 
-        uint8_t *inputBuffer = (uint8_t *)malloc(kMaxBufferSize);
+        std::unique_ptr<uint8_t[]> inputBuffer(new (std::nothrow) uint8_t[kMaxBufferSize]);
         ASSERT_NE(inputBuffer, nullptr) << "Insufficient memory";
 
         // AMediaCodecBufferInfo : <size of frame> <flags> <presentationTimeUs> <offset>
@@ -94,7 +95,7 @@
             ASSERT_LE(inputBufferOffset + info.size, kMaxBufferSize)
                     << "Memory allocated not sufficient";
 
-            memcpy(inputBuffer + inputBufferOffset, extractor->getFrameBuf(), info.size);
+            memcpy(inputBuffer.get() + inputBufferOffset, extractor->getFrameBuf(), info.size);
             info.offset = inputBufferOffset;
             frameInfos.push_back(info);
             inputBufferOffset += info.size;
@@ -109,18 +110,16 @@
         status = muxerObj->initMuxer(fd, outputFormat);
         ASSERT_EQ(status, 0) << "initMuxer failed";
 
-        status = muxerObj->mux(inputBuffer, frameInfos);
+        status = muxerObj->mux(inputBuffer.get(), frameInfos);
         ASSERT_EQ(status, 0) << "Mux failed";
 
         muxerObj->deInitMuxer();
         muxerObj->dumpStatistics(GetParam().first + "." + fmt.c_str(), fmt, gEnv->getStatsFile());
-        free(inputBuffer);
         fclose(outputFp);
         muxerObj->resetMuxer();
     }
     fclose(inputFp);
     extractor->deInitExtractor();
-    delete muxerObj;
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -146,7 +145,7 @@
                           make_pair("bbb_16000hz_1ch_9kbps_amrwb_5mins.3gp", "3gpp")));
 
 int main(int argc, char **argv) {
-    gEnv = new BenchmarkTestEnvironment();
+    gEnv = new (std::nothrow) BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
     int status = gEnv->initFromOptions(argc, argv);
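
The muxer benchmark applies the same treatment to its frame buffer: the array form
std::unique_ptr<uint8_t[]> replaces the malloc/free pair, with .get() supplying the raw
pointer where memcpy and mux() still expect one. A small sketch of that buffer handling,
assuming a hypothetical consumeBuffer() callee in place of Muxer::mux():

    #include <cstdint>
    #include <cstring>
    #include <memory>
    #include <new>

    // Hypothetical sink that still takes a raw pointer, standing in for Muxer::mux().
    static size_t consumeBuffer(const uint8_t* data, size_t size) { return data ? size : 0; }

    int main() {
        constexpr size_t kMaxBufferSize = 64 * 1024;
        // The array form pairs new[] with delete[] automatically on destruction.
        std::unique_ptr<uint8_t[]> buffer(new (std::nothrow) uint8_t[kMaxBufferSize]);
        if (buffer == nullptr) return 1;  // "Insufficient memory"

        const uint8_t frame[4] = {1, 2, 3, 4};
        std::memcpy(buffer.get(), frame, sizeof(frame));  // .get() where a raw pointer is needed
        return consumeBuffer(buffer.get(), sizeof(frame)) == sizeof(frame) ? 0 : 1;
    }
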
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index a38ef57..7abb0b6 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -21,17 +21,34 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library {
-    name: "libmediautils",
+cc_defaults {
+    name: "libmediautils_defaults",
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    sanitize: {
+        cfi: true,
+        integer_overflow: true,
+    },
+    target: {
+        host: {
+            sanitize: {
+                cfi: false,
+            },
+        },
+    },
+}
 
+filegroup {
+    name: "libmediautils_core_srcs",
     srcs: [
         "AImageReaderUtils.cpp",
         "BatteryNotifier.cpp",
         "ISchedulingPolicyService.cpp",
         "Library.cpp",
-        "LimitProcessMemory.cpp",
         "MediaUtilsDelayed.cpp",
-        "MemoryLeakTrackUtil.cpp",
         "MethodStatistics.cpp",
         "Process.cpp",
         "ProcessInfo.cpp",
@@ -41,20 +58,41 @@
         "TimeCheck.cpp",
         "TimerThread.cpp",
     ],
+}
+
+cc_library_headers {
+    name: "libmediautils_headers",
+    host_supported: true,
+    vendor_available: true, // required for platform/hardware/interfaces
+    shared_libs: [
+        "liblog",
+    ],
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+}
+
+
+cc_library {
+    name: "libmediautils",
+    host_supported: true,
+    defaults: ["libmediautils_defaults"],
+    srcs: [
+        ":libmediautils_core_srcs",
+    ],
     static_libs: [
-        "libc_malloc_debug_backtrace",
         "libbatterystats_aidl",
         "libprocessinfoservice_aidl",
     ],
     shared_libs: [
         "libaudioclient_aidl_conversion",
         "libaudioutils", // for clock.h, Statistics.h
+        "libbase",
         "libbinder",
         "libcutils",
-        "liblog",
-        "libutils",
         "libhidlbase",
+        "liblog",
         "libpermission",
+        "libutils",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
         "packagemanager_aidl-cpp",
@@ -69,11 +107,7 @@
         "-Wall",
         "-Wextra",
         "-Werror",
-    ],
-
-    header_libs: [
-        "bionic_libc_platform_headers",
-        "libmedia_headers",
+        "-Wthread-safety",
     ],
 
     export_shared_lib_headers: [
@@ -81,27 +115,39 @@
     ],
 
     required: [
-        "libmediautils_delayed",  // lazy loaded
+        "libmediautils_delayed", // lazy loaded
     ],
 
-    include_dirs: [
-        // For DEBUGGER_SIGNAL
-        "system/core/debuggerd/include",
-    ],
+    target: {
+        android: {
+            srcs: [
+                "LimitProcessMemory.cpp",
+                "MemoryLeakTrackUtil.cpp",
+            ],
+            static_libs: [
+                "libc_malloc_debug_backtrace",
+            ],
+            include_dirs: [
+                // For DEBUGGER_SIGNAL
+                "system/core/debuggerd/include",
+            ],
+            header_libs: [
+                "bionic_libc_platform_headers",
+            ],
+        },
+    },
+
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
 
 cc_library {
     name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+    host_supported: true,
+    defaults: ["libmediautils_defaults"],
     srcs: [
         "MediaUtilsDelayedLibrary.cpp",
     ],
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -111,16 +157,12 @@
 
 cc_library {
     name: "libmediautils_vendor",
-    vendor_available: true,  // required for platform/hardware/interfaces
+    defaults: ["libmediautils_defaults"],
+    vendor_available: true, // required for platform/hardware/interfaces
     srcs: [
         "MemoryLeakTrackUtil.cpp",
     ],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
     shared_libs: [
         "liblog",
         "libutils",
@@ -137,23 +179,3 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
-
-
-cc_library_headers {
-    name: "libmediautils_headers",
-    vendor_available: true,  // required for platform/hardware/interfaces
-
-    export_include_dirs: ["include"],
-}
-
-cc_test {
-    name: "libmediautils_test",
-    srcs: [
-        "memory-test.cpp",
-        "TimerThread-test.cpp",
-    ],
-    shared_libs: [
-      "libmediautils",
-      "libutils",
-    ]
-}
diff --git a/media/utils/MemoryLeakTrackUtil.cpp b/media/utils/MemoryLeakTrackUtil.cpp
index fdb8c4f..7451033 100644
--- a/media/utils/MemoryLeakTrackUtil.cpp
+++ b/media/utils/MemoryLeakTrackUtil.cpp
@@ -33,10 +33,8 @@
 #define ABI_STRING "arm"
 #elif defined(__aarch64__)
 #define ABI_STRING "arm64"
-#elif defined(__mips__) && !defined(__LP64__)
-#define ABI_STRING "mips"
-#elif defined(__mips__) && defined(__LP64__)
-#define ABI_STRING "mips64"
+#elif defined(__riscv)
+#define ABI_STRING "riscv64"
 #elif defined(__i386__)
 #define ABI_STRING "x86"
 #elif defined(__x86_64__)
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
index b179b20..086757b 100644
--- a/media/utils/MethodStatistics.cpp
+++ b/media/utils/MethodStatistics.cpp
@@ -22,7 +22,8 @@
 
 std::shared_ptr<std::vector<std::string>>
 getStatisticsClassesForModule(std::string_view moduleName) {
-    static const std::map<std::string, std::shared_ptr<std::vector<std::string>>> m {
+    static const std::map<std::string, std::shared_ptr<std::vector<std::string>>,
+            std::less<> /* transparent comparator */> m {
         {
             METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL,
             std::shared_ptr<std::vector<std::string>>(
@@ -34,13 +35,14 @@
               })
         },
     };
-    auto it = m.find({moduleName.begin(), moduleName.end()});
+    auto it = m.find(moduleName);
     if (it == m.end()) return {};
     return it->second;
 }
 
 static void addClassesToMap(const std::shared_ptr<std::vector<std::string>> &classNames,
-        std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> &map) {
+        std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>,
+                std::less<> /* transparent comparator */> &map) {
     if (classNames) {
         for (const auto& className : *classNames) {
             map.emplace(className, std::make_shared<MethodStatistics<std::string>>());
@@ -51,17 +53,18 @@
 // singleton statistics for DeviceHalHidl StreamOutHalHidl StreamInHalHidl
 std::shared_ptr<MethodStatistics<std::string>>
 getStatisticsForClass(std::string_view className) {
-    static const std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m =
+    static const std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>,
+            std::less<> /* transparent comparator */> m =
         // copy elided initialization of map m.
         [](){
-            std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m;
+            std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>, std::less<>> m;
             addClassesToMap(
                     getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
                     m);
             return m;
         }();
 
-    auto it = m.find({className.begin(), className.end()});
+    auto it = m.find(className);
     if (it == m.end()) return {};
     return it->second;
 }
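
The statistics maps switch to std::less<>, the transparent comparator, so find() can take
the caller's std::string_view directly; previously each lookup had to build a temporary
std::string from {begin, end}. A minimal sketch of the difference, independent of the
MethodStatistics types:

    #include <cassert>
    #include <map>
    #include <string>
    #include <string_view>

    int main() {
        // With the default comparator, find() only accepts std::string, forcing a copy
        // whenever the caller holds a std::string_view.
        std::map<std::string, int, std::less<>> m{{"StreamOutHalHidl", 1}};

        const std::string_view key = "StreamOutHalHidl";
        const auto it = m.find(key);  // heterogeneous lookup: no temporary std::string
        assert(it != m.end() && it->second == 1);
        return 0;
    }
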
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index da199c4..6296351 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "ProcessInfo"
 #include <utils/Log.h>
 
-#include <media/stagefright/ProcessInfo.h>
+#include <mediautils/ProcessInfo.h>
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -30,21 +30,75 @@
 static constexpr int32_t INVALID_ADJ = -10000;
 static constexpr int32_t NATIVE_ADJ = -1000;
 
+/* Make sure this matches ActivityManager::PROCESS_STATE_NONEXISTENT
+ * #include <binder/ActivityManager.h>
+ * using ActivityManager::PROCESS_STATE_NONEXISTENT;
+ */
+static constexpr int32_t PROCESS_STATE_NONEXISTENT = 20;
+
 ProcessInfo::ProcessInfo() {}
 
+/*
+ * Checks whether the processes with the given pids exist or not.
+ *
+ * Arguments:
+ *  - pids (input): List of pids to check for existence.
+ *  - existent (output): Boolean vector corresponding to the existence state of each pid.
+ *
+ * On successful return:
+ *     - existent[i] true means pids[i] is still active, and
+ *     - existent[i] false means pids[i] has already terminated (nonexistent).
+ * On unsuccessful return, the output argument existent is invalid.
+ */
+bool ProcessInfo::checkProcessExistent(const std::vector<int32_t>& pids,
+                                       std::vector<bool>* existent) {
+    sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
+    sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
+
+    // Get the process state of the applications managed/tracked by the ActivityManagerService.
+    // Native processes don't need to be checked here.
+    // If the state of a native process is ever needed, mOverrideMap can be consulted.
+    size_t count = pids.size();
+    std::vector<int32_t> states(count, PROCESS_STATE_NONEXISTENT);
+    status_t err = service->getProcessStatesFromPids(count,
+                                                     const_cast<int32_t*>(pids.data()),
+                                                     states.data());
+    if (err != OK) {
+        ALOGE("%s: IProcessInfoService::getProcessStatesFromPids failed with %d",
+              __func__, err);
+        return false;
+    }
+
+    existent->clear();
+    for (size_t index = 0; index < states.size(); index++) {
+        // If this process is not tracked by ActivityManagerService, look for overrides.
+        if (states[index] == PROCESS_STATE_NONEXISTENT) {
+            std::scoped_lock lock{mOverrideLock};
+            std::map<int, ProcessInfoOverride>::iterator it =
+                mOverrideMap.find(pids[index]);
+            if (it != mOverrideMap.end()) {
+                states[index] = it->second.procState;
+            }
+        }
+        existent->push_back(states[index] != PROCESS_STATE_NONEXISTENT);
+    }
+
+    return true;
+}
+
 bool ProcessInfo::getPriority(int pid, int* priority) {
-    sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo"));
+    sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
     sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
 
     size_t length = 1;
     int32_t state;
     int32_t score = INVALID_ADJ;
     status_t err = service->getProcessStatesAndOomScoresFromPids(length, &pid, &state, &score);
+    ALOGV("%s: pid:%d state:%d score:%d err:%d", __FUNCTION__, pid, state, score, err);
     if (err != OK) {
         ALOGE("getProcessStatesAndOomScoresFromPids failed");
         return false;
     }
-    ALOGV("pid %d state %d score %d", pid, state, score);
     if (score <= NATIVE_ADJ) {
         std::scoped_lock lock{mOverrideLock};
 
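
checkProcessExistent() batches one binder call for a whole list of pids and falls back to
mOverrideMap for anything the ActivityManagerService does not track. A hedged sketch of how
a caller might consume it; the pruning helper below is illustrative and not taken from this
patch:

    // Assumes <mediautils/ProcessInfo.h> as included above.
    #include <vector>
    #include <mediautils/ProcessInfo.h>

    // Drop bookkeeping for clients whose processes have already exited.
    void pruneDeadClients(android::ProcessInfo& info, std::vector<int32_t>& clientPids) {
        std::vector<bool> existent;
        if (!info.checkProcessExistent(clientPids, &existent)) {
            return;  // on failure the output vector is not meaningful
        }
        std::vector<int32_t> alive;
        for (size_t i = 0; i < clientPids.size(); ++i) {
            if (existent[i]) alive.push_back(clientPids[i]);
        }
        clientPids.swap(alive);
    }
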
diff --git a/media/utils/SchedulingPolicyService.cpp b/media/utils/SchedulingPolicyService.cpp
index ad38862..6e515ff 100644
--- a/media/utils/SchedulingPolicyService.cpp
+++ b/media/utils/SchedulingPolicyService.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include <binder/IServiceManager.h>
+#include <cutils/properties.h>
 #include <utils/Mutex.h>
 #include "ISchedulingPolicyService.h"
 #include "mediautils/SchedulingPolicyService.h"
@@ -86,4 +87,25 @@
     return ret;
 }
 
+int requestSpatializerPriority(pid_t pid, pid_t tid) {
+    if (pid == -1 || tid == -1) return BAD_VALUE;
+
+    // update priority to RT if specified.
+    constexpr int32_t kRTPriorityMin = 1;
+    constexpr int32_t kRTPriorityMax = 3;
+    const int32_t priorityBoost =
+            property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+    if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+        const status_t status = requestPriority(
+                pid, tid, priorityBoost, false /* isForApp */, true /*asynchronous*/);
+        if (status != OK) {
+            ALOGW("%s: Cannot request spatializer priority boost %d, status:%d",
+                    __func__, priorityBoost, status);
+            return status < 0 ? status : UNKNOWN_ERROR;
+        }
+        return priorityBoost;
+    }
+    return 0;  // no boost requested
+}
+
 }   // namespace android
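
requestSpatializerPriority() reads the audio.spatializer.priority property and requests a
real-time boost only when the value lies in [kRTPriorityMin, kRTPriorityMax]; it returns the
boost that was applied, 0 when no boost was requested, or a negative status on failure. A
hedged sketch of a caller consuming that return value (the helper below is illustrative):

    #define LOG_TAG "SpatializerBoost"
    #include <sys/types.h>
    #include <utils/Log.h>
    #include <mediautils/SchedulingPolicyService.h>

    void applySpatializerBoost(pid_t pid, pid_t tid) {
        const int result = android::requestSpatializerPriority(pid, tid);
        if (result > 0) {
            ALOGI("spatializer thread %d boosted to RT priority %d", tid, result);
        } else if (result == 0) {
            ALOGV("no spatializer priority boost requested");
        } else {
            ALOGW("spatializer priority boost failed, status %d", result);
        }
    }
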
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 07f4529..83b84e3 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -103,11 +103,10 @@
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
     myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
-    if (callerAttributionSource.token != nullptr) {
-        myAttributionSource.token = callerAttributionSource.token;
-    } else {
-        myAttributionSource.token = sp<BBinder>::make();
-    }
+    // Create a static token for audioserver requests, which identifies the
+    // audioserver to the app ops system
+    static sp<BBinder> appOpsToken = sp<BBinder>::make();
+    myAttributionSource.token = appOpsToken;
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
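
The attribution chain built for audioserver now reuses one function-local static BBinder as
its app-ops token, so app-ops sees a single stable identity for audioserver instead of a
fresh token per request. The language guarantee relied on is thread-safe initialization of
function-local statics; a minimal sketch with a stand-in type (BBinder itself is
Android-specific):

    #include <memory>

    struct Token {};  // stand-in for BBinder, for illustration only

    // Initialized once, on first use, in a thread-safe way (C++11 local static).
    static std::shared_ptr<Token> appOpsToken() {
        static const std::shared_ptr<Token> token = std::make_shared<Token>();
        return token;
    }

    int main() {
        // Every caller observes the same token instance.
        return appOpsToken().get() == appOpsToken().get() ? 0 : 1;
    }
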
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 0848eb3..a5378e6 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -14,19 +14,36 @@
  * limitations under the License.
  */
 
+#include <csignal>
+#include "mediautils/TimerThread.h"
 #define LOG_TAG "TimeCheck"
 
 #include <optional>
 
 #include <android-base/logging.h>
+#include <android-base/strings.h>
 #include <audio_utils/clock.h>
 #include <mediautils/EventLog.h>
+#include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
 #include <mediautils/TimeCheck.h>
+#include <mediautils/TidWrapper.h>
 #include <utils/Log.h>
+
+#if defined(__ANDROID__)
 #include "debuggerd/handler.h"
+#endif
+
 
 namespace android::mediautils {
+// This function appropriately signals a pid to dump a backtrace if we are
+// running on device (and the HAL exists). If we are not running on an Android
+// device, there is no HAL to signal (so we do nothing).
+static inline void signalAudioHAL([[maybe_unused]] pid_t pid) {
+#if defined(__ANDROID__)
+    sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+#endif
+}
 
 /**
  * Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
@@ -135,25 +152,27 @@
 std::string TimeCheck::toString() {
     // note pending and retired are individually locked for maximum concurrency,
     // snapshot is not instantaneous at a single time.
-    return getTimeCheckThread().toString();
+    return getTimeCheckThread().getSnapshotAnalysis().toString();
 }
 
-TimeCheck::TimeCheck(std::string tag, OnTimerFunc&& onTimer, uint32_t timeoutMs,
-        bool crashOnTimeout)
-    : mTimeCheckHandler(new TimeCheckHandler{
-            std::move(tag), std::move(onTimer), crashOnTimeout,
-            std::chrono::system_clock::now(), gettid()})
-    , mTimerHandle(timeoutMs == 0
+TimeCheck::TimeCheck(std::string_view tag, OnTimerFunc&& onTimer, Duration requestedTimeoutDuration,
+        Duration secondChanceDuration, bool crashOnTimeout)
+    : mTimeCheckHandler{ std::make_shared<TimeCheckHandler>(
+            tag, std::move(onTimer), crashOnTimeout, requestedTimeoutDuration,
+            secondChanceDuration, std::chrono::system_clock::now(), getThreadIdWrapper()) }
+    , mTimerHandle(requestedTimeoutDuration.count() == 0
+              /* for TimeCheck we don't consider a non-zero secondChanceDuration here */
               ? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
               : getTimeCheckThread().scheduleTask(
                       mTimeCheckHandler->tag,
                       // Pass in all the arguments by value to this task for safety.
                       // The thread could call the callback before the constructor is finished.
                       // The destructor is not blocked on callback.
-                      [ timeCheckHandler = mTimeCheckHandler ] {
-                          timeCheckHandler->onTimeout();
+                      [ timeCheckHandler = mTimeCheckHandler ](TimerThread::Handle timerHandle) {
+                          timeCheckHandler->onTimeout(timerHandle);
                       },
-                      std::chrono::milliseconds(timeoutMs))) {}
+                      requestedTimeoutDuration,
+                      secondChanceDuration)) {}
 
 TimeCheck::~TimeCheck() {
     if (mTimeCheckHandler) {
@@ -161,30 +180,84 @@
     }
 }
 
+/* static */
+std::string TimeCheck::analyzeTimeouts(
+        float requestedTimeoutMs, float elapsedSteadyMs, float elapsedSystemMs) {
+    // Track any OS clock issues with suspend.
+    // It is possible that elapsedSystemMs is much greater than elapsedSteadyMs if
+    // a suspend occurs; however, the requested timeout ms should always be slightly
+    // less than the elapsed steady ms, whether a suspend occurs or not.
+
+    std::string s("Timeout ms ");
+    s.append(std::to_string(requestedTimeoutMs))
+        .append(" elapsed steady ms ").append(std::to_string(elapsedSteadyMs))
+        .append(" elapsed system ms ").append(std::to_string(elapsedSystemMs));
+
+    // Is there something unusual?
+    static constexpr float TOLERANCE_CONTEXT_SWITCH_MS = 200.f;
+
+    if (requestedTimeoutMs > elapsedSteadyMs || requestedTimeoutMs > elapsedSystemMs) {
+        s.append("\nError: early expiration - "
+                "requestedTimeoutMs should be less than elapsed time");
+    }
+
+    if (elapsedSteadyMs > elapsedSystemMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+        s.append("\nWarning: steady time should not advance faster than system time");
+    }
+
+    // This has been found in suspend stress testing.
+    if (elapsedSteadyMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+        s.append("\nWarning: steady time significantly exceeds timeout "
+                "- possible thread stall or aborted suspend");
+    }
+
+    // This has been found in suspend stress testing.
+    if (elapsedSystemMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+        s.append("\nInformation: system time significantly exceeds timeout "
+                "- possible suspend");
+    }
+    return s;
+}
+
+// To avoid any potential race conditions, the timer handle
+// (expiration = clock steady start + timeout) is passed into the callback.
 void TimeCheck::TimeCheckHandler::onCancel(TimerThread::Handle timerHandle) const
 {
     if (TimeCheck::getTimeCheckThread().cancelTask(timerHandle) && onTimer) {
-        const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
-        onTimer(false /* timeout */,
-                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
-                        endTime - startTime).count());
+        const std::chrono::steady_clock::time_point endSteadyTime =
+                std::chrono::steady_clock::now();
+        const float elapsedSteadyMs = std::chrono::duration_cast<FloatMs>(
+                endSteadyTime - timerHandle + timeoutDuration).count();
+        // send the elapsed steady time for statistics.
+        onTimer(false /* timeout */, elapsedSteadyMs);
     }
 }
 
-void TimeCheck::TimeCheckHandler::onTimeout() const
+// To avoid any potential race conditions, the timer handle
+// (expiration = clock steady start + timeout) is passed into the callback.
+void TimeCheck::TimeCheckHandler::onTimeout(TimerThread::Handle timerHandle) const
 {
-    const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+    const std::chrono::steady_clock::time_point endSteadyTime = std::chrono::steady_clock::now();
+    const std::chrono::system_clock::time_point endSystemTime = std::chrono::system_clock::now();
+    // timerHandle incorporates the timeout
+    const float elapsedSteadyMs = std::chrono::duration_cast<FloatMs>(
+            endSteadyTime - (timerHandle - timeoutDuration)).count();
+    const float elapsedSystemMs = std::chrono::duration_cast<FloatMs>(
+            endSystemTime - startSystemTime).count();
+    const float requestedTimeoutMs = std::chrono::duration_cast<FloatMs>(
+            timeoutDuration).count();
+    const float secondChanceMs = std::chrono::duration_cast<FloatMs>(
+            secondChanceDuration).count();
+
     if (onTimer) {
-        onTimer(true /* timeout */,
-                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
-                        endTime - startTime).count());
+        onTimer(true /* timeout */, elapsedSteadyMs);
     }
 
     if (!crashOnTimeout) return;
 
     // Generate the TimerThread summary string early before sending signals to the
     // HAL processes which can affect thread behavior.
-    const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
+    const auto snapshotAnalysis = getTimeCheckThread().getSnapshotAnalysis(4 /* retiredCount */);
 
     // Generate audio HAL processes tombstones and allow time to complete
     // before forcing restart
@@ -194,7 +267,7 @@
         for (const auto& pid : pids) {
             ALOGI("requesting tombstone for pid: %d", pid);
             halPids.append(std::to_string(pid)).append(" ");
-            sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+            signalAudioHAL(pid);
         }
         sleep(1);
     } else {
@@ -207,10 +280,12 @@
     // Create abort message string - caution: this can be very large.
     const std::string abortMessage = std::string("TimeCheck timeout for ")
             .append(tag)
-            .append(" scheduled ").append(formatTime(startTime))
+            .append(" scheduled ").append(formatTime(startSystemTime))
             .append(" on thread ").append(std::to_string(tid)).append("\n")
+            .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
+                    elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
-            .append(summary);
+            .append(snapshotAnalysis.toString());
 
     // Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
     // Log message text may be truncated to less than an
@@ -220,27 +295,40 @@
     // to avoid the size limitation. LOG(FATAL) does an abort whereas
     // LOG(FATAL_WITHOUT_ABORT) does not abort.
 
-    LOG(FATAL) << abortMessage;
+    static constexpr pid_t invalidPid = TimerThread::SnapshotAnalysis::INVALID_PID;
+    pid_t tidToAbort = invalidPid;
+    if (snapshotAnalysis.suspectTid != invalidPid) {
+        tidToAbort = snapshotAnalysis.suspectTid;
+    } else if (snapshotAnalysis.timeoutTid != invalidPid) {
+        tidToAbort = snapshotAnalysis.timeoutTid;
+    }
+
+    LOG(FATAL_WITHOUT_ABORT) << abortMessage;
+    const auto ret = abortTid(tidToAbort);
+    if (ret < 0) {
+        LOG(FATAL) << "TimeCheck thread signal failed, aborting process. "
+                       "errno: " << errno << base::ErrnoNumberAsString(errno);
+    }
 }
 
 // Automatically create a TimeCheck class for a class and method.
-// This is used for Audio HIDL support.
+// This is used for Audio HAL support.
 mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
         std::string_view className, std::string_view methodName) {
     std::shared_ptr<MethodStatistics<std::string>> statistics =
             mediautils::getStatisticsForClass(className);
     if (!statistics) return {}; // empty TimeCheck.
     return mediautils::TimeCheck(
-            std::string(className).append("::").append(methodName),
-            [ clazz = std::string(className), method = std::string(methodName),
+            FixedString62(className).append("::").append(methodName),
+            [ safeMethodName = FixedString30(methodName),
               stats = std::move(statistics) ]
             (bool timeout, float elapsedMs) {
                     if (timeout) {
                         ; // ignored, there is no timeout value.
                     } else {
-                        stats->event(method, elapsedMs);
+                        stats->event(safeMethodName.asStringView(), elapsedMs);
                     }
-            }, 0 /* timeoutMs */);
+            }, {} /* timeoutDuration */, {} /* secondChanceDuration */, false /* crashOnTimeout */);
 }
 
 }  // namespace android::mediautils
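
TimeCheck now takes std::chrono durations plus an optional second-chance duration (to absorb
clock-monotonic advancement around suspend), and on timeout it logs the snapshot analysis and
aborts the suspect or timed-out thread rather than the watchdog thread. A hedged sketch of
constructing one with the new signature, assuming the mediautils headers above and that
Duration accepts standard chrono literals:

    #include <chrono>
    #include <mediautils/TimeCheck.h>

    using namespace std::chrono_literals;

    void guardedHalCall() {
        // Crash (after dumping the analysis) if this scope takes longer than 3s,
        // with a 1s second chance to tolerate a suspend during the wait.
        android::mediautils::TimeCheck timeCheck(
                "IDevice::openStream" /* tag, illustrative */,
                [](bool timeout, float elapsedMs) {
                    // statistics/logging hook; invoked on both cancel and timeout
                    (void)timeout; (void)elapsedMs;
                },
                3s /* requestedTimeoutDuration */,
                1s /* secondChanceDuration */,
                true /* crashOnTimeout */);

        // ... the guarded HAL call would go here ...
    }   // the TimeCheck destructor cancels the timer if the call returned in time
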
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/TimerThread-test.cpp
deleted file mode 100644
index 93cd64c..0000000
--- a/media/utils/TimerThread-test.cpp
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <chrono>
-#include <thread>
-#include <gtest/gtest.h>
-#include <mediautils/TimerThread.h>
-
-using namespace std::chrono_literals;
-using namespace android::mediautils;
-
-namespace {
-
-constexpr auto kJitter = 10ms;
-
-// Each task written by *ToString() will start with a left brace.
-constexpr char REQUEST_START = '{';
-
-inline size_t countChars(std::string_view s, char c) {
-    return std::count(s.begin(), s.end(), c);
-}
-
-TEST(TimerThread, Basic) {
-    std::atomic<bool> taskRan = false;
-    TimerThread thread;
-    thread.scheduleTask("Basic", [&taskRan] { taskRan = true; }, 100ms);
-    std::this_thread::sleep_for(100ms - kJitter);
-    ASSERT_FALSE(taskRan);
-    std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_TRUE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
-}
-
-TEST(TimerThread, Cancel) {
-    std::atomic<bool> taskRan = false;
-    TimerThread thread;
-    TimerThread::Handle handle =
-            thread.scheduleTask("Cancel", [&taskRan] { taskRan = true; }, 100ms);
-    std::this_thread::sleep_for(100ms - kJitter);
-    ASSERT_FALSE(taskRan);
-    ASSERT_TRUE(thread.cancelTask(handle));
-    std::this_thread::sleep_for(2 * kJitter);
-    ASSERT_FALSE(taskRan);
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
-}
-
-TEST(TimerThread, CancelAfterRun) {
-    std::atomic<bool> taskRan = false;
-    TimerThread thread;
-    TimerThread::Handle handle =
-            thread.scheduleTask("CancelAfterRun", [&taskRan] { taskRan = true; }, 100ms);
-    std::this_thread::sleep_for(100ms + kJitter);
-    ASSERT_TRUE(taskRan);
-    ASSERT_FALSE(thread.cancelTask(handle));
-    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
-}
-
-TEST(TimerThread, MultipleTasks) {
-    std::array<std::atomic<bool>, 6> taskRan{};
-    TimerThread thread;
-
-    auto startTime = std::chrono::steady_clock::now();
-
-    thread.scheduleTask("0", [&taskRan] { taskRan[0] = true; }, 300ms);
-    thread.scheduleTask("1", [&taskRan] { taskRan[1] = true; }, 100ms);
-    thread.scheduleTask("2", [&taskRan] { taskRan[2] = true; }, 200ms);
-    thread.scheduleTask("3", [&taskRan] { taskRan[3] = true; }, 400ms);
-    auto handle4 = thread.scheduleTask("4", [&taskRan] { taskRan[4] = true; }, 200ms);
-    thread.scheduleTask("5", [&taskRan] { taskRan[5] = true; }, 200ms);
-
-    // 6 tasks pending
-    ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
-    // 0 tasks completed
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
-
-    // Task 1 should trigger around 100ms.
-    std::this_thread::sleep_until(startTime + 100ms - kJitter);
-    ASSERT_FALSE(taskRan[0]);
-    ASSERT_FALSE(taskRan[1]);
-    ASSERT_FALSE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_FALSE(taskRan[5]);
-
-    std::this_thread::sleep_until(startTime + 100ms + kJitter);
-    ASSERT_FALSE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_FALSE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_FALSE(taskRan[5]);
-
-    // Cancel task 4 before it gets a chance to run.
-    thread.cancelTask(handle4);
-
-    // Tasks 2 and 5 should trigger around 200ms.
-    std::this_thread::sleep_until(startTime + 200ms - kJitter);
-    ASSERT_FALSE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_FALSE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_FALSE(taskRan[5]);
-
-    std::this_thread::sleep_until(startTime + 200ms + kJitter);
-    ASSERT_FALSE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_TRUE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_TRUE(taskRan[5]);
-
-    // Task 0 should trigger around 300ms.
-    std::this_thread::sleep_until(startTime + 300ms - kJitter);
-    ASSERT_FALSE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_TRUE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_TRUE(taskRan[5]);
-
-    std::this_thread::sleep_until(startTime + 300ms + kJitter);
-    ASSERT_TRUE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_TRUE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_TRUE(taskRan[5]);
-
-    // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
-
-    // Task 3 should trigger around 400ms.
-    std::this_thread::sleep_until(startTime + 400ms - kJitter);
-    ASSERT_TRUE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_TRUE(taskRan[2]);
-    ASSERT_FALSE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_TRUE(taskRan[5]);
-
-    // 4 tasks ran and 1 cancelled
-    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
-
-    std::this_thread::sleep_until(startTime + 400ms + kJitter);
-    ASSERT_TRUE(taskRan[0]);
-    ASSERT_TRUE(taskRan[1]);
-    ASSERT_TRUE(taskRan[2]);
-    ASSERT_TRUE(taskRan[3]);
-    ASSERT_FALSE(taskRan[4]);
-    ASSERT_TRUE(taskRan[5]);
-
-    // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
-    // 5 tasks ran and 1 cancelled
-    ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
-}
-
-TEST(TimerThread, TrackedTasks) {
-    TimerThread thread;
-
-    auto handle0 = thread.trackTask("0");
-    auto handle1 = thread.trackTask("1");
-    auto handle2 = thread.trackTask("2");
-
-    // 3 tasks pending
-    ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
-    // 0 tasks retired
-    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
-
-    ASSERT_TRUE(thread.cancelTask(handle0));
-    ASSERT_TRUE(thread.cancelTask(handle1));
-
-    // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
-
-    // handle1 is stale, cancel returns false.
-    ASSERT_FALSE(thread.cancelTask(handle1));
-
-    // 1 task pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
-
-    // Add another tracked task.
-    auto handle3 = thread.trackTask("3");
-
-    // 2 tasks pending
-    ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
-    // 2 tasks retired
-    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
-
-    ASSERT_TRUE(thread.cancelTask(handle2));
-
-    // 1 tasks pending
-    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
-    // 3 tasks retired
-    ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
-
-    ASSERT_TRUE(thread.cancelTask(handle3));
-
-    // 0 tasks pending
-    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
-    // 4 tasks retired
-    ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
-}
-
-}  // namespace
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 6de6b13..3966103 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -22,68 +22,66 @@
 #include <vector>
 
 #include <mediautils/MediaUtilsDelayed.h>
+#include <mediautils/TidWrapper.h>
 #include <mediautils/TimerThread.h>
+#include <utils/Log.h>
 #include <utils/ThreadDefs.h>
 
+using namespace std::chrono_literals;
+
 namespace android::mediautils {
 
 extern std::string formatTime(std::chrono::system_clock::time_point t);
 extern std::string_view timeSuffix(std::string_view time1, std::string_view time2);
 
 TimerThread::Handle TimerThread::scheduleTask(
-        std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout) {
+        std::string_view tag, TimerCallback&& func,
+        Duration timeoutDuration, Duration secondChanceDuration) {
     const auto now = std::chrono::system_clock::now();
-    std::shared_ptr<const Request> request{
-            new Request{ now, now + timeout, gettid(), std::move(tag) }};
-    return mMonitorThread.add(std::move(request), std::move(func), timeout);
+    auto request = std::make_shared<const Request>(now, now +
+            std::chrono::duration_cast<std::chrono::system_clock::duration>(timeoutDuration),
+            secondChanceDuration, getThreadIdWrapper(), tag);
+    return mMonitorThread.add(std::move(request), std::move(func), timeoutDuration);
 }
 
-TimerThread::Handle TimerThread::trackTask(std::string tag) {
+TimerThread::Handle TimerThread::trackTask(std::string_view tag) {
     const auto now = std::chrono::system_clock::now();
-    std::shared_ptr<const Request> request{
-            new Request{ now, now, gettid(), std::move(tag) }};
+    auto request = std::make_shared<const Request>(now, now,
+            Duration{} /* secondChanceDuration */, getThreadIdWrapper(), tag);
     return mNoTimeoutMap.add(std::move(request));
 }
 
 bool TimerThread::cancelTask(Handle handle) {
-    std::shared_ptr<const Request> request = mNoTimeoutMap.isValidHandle(handle) ?
+    std::shared_ptr<const Request> request = isNoTimeoutHandle(handle) ?
              mNoTimeoutMap.remove(handle) : mMonitorThread.remove(handle);
     if (!request) return false;
     mRetiredQueue.add(std::move(request));
     return true;
 }
 
-std::string TimerThread::toString(size_t retiredCount) const {
+
+std::string TimerThread::SnapshotAnalysis::toString() const {
     // Note: These request queues are snapshot very close together but
     // not at "identical" times as we don't use a class-wide lock.
-
-    std::vector<std::shared_ptr<const Request>> timeoutRequests;
-    std::vector<std::shared_ptr<const Request>> retiredRequests;
-    mTimeoutQueue.copyRequests(timeoutRequests);
-    mRetiredQueue.copyRequests(retiredRequests, retiredCount);
-    std::vector<std::shared_ptr<const Request>> pendingRequests =
-        getPendingRequests();
-
-    struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
-    std::string analysisSummary;
-    if (!analysis.summary.empty()) {
-        analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
-    }
+    std::string analysisSummary = std::string("\nanalysis [ ").append(description).append(" ]");
     std::string timeoutStack;
-    if (analysis.timeoutTid != -1) {
-        timeoutStack = std::string("\ntimeout(")
-                .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
-                .append(getCallStackStringForTid(analysis.timeoutTid)).append("]");
-    }
     std::string blockedStack;
-    if (analysis.HALBlockedTid != -1) {
+    if (timeoutTid != -1) {
+        timeoutStack = std::string(suspectTid == timeoutTid ? "\ntimeout/blocked(" : "\ntimeout(")
+                .append(std::to_string(timeoutTid)).append(") callstack [\n")
+                .append(getCallStackStringForTid(timeoutTid)).append("]");
+    }
+
+    if (suspectTid != -1 && suspectTid != timeoutTid) {
         blockedStack = std::string("\nblocked(")
-                .append(std::to_string(analysis.HALBlockedTid)).append(")  callstack [\n")
-                .append(getCallStackStringForTid(analysis.HALBlockedTid)).append("]");
+                .append(std::to_string(suspectTid)).append(")  callstack [\n")
+                .append(getCallStackStringForTid(suspectTid)).append("]");
     }
 
     return std::string("now ")
             .append(formatTime(std::chrono::system_clock::now()))
+            .append("\nsecondChanceCount ")
+            .append(std::to_string(secondChanceCount))
             .append(analysisSummary)
             .append("\ntimeout [ ")
             .append(requestsToString(timeoutRequests))
@@ -106,34 +104,41 @@
 //
 /* static */
 bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
-    const size_t hidlPos = request->tag.find("Hidl");
+    const size_t hidlPos = request->tag.asStringView().find("Hidl");
     if (hidlPos == std::string::npos) return false;
     // should be a separator afterwards Hidl which indicates the string was in the class.
-    const size_t separatorPos = request->tag.find("::", hidlPos);
+    const size_t separatorPos = request->tag.asStringView().find("::", hidlPos);
     return separatorPos != std::string::npos;
 }
 
-/* static */
-struct TimerThread::Analysis TimerThread::analyzeTimeout(
-    const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
-    const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
-
-    if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
-
+struct TimerThread::SnapshotAnalysis TimerThread::getSnapshotAnalysis(size_t retiredCount) const {
+    struct SnapshotAnalysis analysis{};
+    // The following snapshot of the TimerThread state will be utilized for
+    // analysis. Note, there is no lock around these calls, so there could be
+    // a state update between them.
+    mTimeoutQueue.copyRequests(analysis.timeoutRequests);
+    mRetiredQueue.copyRequests(analysis.retiredRequests, retiredCount);
+    analysis.pendingRequests = getPendingRequests();
+    analysis.secondChanceCount = mMonitorThread.getSecondChanceCount();
+    // No call has timed out, so there is no analysis to be done.
+    if (analysis.timeoutRequests.empty())
+        return analysis;
     // for now look at last timeout (in our case, the only timeout)
-    const std::shared_ptr<const Request> timeout = timeoutRequests.back();
-
+    const std::shared_ptr<const Request> timeout = analysis.timeoutRequests.back();
+    analysis.timeoutTid = timeout->tid;
+    if (analysis.pendingRequests.empty())
+        return analysis;
     // pending Requests that are problematic.
     std::vector<std::shared_ptr<const Request>> pendingExact;
     std::vector<std::shared_ptr<const Request>> pendingPossible;
 
-    // We look at pending requests that were scheduled no later than kDuration
+    // We look at pending requests that were scheduled no later than kPendingDuration
     // after the timeout request. This prevents false matches with calls
     // that naturally block for a short period of time
     // such as HAL write() and read().
     //
-    auto constexpr kDuration = std::chrono::milliseconds(1000);
-    for (const auto& pending : pendingRequests) {
+    constexpr Duration kPendingDuration = 1000ms;
+    for (const auto& pending : analysis.pendingRequests) {
         // If the pending tid is the same as timeout tid, problem identified.
         if (pending->tid == timeout->tid) {
             pendingExact.emplace_back(pending);
@@ -141,34 +146,32 @@
         }
 
         // if the pending tid is scheduled within time limit
-        if (pending->scheduled - timeout->scheduled < kDuration) {
+        if (pending->scheduled - timeout->scheduled < kPendingDuration) {
             pendingPossible.emplace_back(pending);
         }
     }
 
-    struct Analysis analysis{};
-
-    analysis.timeoutTid = timeout->tid;
-    std::string& summary = analysis.summary;
+    std::string& description = analysis.description;
     if (!pendingExact.empty()) {
         const auto& request = pendingExact.front();
         const bool hal = isRequestFromHal(request);
 
         if (hal) {
-            summary = std::string("Blocked directly due to HAL call: ")
+            description = std::string("Blocked directly due to HAL call: ")
                 .append(request->toString());
+            analysis.suspectTid = request->tid;
         }
     }
-    if (summary.empty() && !pendingPossible.empty()) {
+    if (description.empty() && !pendingPossible.empty()) {
         for (const auto& request : pendingPossible) {
             const bool hal = isRequestFromHal(request);
             if (hal) {
                 // The first blocked call is the most likely one.
                 // Recent calls might be temporarily blocked
                 // calls such as write() or read() depending on kDuration.
-                summary = std::string("Blocked possibly due to HAL call: ")
+                description = std::string("Blocked possibly due to HAL call: ")
                     .append(request->toString());
-                analysis.HALBlockedTid = request->tid;
+                analysis.suspectTid = request->tid;
             }
        }
     }
@@ -241,15 +244,11 @@
     }
 }
 
-bool TimerThread::NoTimeoutMap::isValidHandle(Handle handle) const {
-    return handle > getIndexedHandle(mNoTimeoutRequests);
-}
-
 TimerThread::Handle TimerThread::NoTimeoutMap::add(std::shared_ptr<const Request> request) {
     std::lock_guard lg(mNTMutex);
     // A unique handle is obtained by mNoTimeoutRequests.fetch_add(1),
     // This need not be under a lock, but we do so anyhow.
-    const Handle handle = getIndexedHandle(mNoTimeoutRequests++);
+    const Handle handle = getUniqueHandle_l();
     mMap[handle] = request;
     return handle;
 }
@@ -271,16 +270,6 @@
     }
 }
 
-TimerThread::Handle TimerThread::MonitorThread::getUniqueHandle_l(
-        std::chrono::milliseconds timeout) {
-    // To avoid key collisions, advance by 1 tick until the key is unique.
-    auto deadline = std::chrono::steady_clock::now() + timeout;
-    for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
-         deadline += std::chrono::steady_clock::duration(1))
-        ;
-    return deadline;
-}
-
 TimerThread::MonitorThread::MonitorThread(RequestQueue& timeoutQueue)
         : mTimeoutQueue(timeoutQueue)
         , mThread([this] { threadFunc(); }) {
@@ -299,25 +288,80 @@
 
 void TimerThread::MonitorThread::threadFunc() {
     std::unique_lock _l(mMutex);
+    ::android::base::ScopedLockAssertion lock_assertion(mMutex);
     while (!mShouldExit) {
+        Handle nextDeadline = INVALID_HANDLE;
+        Handle now = INVALID_HANDLE;
         if (!mMonitorRequests.empty()) {
-            Handle nextDeadline = mMonitorRequests.begin()->first;
-            if (nextDeadline < std::chrono::steady_clock::now()) {
+            nextDeadline = mMonitorRequests.begin()->first;
+            now = std::chrono::steady_clock::now();
+            if (nextDeadline < now) {
+                auto node = mMonitorRequests.extract(mMonitorRequests.begin());
                 // Deadline has expired, handle the request.
+                auto secondChanceDuration = node.mapped().first->secondChanceDuration;
+                if (secondChanceDuration.count() != 0) {
+                    // We now apply the second chance duration to find the clock
+                    // monotonic second deadline.  The unique key is then the
+                    // pair<second_deadline, first_deadline>.
+                    //
+                    // The second chance prevents a false timeout should there be
+                    // any clock monotonic advancement during suspend.
+                    auto newHandle = now + secondChanceDuration;
+                    ALOGD("%s: TimeCheck second chance applied for %s",
+                            __func__, node.mapped().first->tag.c_str()); // should be rare event.
+                    mSecondChanceRequests.emplace_hint(mSecondChanceRequests.end(),
+                            std::make_pair(newHandle, nextDeadline),
+                            std::move(node.mapped()));
+                    // increment second chance counter.
+                    mSecondChanceCount.fetch_add(1 /* arg */, std::memory_order_relaxed);
+                } else {
+                    {
+                        _l.unlock();
+                        // We add Request to retired queue early so that it can be dumped out.
+                        mTimeoutQueue.add(std::move(node.mapped().first));
+                        node.mapped().second(nextDeadline);
+                        // Caution: we don't hold lock when we call TimerCallback,
+                        // but this is the timeout case!  We will crash soon,
+                        // maybe before returning.
+                        // anything left over is released here outside lock.
+                    }
+                    // reacquire the lock - if something was added, we loop immediately to check.
+                    _l.lock();
+                }
+                // always process expiring monitor requests first.
+                continue;
+            }
+        }
+        // now process any second chance requests.
+        if (!mSecondChanceRequests.empty()) {
+            Handle secondDeadline = mSecondChanceRequests.begin()->first.first;
+            if (now == INVALID_HANDLE) now = std::chrono::steady_clock::now();
+            if (secondDeadline < now) {
+                auto node = mSecondChanceRequests.extract(mSecondChanceRequests.begin());
                 {
-                    auto node = mMonitorRequests.extract(mMonitorRequests.begin());
                     _l.unlock();
                     // We add Request to retired queue early so that it can be dumped out.
                     mTimeoutQueue.add(std::move(node.mapped().first));
-                    node.mapped().second(); // Caution: we don't hold lock here - but do we care?
-                                            // this is the timeout case!  We will crash soon,
-                                            // maybe before returning.
-                    // anything left over is released here outside lock.
+                    const Handle originalHandle = node.key().second;
+                    node.mapped().second(originalHandle);
+                    // Caution: we don't hold lock when we call TimerCallback.
+                    // This is a benign issue - we permit concurrent operations
+                    // while in the callback to the MonitorQueue.
+                    //
+                    // Anything left over is released here outside lock.
                 }
                 // reacquire the lock - if something was added, we loop immediately to check.
                 _l.lock();
                 continue;
             }
+            // update the deadline.
+            if (nextDeadline == INVALID_HANDLE) {
+                nextDeadline = secondDeadline;
+            } else {
+                nextDeadline = std::min(nextDeadline, secondDeadline);
+            }
+        }
+        if (nextDeadline != INVALID_HANDLE) {
             mCond.wait_until(_l, nextDeadline);
         } else {
             mCond.wait(_l);
@@ -326,26 +370,40 @@
 }
 
 TimerThread::Handle TimerThread::MonitorThread::add(
-        std::shared_ptr<const Request> request, std::function<void()>&& func,
-        std::chrono::milliseconds timeout) {
+        std::shared_ptr<const Request> request, TimerCallback&& func, Duration timeout) {
     std::lock_guard _l(mMutex);
     const Handle handle = getUniqueHandle_l(timeout);
-    mMonitorRequests.emplace(handle, std::make_pair(std::move(request), std::move(func)));
+    mMonitorRequests.emplace_hint(mMonitorRequests.end(),
+            handle, std::make_pair(std::move(request), std::move(func)));
     mCond.notify_all();
     return handle;
 }
 
 std::shared_ptr<const TimerThread::Request> TimerThread::MonitorThread::remove(Handle handle) {
+    std::pair<std::shared_ptr<const Request>, TimerCallback> data;
     std::unique_lock ul(mMutex);
-    const auto it = mMonitorRequests.find(handle);
-    if (it == mMonitorRequests.end()) {
-        return {};
+    ::android::base::ScopedLockAssertion lock_assertion(mMutex);
+    if (const auto it = mMonitorRequests.find(handle);
+        it != mMonitorRequests.end()) {
+        data = std::move(it->second);
+        mMonitorRequests.erase(it);
+        ul.unlock();  // manually release lock here so func (data.second)
+                      // is released outside of lock.
+        return data.first;  // request
     }
-    std::shared_ptr<const TimerThread::Request> request = std::move(it->second.first);
-    std::function<void()> func = std::move(it->second.second);
-    mMonitorRequests.erase(it);
-    ul.unlock();  // manually release lock here so func is released outside of lock.
-    return request;
+
+    // this check is O(N), but since the second chance requests are ordered
+    // in terms of earliest expiration time, we would expect better than average results.
+    for (auto it = mSecondChanceRequests.begin(); it != mSecondChanceRequests.end(); ++it) {
+        if (it->first.second == handle) {
+            data = std::move(it->second);
+            mSecondChanceRequests.erase(it);
+            ul.unlock();  // manually release lock here so func (data.second)
+                          // is released outside of lock.
+            return data.first; // request
+        }
+    }
+    return {};
 }
 
 void TimerThread::MonitorThread::copyRequests(
@@ -354,6 +412,13 @@
     for (const auto &[deadline, monitorpair] : mMonitorRequests) {
         requests.emplace_back(monitorpair.first);
     }
+    // we combine the second map with the first map - this is
+    // everything that is pending on the monitor thread.
+    // The second map will be older than the first map so this
+    // is in order.
+    for (const auto &[deadline, monitorpair] : mSecondChanceRequests) {
+        requests.emplace_back(monitorpair.first);
+    }
 }
 
 }  // namespace android::mediautils
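
The second-chance requests are kept in a map keyed by pair<second_deadline, original_deadline>:
the map stays ordered by the extended expiry that the monitor thread waits on, while the
original deadline (the caller's handle) is preserved so cancelTask() can still locate the
entry, at O(N) in the worst case. A small sketch of that key layout, independent of the
TimerThread internals:

    #include <cassert>
    #include <chrono>
    #include <map>
    #include <string>
    #include <utility>

    int main() {
        using Clock = std::chrono::steady_clock;
        using Handle = Clock::time_point;

        // Ordered by the new (second) deadline; the original deadline rides along.
        std::map<std::pair<Handle, Handle>, std::string> secondChance;

        const Handle original = Clock::now() + std::chrono::seconds(3);
        const Handle extended = original + std::chrono::seconds(1);
        secondChance.emplace(std::make_pair(extended, original), "IDevice::openStream");

        // Cancel path: linear scan for the original handle, as in MonitorThread::remove().
        bool cancelled = false;
        for (auto it = secondChance.begin(); it != secondChance.end(); ++it) {
            if (it->first.second == original) {
                secondChance.erase(it);
                cancelled = true;
                break;
            }
        }
        assert(cancelled);
        return 0;
    }
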
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index d26e6c2..bd9a462 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -9,14 +9,13 @@
 
 cc_defaults {
     name: "libmediautils_fuzzer_defaults",
+    host_supported: true,
     shared_libs: [
-        "libbatterystats_aidl",
         "libbinder",
-        "libcutils",
         "liblog",
+        "libcutils",
         "libmediautils",
         "libutils",
-        "libbinder",
         "framework-permission-aidl-cpp",
         "packagemanager_aidl-cpp",
     ],
@@ -27,11 +26,6 @@
         "-Werror",
         "-Wno-c++2a-extensions",
     ],
-
-    header_libs: [
-        "bionic_libc_platform_headers",
-        "libmedia_headers",
-    ],
 }
 
 cc_fuzz {
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 130feee..d672fb0 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -19,6 +19,7 @@
 #include <utils/String16.h>
 #include <android/log.h>
 #include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TidWrapper.h>
 #include "fuzzer/FuzzedDataProvider.h"
 using android::IBatteryStats;
 using android::IBinder;
@@ -34,11 +35,16 @@
     const sp<IServiceManager> sm(defaultServiceManager());
     if (sm != nullptr) {
         const String16 name("batterystats");
-        batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
-        if (batteryStatService == nullptr) {
+        sp<IBinder> obj = sm->checkService(name);
+        if (!obj) {
             ALOGW("batterystats service unavailable!");
             return nullptr;
         }
+        batteryStatService = checked_interface_cast<IBatteryStats>(obj);
+        if (batteryStatService == nullptr) {
+            ALOGW("batterystats service interface is invalid");
+            return nullptr;
+        }
     }
     return batteryStatService;
 }
@@ -50,7 +56,8 @@
     int32_t priority = data_provider.ConsumeIntegral<int32_t>();
     bool is_for_app = data_provider.ConsumeBool();
     bool async = data_provider.ConsumeBool();
-    requestPriority(getpid(), gettid(), priority, is_for_app, async);
+    requestPriority(getpid(), android::mediautils::getThreadIdWrapper(), priority, is_for_app,
+                    async);
     // TODO: Verify and re-enable in AOSP (R).
     // bool enable = data_provider.ConsumeBool();
     // We are just using batterystats to avoid the need
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index 51e8d7a..15f043a 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -25,6 +25,7 @@
 
 static constexpr int kMaxOperations = 50;
 static constexpr int kMaxStringLen = 256;
+static constexpr int kMaxSpaces = 1000;
 
 using android::content::AttributionSourceState;
 
@@ -35,7 +36,9 @@
             pm.allowPlaybackCapture(uid);
         },
         [](FuzzedDataProvider* data_provider, android::MediaPackageManager pm) -> void {
-            int spaces = data_provider->ConsumeIntegral<int>();
+            /* A very large number of spaces made the file write slow.
+             * Limit the value to a bounded range to avoid fuzzer timeouts. */
+            int spaces = data_provider->ConsumeIntegralInRange<int>(0, kMaxSpaces);
 
             // Dump everything into /dev/null
             int fd = open("/dev/null", O_WRONLY);
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
index 7966469..65b2885 100644
--- a/media/utils/fuzzers/TimeCheckFuzz.cpp
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -48,7 +48,9 @@
     std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
 
     // 3. The constructor, which is fuzzed here:
-    android::mediautils::TimeCheck timeCheck(name.c_str(), {} /* onTimer */, timeoutMs);
+    android::mediautils::TimeCheck timeCheck(name.c_str(), {} /* onTimer */,
+            std::chrono::milliseconds(timeoutMs),
+            {} /* secondChanceDuration */, true /* crashOnTimeout */);
     // We will leave some buffer to avoid sleeping too long
     uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
 
diff --git a/media/utils/include/mediautils/ExtendedAccumulator.h b/media/utils/include/mediautils/ExtendedAccumulator.h
new file mode 100644
index 0000000..7e3e170
--- /dev/null
+++ b/media/utils/include/mediautils/ExtendedAccumulator.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <cstdint>
+#include <tuple>
+#include <type_traits>
+
+#include <log/log.h>
+
+namespace android::mediautils {
+
+// The goal of this class is to detect and accumulate wraparound occurrences on a
+// lower sized integer.
+
+// This class assumes that the underlying unsigned type is either incremented or
+// decremented by at most the underlying signed type between any two subsequent
+// polls (or construction). This is well-defined as the modular nature of
+// unsigned arithmetic ensures that every new value maps 1-1 to an
+// increment/decrement over the same sized signed type. It also ensures that our
+// counter will be equivalent mod the size of the integer even if the underlying
+// type is modified outside of this range.
+//
+// For convenience, this class is thread compatible. Additionally, it is safe
+// as long as there is only one writer.
+template <typename Integral = uint32_t, typename AccumulatingType = uint64_t>
+class ExtendedAccumulator {
+    static_assert(sizeof(Integral) < sizeof(AccumulatingType),
+                  "Accumulating type should be larger than underlying type");
+    static_assert(std::is_integral_v<Integral> && std::is_unsigned_v<Integral>,
+                  "Wraparound behavior is only well-defiend for unsigned ints");
+    static_assert(std::is_integral_v<AccumulatingType>);
+
+  public:
+    enum class Wrap {
+        NORMAL = 0,
+        UNDERFLOW = 1,
+        OVERFLOW = 2,
+    };
+
+    using UnsignedInt = Integral;
+    using SignedInt = std::make_signed_t<UnsignedInt>;
+
+    explicit ExtendedAccumulator(AccumulatingType initial = 0) : mAccumulated(initial) {}
+
+    // Returns a pair of the calculated change on the accumulating value, and a
+    // Wrap type representing the type of wraparound (if any) which occurred.
+    std::pair<SignedInt, Wrap> poll(UnsignedInt value) {
+        auto acc = mAccumulated.load(std::memory_order_relaxed);
+        const auto bottom_bits = static_cast<UnsignedInt>(acc);
+        std::pair<SignedInt, Wrap> res = {0, Wrap::NORMAL};
+        const bool overflow = __builtin_sub_overflow(value, bottom_bits, &res.first);
+
+        if (overflow) {
+            res.second = (res.first > 0) ? Wrap::OVERFLOW : Wrap::UNDERFLOW;
+        }
+
+        const bool acc_overflow = __builtin_add_overflow(acc, res.first, &acc);
+        // If our *accumulating* type overflows or underflows (depending on its
+        // signedness), we should abort.
+        if (acc_overflow) LOG_ALWAYS_FATAL("Unexpected overflow/underflow in %s", __func__);
+
+        mAccumulated.store(acc, std::memory_order_relaxed);
+        return res;
+    }
+
+    AccumulatingType getValue() const { return mAccumulated.load(std::memory_order_relaxed); }
+
+  private:
+    // Invariant - the bottom underlying bits of accumulated are the same as the
+    // last value provided to poll.
+    std::atomic<AccumulatingType> mAccumulated;
+};
+
+}  // namespace android::mediautils
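
A minimal usage sketch of ExtendedAccumulator (illustration only, not part of the
patch; the initial and polled counter values below are hypothetical):

#include <mediautils/ExtendedAccumulator.h>

void exampleAccumulate() {
    using android::mediautils::ExtendedAccumulator;
    // Start with an accumulated value just below the 32-bit wrap point.
    ExtendedAccumulator<uint32_t, uint64_t> frames(0xFFFFFFF0u);
    // The 32-bit counter has since wrapped around to 0x10 (a forward step of 0x20).
    const auto [delta, wrap] = frames.poll(0x10);
    // delta == 0x20, wrap == decltype(frames)::Wrap::OVERFLOW, and
    // frames.getValue() == 0x100000010 on the 64-bit accumulator.
    (void)delta; (void)wrap;
}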
diff --git a/media/utils/include/mediautils/FixedString.h b/media/utils/include/mediautils/FixedString.h
new file mode 100644
index 0000000..047aa82
--- /dev/null
+++ b/media/utils/include/mediautils/FixedString.h
@@ -0,0 +1,282 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <string>
+#include <string_view>
+
+namespace android::mediautils {
+
+/*
+ * FixedString is a stack allocatable string buffer that supports
+ * simple appending of other strings and string_views.
+ *
+ * It is designed for no-malloc operation when std::string
+ * small buffer optimization is insufficient.
+ *
+ * To keep code small, use asStringView() for operations on this.
+ *
+ * Notes:
+ * 1) Appending beyond the internal buffer size results in truncation.
+ *
+ * Alternatives:
+ * 1) If you want a sharable copy-on-write string implementation,
+ *    consider using the legacy android::String8().
+ * 2) Using std::string with a fixed stack allocator may suit your needs,
+ *    but exception avoidance is tricky.
+ * 3) Using C++20 ranges https://en.cppreference.com/w/cpp/ranges if you don't
+ *    need backing store.  Be careful about allocation with ranges.
+ *
+ * Good small sizes are multiples of 16 minus 2, e.g. 14, 30, 46, 62.
+ *
+ * Implementation Notes:
+ * 1) No iterators or [] for FixedString - please convert to std::string_view.
+ * 2) For small N (e.g. less than 62), consider a change to always zero fill and
+ *    potentially prevent always zero terminating (if one only does append).
+ *
+ * Possible arguments to create/append:
+ * 1) A FixedString.
+ * 2) A single char.
+ * 3) A char * pointer.
+ * 4) A std::string.
+ * 5) A std::string_view (or something convertible to it).
+ *
+ * Example:
+ *
+ * FixedString s1(std::string("a"));    // ctor
+ * s1 << "bc" << 'd' << '\n';           // streaming append
+ * s1 += "hello";                       // += append
+ * ASSERT_EQ(s1, "abcd\nhello");
+ */
+template <uint32_t N>
+struct FixedString
+{
+    // Find the best size type.
+    using strsize_t = std::conditional_t<(N > 255), uint32_t, uint8_t>;
+
+    // constructors
+    FixedString() { // override default
+        buffer_[0] = '\0';
+    }
+
+    FixedString(const FixedString& other) { // override default.
+        copyFrom<N>(other);
+    }
+
+    // The following constructor is not explicit to allow
+    // FixedString<8> s = "abcd";
+    template <typename ...Types>
+    FixedString(Types&&... args) {
+        append(std::forward<Types>(args)...);
+    }
+
+    // copy assign (copyFrom checks for equality and returns *this).
+    FixedString& operator=(const FixedString& other) { // override default.
+        return copyFrom<N>(other);
+    }
+
+    template <typename ...Types>
+    FixedString& operator=(Types&&... args) {
+        size_ = 0;
+        return append(std::forward<Types>(args)...);
+    }
+
+    // operator equals
+    bool operator==(const char *s) const {
+        return strncmp(c_str(), s, capacity() + 1) == 0;
+    }
+
+    bool operator==(std::string_view s) const {
+        return size() == s.size() && memcmp(data(), s.data(), size()) == 0;
+    }
+
+    // operator not-equals
+    template <typename T>
+    bool operator!=(const T& other) const {
+        return !operator==(other);
+    }
+
+    // operator +=
+    template <typename ...Types>
+    FixedString& operator+=(Types&&... args) {
+        return append(std::forward<Types>(args)...);
+    }
+
+    // conversion to std::string_view.
+    operator std::string_view() const {
+        return asStringView();
+    }
+
+    // basic observers
+    size_t buffer_offset() const { return offsetof(std::decay_t<decltype(*this)>, buffer_); }
+    static constexpr uint32_t capacity() { return N; }
+    uint32_t size() const { return size_; }
+    uint32_t remaining() const { return size_ >= N ? 0 : N - size_; }
+    bool empty() const { return size_ == 0; }
+    bool full() const { return size_ == N; }  // when full, possible truncation risk.
+    char * data() { return buffer_; }
+    const char * data() const { return buffer_; }
+    const char * c_str() const { return buffer_; }
+
+    inline std::string_view asStringView() const {
+        return { buffer_, static_cast<size_t>(size_) };
+    }
+    inline std::string asString() const {
+        return { buffer_, static_cast<size_t>(size_) };
+    }
+
+    void clear() { size_ = 0; buffer_[0] = 0; }
+
+    // Implementation of append - using templates
+    // to guarantee precedence in the presence of ambiguity.
+    //
+    // Consider C++20 template overloading through constraints and concepts.
+    template <typename T>
+    FixedString& append(const T& t) {
+        using decayT = std::decay_t<T>;
+        if constexpr (is_specialization_v<decayT, FixedString>) {
+            // A FixedString<U>
+            if (size_ == 0) {
+                // optimization to erase everything.
+                return copyFrom(t);
+            } else {
+                return appendStringView({t.data(), t.size()});
+            }
+        } else if constexpr(std::is_same_v<decayT, char>) {
+            if (size_ < N) {
+                buffer_[size_++] = t;
+                buffer_[size_] = '\0';
+            }
+            return *this;
+        } else if constexpr(std::is_same_v<decayT, char *>) {
+            // Some char* ptr.
+            return appendString(t);
+        } else if constexpr (std::is_convertible_v<decayT, std::string_view>) {
+            // std::string_view, std::string, or some other convertible type.
+            return appendStringView(t);
+        } else /* constexpr */ {
+            static_assert(dependent_false_v<T>, "non-matching append type");
+        }
+    }
+
+    FixedString& appendStringView(std::string_view s) {
+        uint32_t total = std::min(static_cast<size_t>(N - size_), s.size());
+        memcpy(buffer_ + size_, s.data(), total);
+        size_ += total;
+        buffer_[size_] = '\0';
+        return *this;
+    }
+
+    FixedString& appendString(const char *s) {
+        // strncpy zero pads to the end,
+        // strlcpy returns total expected length,
+        // we don't have strncpy_s in Bionic,
+        // so we write our own here.
+        while (size_ < N && *s != '\0') {
+            buffer_[size_++] = *s++;
+        }
+        buffer_[size_] = '\0';
+        return *this;
+    }
+
+    // Copy initialize the struct.
+    // Note: We are POD but customize the copying for acceleration
+    // of moving small strings embedded in large buffers.
+    template <uint32_t U>
+    FixedString& copyFrom(const FixedString<U>& other) {
+        if ((void*)this != (void*)&other) { // not a self-assignment
+            if (other.size() == 0) {
+                size_ = 0;
+                buffer_[0] = '\0';
+                return *this;
+            }
+            constexpr size_t kSizeToCopyWhole = 64;
+            if constexpr (N == U &&
+                    sizeof(*this) == sizeof(other) &&
+                    sizeof(*this) <= kSizeToCopyWhole) {
+                // As we have the same str size type, we can just
+                // memcpy with fixed size, which can be easily optimized.
+                memcpy(static_cast<void*>(this), static_cast<const void*>(&other), sizeof(*this));
+                return *this;
+            }
+            if constexpr (std::is_same_v<strsize_t, typename FixedString<U>::strsize_t>) {
+                constexpr size_t kAlign = 8;  // align to a multiple of 8.
+                static_assert((kAlign & (kAlign - 1)) == 0); // power of 2.
+                // we check any alignment issues.
+                if (buffer_offset() == other.buffer_offset() && other.size() <= capacity()) {
+                    // improve on standard POD copying by reducing size.
+                    const size_t mincpy = buffer_offset() + other.size() + 1 /* nul */;
+                    const size_t maxcpy = std::min(sizeof(*this), sizeof(other));
+                    const size_t cpysize = std::min((mincpy + kAlign - 1) & ~(kAlign - 1), maxcpy);
+                    memcpy(static_cast<void*>(this), static_cast<const void*>(&other), cpysize);
+                    return *this;
+                }
+            }
+            size_ = std::min(other.size(), capacity());
+            memcpy(buffer_, other.data(), size_);
+            buffer_[size_] = '\0';  // zero terminate.
+        }
+        return *this;
+    }
+
+private:
+    //  Template helper methods
+
+    template <typename Test, template <uint32_t> class Ref>
+    struct is_specialization : std::false_type {};
+
+    template <template <uint32_t> class Ref, uint32_t UU>
+    struct is_specialization<Ref<UU>, Ref>: std::true_type {};
+
+    template <typename Test, template <uint32_t> class Ref>
+    static inline constexpr bool is_specialization_v = is_specialization<Test, Ref>::value;
+
+    // For static assert(false) we need a template version to avoid early failure.
+    template <typename T>
+    static inline constexpr bool dependent_false_v = false;
+
+    // POD variables
+    strsize_t size_ = 0;
+    char buffer_[N + 1 /* allow zero termination */];
+};
+
+// Stream operator syntactic sugar.
+// Example:
+// s << 'b' << "c" << "d" << '\n';
+template <uint32_t N, typename ...Types>
+FixedString<N>& operator<<(FixedString<N>& fs, Types&&... args) {
+    return fs.append(std::forward<Types>(args)...);
+}
+
+// We do not use a default size for fixed string as changing
+// the default size would lead to different behavior - we want the
+// size to be explicitly known.
+
+// FixedString62 of 62 chars fits in one typical cache line.
+using FixedString62 = FixedString<62>;
+
+// Slightly smaller
+using FixedString30 = FixedString<30>;
+
+// Since we have added copy and assignment optimizations,
+// we are no longer trivially assignable and copyable.
+// But we check standard layout here to prevent inclusion of unacceptable members or virtuals.
+static_assert(std::is_standard_layout_v<FixedString62>);
+static_assert(std::is_standard_layout_v<FixedString30>);
+
+}  // namespace android::mediautils
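
A small sketch (illustration only, not part of the patch) of the truncation
behavior described in note 1 of the class comment:

#include <cassert>

#include <mediautils/FixedString.h>

void exampleFixedString() {
    using android::mediautils::FixedString;
    FixedString<8> s("12345678");  // exactly fills the 8-char capacity
    s << "overflow";               // silently truncated; nothing is appended
    assert(s.full() && s.size() == 8);
    assert(s == "12345678");
    assert(s.asStringView() == "12345678");
}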
diff --git a/media/utils/include/mediautils/InPlaceFunction.h b/media/utils/include/mediautils/InPlaceFunction.h
new file mode 100644
index 0000000..17c6274
--- /dev/null
+++ b/media/utils/include/mediautils/InPlaceFunction.h
@@ -0,0 +1,344 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdlib>
+#include <functional>
+#include <memory>
+#include <type_traits>
+
+namespace android::mediautils {
+
+namespace detail {
+// Vtable interface for erased types
+template <typename Ret, typename... Args>
+struct ICallableTable {
+    // Destroy the erased type
+    void (*destroy)(void* storage) = nullptr;
+    // Call the erased object
+    Ret (*invoke)(void* storage, Args&&...) = nullptr;
+    // **Note** the next two functions only copy object data, not the vptr
+    // Copy the erased object to a new InPlaceFunction buffer
+    void (*copy_to)(const void* storage, void* other) = nullptr;
+    // Move the erased object to a new InPlaceFunction buffer
+    void (*move_to)(void* storage, void* other) = nullptr;
+};
+}  // namespace detail
+
+// This class is an *almost* drop-in replacement for std::function which is guaranteed to never
+// allocate, and always holds the type erased functional object in an in-line small buffer of
+// templated size. If the object is too large to hold, the type will fail to instantiate.
+//
+// Some notable differences are:
+// - operator() is not const (unlike std::function where the call operator is
+// const even if the erased type is not const callable). This retains const
+// correctness by default. A workaround is keeping InPlaceFunction mutable.
+// - Moving from an InPlaceFunction leaves the object in a valid state (operator
+// bool remains true), similar to std::optional/std::variant.
+// Calls to the object are still defined (and are equivalent
+// to calling the underlying type after it has been moved from). To opt-out
+// (and/or ensure safety), clearing the object is recommended:
+//      func1 = std::move(func2); // func2 still valid (and moved-from) after this line
+//      func2 = nullptr; // calling func2 will now abort
+// - Unsafe implicit conversions of the return value to a reference type are
+// prohibited due to the risk of dangling references (some of this safety was
+// added to std::function in c++23). Only converting a reference to a reference to base class is
+// permitted:
+//      std::function<Base&()> = []() -> Derived& {...}
+// - Some (current libc++ implementation) implementations of std::function
+// incorrectly fail to handle returning non-moveable types which is valid given
+// mandatory copy elision.
+//
+// Additionally, the stored functional will use the typical rules of overload
+// resolution to disambiguate the correct call, except, the target class will
+// always be implicitly a non-const lvalue when called. If a different overload
+// is preferred, wrapping the target class in a lambda with explicit casts is
+// recommended (or using inheritance, mixins or CRTP). This avoids the
+// complexity of utilizing abominable function types as template params.
+template <typename, size_t BufferSize = 32>
+class InPlaceFunction;
+// We partially specialize to match types which are spelled like functions
+template <typename Ret, typename... Args, size_t BufferSize>
+class InPlaceFunction<Ret(Args...), BufferSize> {
+  public:
+    // Storage Type Details
+    static constexpr size_t Size = BufferSize;
+    static constexpr size_t Alignment = alignof(std::max_align_t);
+    using Buffer_t = std::aligned_storage_t<Size, Alignment>;
+    template <typename T, size_t Other>
+    friend class InPlaceFunction;
+
+  private:
+    // Callable which is used for empty InPlaceFunction objects (to match the
+    // std::function interface).
+    struct BadCallable {
+        [[noreturn]] Ret operator()(Args...) { std::abort(); }
+    };
+    static_assert(std::is_trivially_destructible_v<BadCallable>);
+
+    // Implementation of vtable interface for erased types.
+    // Contains only static vtable instantiated once for each erased type and
+    // static helpers.
+    template <typename T>
+    struct TableImpl {
+        // T should be a decayed type
+        static_assert(std::is_same_v<T, std::decay_t<T>>);
+
+        // Helper functions to get an unerased reference to the type held in the
+        // buffer. std::launder is required to avoid strict aliasing rules.
+        // The cast is always defined, as a precondition for these calls is that
+        // (exactly) a T was placement new constructed into the buffer.
+        constexpr static T& getRef(void* storage) {
+            return *std::launder(reinterpret_cast<T*>(storage));
+        }
+
+        constexpr static const T& getRef(const void* storage) {
+            return *std::launder(reinterpret_cast<const T*>(storage));
+        }
+
+        // Constexpr implies inline
+        constexpr static detail::ICallableTable<Ret, Args...> table = {
+                // Stateless lambdas are convertible to function ptrs
+                .destroy = [](void* storage) { getRef(storage).~T(); },
+                .invoke = [](void* storage, Args&&... args) -> Ret {
+                    if constexpr (std::is_void_v<Ret>) {
+                        std::invoke(getRef(storage), std::forward<Args>(args)...);
+                    } else {
+                        return std::invoke(getRef(storage), std::forward<Args>(args)...);
+                    }
+                },
+                .copy_to = [](const void* storage,
+                              void* other) { ::new (other) T(getRef(storage)); },
+                .move_to = [](void* storage,
+                              void* other) { ::new (other) T(std::move(getRef(storage))); },
+        };
+    };
+
+    // Check size/align requirements for the T in Buffer_t.
+    template <typename T>
+    static constexpr bool WillFit_v = sizeof(T) <= Size && alignof(T) <= Alignment;
+
+    // Check size/align requirements for a function to function conversion
+    template <typename T>
+    static constexpr bool ConversionWillFit_v = (T::Size < Size) && (T::Alignment <= Alignment);
+
+    template <typename T>
+    struct IsInPlaceFunction : std::false_type {};
+
+    template <size_t BufferSize_>
+    struct IsInPlaceFunction<InPlaceFunction<Ret(Args...), BufferSize_>> : std::true_type {};
+
+    template <typename T>
+    static T BetterDeclval();
+    template <typename T>
+    static void CheckImplicitConversion(T);
+
+    template <class T, class U, class = void>
+    struct CanImplicitConvert : std::false_type {};
+
+    // std::is_convertible/std::invokeable has a bug (in libc++) regarding
+    // mandatory copy elision for non-moveable types. So, we roll our own.
+    // https://github.com/llvm/llvm-project/issues/55346
+    template <class From, class To>
+    struct CanImplicitConvert<From, To,
+                              decltype(CheckImplicitConversion<To>(BetterDeclval<From>()))>
+        : std::true_type {};
+
+    // Check if the provided type is a valid functional to be type-erased.
+    // if constexpr utilized for short-circuit behavior
+    template <typename T>
+    static constexpr bool isValidFunctional() {
+        using Target = std::decay_t<T>;
+        if constexpr (IsInPlaceFunction<Target>::value || std::is_same_v<Target, std::nullptr_t>) {
+            // Other overloads handle these cases
+            return false;
+        } else if constexpr (std::is_invocable_v<Target, Args...>) {
+            // The target type is a callable (with some unknown return value)
+            if constexpr (std::is_void_v<Ret>) {
+                // Any return value can be dropped to model a void returning
+                // function.
+                return WillFit_v<Target>;
+            } else {
+                using RawRet = std::invoke_result_t<Target, Args...>;
+                if constexpr (CanImplicitConvert<RawRet, Ret>::value) {
+                    if constexpr (std::is_reference_v<Ret>) {
+                        // If the return type is a reference, in order to
+                        // avoid dangling references, we only permit functionals
+                        // which return a reference to the exact type, or a base
+                        // type.
+                        if constexpr (std::is_reference_v<RawRet> &&
+                                      (std::is_same_v<std::decay_t<Ret>, std::decay_t<RawRet>> ||
+                                       std::is_base_of_v<std::decay_t<Ret>,
+                                                         std::decay_t<RawRet>>)) {
+                            return WillFit_v<Target>;
+                        }
+                        return false;
+                    }
+                    return WillFit_v<Target>;
+                }
+                // If we can't convert the raw return type, the functional is invalid.
+                return false;
+            }
+        }
+        return false;
+    }
+
+    template <typename T>
+    static constexpr bool IsValidFunctional_v = isValidFunctional<T>();
+    // Check if the type is a strictly smaller sized InPlaceFunction
+    template <typename T>
+    static constexpr bool isConvertibleFunc() {
+        using Target = std::decay_t<T>;
+        if constexpr (IsInPlaceFunction<Target>::value) {
+            return ConversionWillFit_v<Target>;
+        }
+        return false;
+    }
+
+    template <typename T>
+    static constexpr bool IsConvertibleFunc_v = isConvertibleFunc<T>();
+
+    // Members below
+    // This must come first for alignment
+    Buffer_t storage_;
+    const detail::ICallableTable<Ret, Args...>* vptr_;
+
+    constexpr void copy_to(InPlaceFunction& other) const {
+        vptr_->copy_to(std::addressof(storage_), std::addressof(other.storage_));
+        other.vptr_ = vptr_;
+    }
+
+    constexpr void move_to(InPlaceFunction& other) {
+        vptr_->move_to(std::addressof(storage_), std::addressof(other.storage_));
+        other.vptr_ = vptr_;
+    }
+
+    constexpr void destroy() { vptr_->destroy(std::addressof(storage_)); }
+
+    template <typename T, typename Target = std::decay_t<T>>
+    constexpr void genericInit(T&& t) {
+        vptr_ = &TableImpl<Target>::table;
+        ::new (std::addressof(storage_)) Target(std::forward<T>(t));
+    }
+
+    template <typename T, typename Target = std::decay_t<T>>
+    constexpr void convertingInit(T&& smallerFunc) {
+        // Redundant, but just in case.
+        static_assert(Target::Size < Size && Target::Alignment <= Alignment);
+        if constexpr (std::is_lvalue_reference_v<T>) {
+            smallerFunc.vptr_->copy_to(std::addressof(smallerFunc.storage_),
+                                       std::addressof(storage_));
+        } else {
+            smallerFunc.vptr_->move_to(std::addressof(smallerFunc.storage_),
+                                       std::addressof(storage_));
+        }
+        vptr_ = smallerFunc.vptr_;
+    }
+
+  public:
+    // Public interface
+    template <typename T, std::enable_if_t<IsValidFunctional_v<T>>* = nullptr>
+    constexpr InPlaceFunction(T&& t) {
+        genericInit(std::forward<T>(t));
+    }
+
+    // Conversion from smaller functions.
+    template <typename T, std::enable_if_t<IsConvertibleFunc_v<T>>* = nullptr>
+    constexpr InPlaceFunction(T&& t) {
+        convertingInit(std::forward<T>(t));
+    }
+
+    constexpr InPlaceFunction(const InPlaceFunction& other) { other.copy_to(*this); }
+
+    constexpr InPlaceFunction(InPlaceFunction&& other) { other.move_to(*this); }
+
+    // Making functions default constructible has pros and cons; we align
+    // with the standard.
+    constexpr InPlaceFunction() : InPlaceFunction(BadCallable{}) {}
+
+    constexpr InPlaceFunction(std::nullptr_t) : InPlaceFunction(BadCallable{}) {}
+
+#if __cplusplus >= 202002L
+    constexpr ~InPlaceFunction() {
+#else
+    ~InPlaceFunction() {
+#endif
+        destroy();
+    }
+
+    // The std::function call operator is marked const, but this violates const
+    // correctness. We deviate from the standard and do not mark the operator as
+    // const. Collections of InPlaceFunctions should probably be mutable.
+    constexpr Ret operator()(Args... args) {
+        if constexpr (std::is_void_v<Ret>) {
+            vptr_->invoke(std::addressof(storage_), std::forward<Args>(args)...);
+        } else {
+            return vptr_->invoke(std::addressof(storage_), std::forward<Args>(args)...);
+        }
+    }
+
+    constexpr InPlaceFunction& operator=(const InPlaceFunction& other) {
+        if (std::addressof(other) == this) return *this;
+        destroy();
+        other.copy_to(*this);
+        return *this;
+    }
+
+    constexpr InPlaceFunction& operator=(InPlaceFunction&& other) {
+        if (std::addressof(other) == this) return *this;
+        destroy();
+        other.move_to(*this);
+        return *this;
+    }
+
+    template <typename T, std::enable_if_t<IsValidFunctional_v<T>>* = nullptr>
+    constexpr InPlaceFunction& operator=(T&& t) {
+        // We can't assign to ourselves, since T is a different type
+        destroy();
+        genericInit(std::forward<T>(t));
+        return *this;
+    }
+
+    // Explicitly defining this function saves a move/dtor
+    template <typename T, std::enable_if_t<IsConvertibleFunc_v<T>>* = nullptr>
+    constexpr InPlaceFunction& operator=(T&& t) {
+        // We can't assign to ourselves, since T is a different type
+        destroy();
+        convertingInit(std::forward<T>(t));
+        return *this;
+    }
+
+    constexpr InPlaceFunction& operator=(std::nullptr_t) { return operator=(BadCallable{}); }
+
+    // Moved from InPlaceFunctions are still considered valid (similar to
+    // std::optional). If using std::move on a function object explicitly, it is
+    // recommended that the object is reset using nullptr.
+    constexpr explicit operator bool() const { return vptr_ != &TableImpl<BadCallable>::table; }
+
+    constexpr void swap(InPlaceFunction& other) {
+        if (std::addressof(other) == this) return;
+        InPlaceFunction tmp{std::move(other)};
+        other.destroy();
+        move_to(other);
+        destroy();
+        tmp.move_to(*this);
+    }
+
+    friend constexpr void swap(InPlaceFunction& lhs, InPlaceFunction& rhs) { lhs.swap(rhs); }
+};
+
+}  // namespace android::mediautils
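
A minimal usage sketch of InPlaceFunction (illustration only, not part of the
patch); the lambda and values are hypothetical:

#include <mediautils/InPlaceFunction.h>

int exampleInPlaceFunction() {
    using android::mediautils::InPlaceFunction;
    // Any callable up to the 32-byte default buffer is stored in-line; no allocation.
    InPlaceFunction<int(int)> addFive = [](int x) { return x + 5; };
    const int result = addFive(2);  // result == 7
    addFive = nullptr;              // reset; operator bool() now returns false
    return result;
}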
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
index 700fbaa..c8b36d8 100644
--- a/media/utils/include/mediautils/MethodStatistics.h
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -55,15 +55,23 @@
     /**
      * Adds a method event, typically execution time in ms.
      */
-    void event(Code code, FloatType executeMs) {
+    template <typename C>
+    void event(C&& code, FloatType executeMs) {
         std::lock_guard lg(mLock);
-        mStatisticsMap[code].add(executeMs);
+        auto it = mStatisticsMap.lower_bound(code);
+        if (it != mStatisticsMap.end() && it->first == static_cast<Code>(code)) {
+            it->second.add(executeMs);
+        } else {
+            // StatsType ctor takes an optional array of data for initialization.
+            FloatType dataArray[1] = { executeMs };
+            mStatisticsMap.emplace_hint(it, std::forward<C>(code), dataArray);
+        }
     }
 
     /**
      * Returns the name for the method code.
      */
-    std::string getMethodForCode(Code code) const {
+    std::string getMethodForCode(const Code& code) const {
         auto it = mMethodMap.find(code);
         return it == mMethodMap.end() ? std::to_string((int)code) : it->second;
     }
@@ -71,7 +79,7 @@
     /**
      * Returns the number of times the method was invoked by event().
      */
-    size_t getMethodCount(Code code) const {
+    size_t getMethodCount(const Code& code) const {
         std::lock_guard lg(mLock);
         auto it = mStatisticsMap.find(code);
         return it == mStatisticsMap.end() ? 0 : it->second.getN();
@@ -80,7 +88,7 @@
     /**
      * Returns the statistics object for the method.
      */
-    StatsType getStatistics(Code code) const {
+    StatsType getStatistics(const Code& code) const {
         std::lock_guard lg(mLock);
         auto it = mStatisticsMap.find(code);
         return it == mStatisticsMap.end() ? StatsType{} : it->second;
@@ -107,9 +115,10 @@
     }
 
 private:
-    const std::map<Code, std::string> mMethodMap;
+    // Note: we use a transparent comparator std::less<> for heterogeneous key lookup.
+    const std::map<Code, std::string, std::less<>> mMethodMap;
     mutable std::mutex mLock;
-    std::map<Code, StatsType> mStatisticsMap GUARDED_BY(mLock);
+    std::map<Code, StatsType, std::less<>> mStatisticsMap GUARDED_BY(mLock);
 };
 
 // Managed Statistics support.
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
similarity index 82%
rename from media/libstagefright/include/media/stagefright/ProcessInfo.h
rename to media/utils/include/mediautils/ProcessInfo.h
index 06b9c92..c27c939 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -18,8 +18,7 @@
 
 #define PROCESS_INFO_H_
 
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
 #include <map>
 #include <mutex>
 #include <utils/Condition.h>
@@ -34,6 +33,8 @@
     virtual bool isPidUidTrusted(int pid, int uid);
     virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
     virtual void removeProcessInfoOverride(int pid);
+    bool checkProcessExistent(const std::vector<int32_t>& pids,
+                              std::vector<bool>* existent) override;
 
 protected:
     virtual ~ProcessInfo();
@@ -46,7 +47,8 @@
     std::mutex mOverrideLock;
     std::map<int, ProcessInfoOverride> mOverrideMap GUARDED_BY(mOverrideLock);
 
-    DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo);
+    ProcessInfo(const ProcessInfo&) = delete;
+    ProcessInfo& operator=(const ProcessInfo&) = delete;
 };
 
 }  // namespace android
diff --git a/media/utils/include/mediautils/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
new file mode 100644
index 0000000..e3384ba
--- /dev/null
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PROCESS_INFO_INTERFACE_H_
+#define PROCESS_INFO_INTERFACE_H_
+
+#include <vector>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ProcessInfoInterface : public RefBase {
+    /*
+     * Gets the priority of the process (with given pid) as oom score.
+     *
+     * @param[in] pid pid of the process.
+     * @param[out] priority of the process.
+     *
+     * @return true for successful return and false otherwise.
+     */
+    virtual bool getPriority(int pid, int* priority) = 0;
+    /*
+     * Check whether the given pid is trusted or not.
+     *
+     * @param[in] pid pid of the process.
+     *
+     * @return true for trusted process and false otherwise.
+     */
+    virtual bool isPidTrusted(int pid) = 0;
+    /*
+     * Check whether the given pid and uid is trusted or not.
+     *
+     * @param[in] pid pid of the process.
+     * @param[in] uid uid of the process.
+     *
+     * @return true for trusted process and false otherwise.
+     */
+    virtual bool isPidUidTrusted(int pid, int uid) = 0;
+    /*
+     * Override process state and oom score of the pid.
+     *
+     * @param[in] pid pid of the process.
+     * @param[in] procState new state of the process to override with.
+     * @param[in] oomScore new oom score of the process to override with.
+     *
+     * @return true upon success and false otherwise.
+     */
+    virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+    /*
+     * Remove the override info of the given process.
+     *
+     * @param[in] pid pid of the process.
+     */
+    virtual void removeProcessInfoOverride(int pid) = 0;
+    /*
+     * Checks whether the processes with the given pids still exist.
+     *
+     * @param[in] pids List of pids to check for existence.
+     * @param[out] existent Boolean vector giving the existence state of each pid.
+     *
+     * @return true for successful return and false otherwise.
+     * On successful return:
+     *     - existent[i] true means pids[i] is still active, and
+     *     - existent[i] false means pids[i] has already terminated (nonexistent).
+     * On unsuccessful return, the output argument existent is invalid.
+     */
+    virtual bool checkProcessExistent(const std::vector<int32_t>& pids,
+                                      std::vector<bool>* existent) {
+        // A default implementation.
+        (void)pids;
+        (void)existent;
+        return false;
+    }
+
+protected:
+    virtual ~ProcessInfoInterface() {}
+};
+
+}  // namespace android
+
+#endif  // PROCESS_INFO_INTERFACE_H_
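
A hedged usage sketch of the checkProcessExistent() contract above (illustration
only, not part of the patch); the pids are hypothetical and the caller is assumed
to already hold a concrete ProcessInfoInterface implementation:

#include <cstdint>
#include <vector>

#include <mediautils/ProcessInfoInterface.h>
#include <utils/StrongPointer.h>

void exampleCheckProcessExistent(const android::sp<android::ProcessInfoInterface>& info) {
    const std::vector<int32_t> pids = {1234, 5678};  // hypothetical pids
    std::vector<bool> existent;
    if (info->checkProcessExistent(pids, &existent)) {
        // existent[i] is true while pids[i] is still active, false once it has terminated.
    }
    // On a false return, the contents of existent must not be used.
}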
diff --git a/media/utils/include/mediautils/SchedulingPolicyService.h b/media/utils/include/mediautils/SchedulingPolicyService.h
index 546cec5..af1fcd2 100644
--- a/media/utils/include/mediautils/SchedulingPolicyService.h
+++ b/media/utils/include/mediautils/SchedulingPolicyService.h
@@ -23,7 +23,7 @@
 
 class IBinder;
 // Request elevated priority for thread tid, whose thread group leader must be pid.
-// The priority parameter is currently restricted to either 1 or 2.
+// The priority parameter is currently restricted to the range 1 to 3.
 // The asynchronous parameter should be 'true' to return immediately,
 // after the request is enqueued but not necessarily executed.
 // The default value 'false' means to return after request has been enqueued and executed.
@@ -37,6 +37,12 @@
 // 'client' is ignored in this case.
 int requestCpusetBoost(bool enable, const sp<IBinder> &client);
 
+// Audio: Request Spatializer RT priority for thread tid, whose thread group leader must be pid.
+// Returns a positive value (the RT priority used) if successful,
+//         zero if no RT priority was selected,
+//         or a negative status code if the RT priority could not be set.
+int requestSpatializerPriority(pid_t pid, pid_t tid);
+
 }   // namespace android
 
 #endif  // _ANDROID_SCHEDULING_POLICY_SERVICE_H
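
A hedged sketch (illustration only, not part of the patch) of how a caller might
interpret the requestSpatializerPriority() return value documented above; the tid
argument is assumed to belong to the calling process:

#include <unistd.h>

#include <mediautils/SchedulingPolicyService.h>

void exampleRequestSpatializerPriority(pid_t tid) {
    const int prio = android::requestSpatializerPriority(getpid(), tid);
    if (prio > 0) {
        // RT priority 'prio' was applied to the thread.
    } else if (prio == 0) {
        // No RT priority was selected; the thread keeps its default scheduling.
    } else {
        // Negative status code: the RT priority could not be set.
    }
}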
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index de20d55..3d7981a 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -130,7 +130,7 @@
     std::optional<bool> doIsAllowed(uid_t uid);
     sp<content::pm::IPackageManagerNative> retrievePackageManager();
     sp<content::pm::IPackageManagerNative> mPackageManager; // To check apps manifest
-    uint_t mPackageManagerErrors = 0;
+    unsigned int mPackageManagerErrors = 0;
     struct Package {
         std::string name;
         bool playbackCaptureAllowed = false;
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
new file mode 100644
index 0000000..79621e2
--- /dev/null
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -0,0 +1,479 @@
+/*
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <iomanip>
+#include <limits>
+#include <mutex>
+#include <sstream>
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <log/log_main.h>
+#include <utils/StrongPointer.h>
+
+namespace std {
+template <typename T>
+struct hash<::android::wp<T>> {
+    size_t operator()(const ::android::wp<T>& x) const {
+        return std::hash<const T*>()(x.unsafe_get());
+    }
+};
+}  // namespace std
+
+namespace android::mediautils {
+
+// Allocations represent owning handles to a region of shared memory (and thus
+// should not be copied in order to fulfill RAII).
+// To share ownership between multiple objects, a
+// ref-counting solution such as sp or shared ptr is appropriate, so the dtor
+// is called once for a particular block of memory.
+
+using AllocationType = ::android::sp<IMemory>;
+using WeakAllocationType = ::android::wp<IMemory>;
+
+namespace shared_allocator_impl {
+constexpr inline size_t roundup(size_t size, size_t pageSize) {
+    LOG_ALWAYS_FATAL_IF(pageSize == 0 || (pageSize & (pageSize - 1)) != 0,
+                        "Page size not multiple of 2");
+    return ((size + pageSize - 1) & ~(pageSize - 1));
+}
+
+constexpr inline bool isHeapValid(const sp<IMemoryHeap>& heap) {
+    return (heap && heap->getBase() &&
+            heap->getBase() != MAP_FAILED);  // TODO if not mapped locally
+}
+
+template <typename, typename = void>
+static constexpr bool has_deallocate_all = false;
+
+template <typename T>
+static constexpr bool has_deallocate_all<
+        T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().deallocate_all()), void>,
+                            void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_owns = false;
+
+template <typename T>
+static constexpr bool
+        has_owns<T, std::enable_if_t<std::is_same_v<decltype(std::declval<T>().owns(
+                                                            std::declval<const AllocationType>())),
+                                                    bool>,
+                                     void>> = true;
+
+template <typename, typename = void>
+static constexpr bool has_dump = false;
+
+template <typename T>
+static constexpr bool has_dump<
+        T,
+        std::enable_if_t<std::is_same_v<decltype(std::declval<T>().dump()), std::string>, void>> =
+        true;
+
+}  // namespace shared_allocator_impl
+
+struct BasicAllocRequest {
+    size_t size;
+};
+struct NamedAllocRequest : public BasicAllocRequest {
+    std::string_view name;
+};
+
+// We are required to add a layer of indirection to hold a handle to the actual
+// block due to sp<> being unable to be created from an object once its
+// ref-count has dropped to zero. So, we have to hold onto an extra reference
+// here. We effectively want to know when the refCount of the object drops to
+// one, since we need to hold on to a reference to pass the object to interfaces
+// requiring an sp<>.
+// TODO is there some way to avoid paying this cost?
+template <typename Allocator>
+class ScopedAllocator;
+
+class ScopedAllocation : public BnMemory {
+  public:
+    template <typename T>
+    friend class ScopedAllocator;
+    template <typename Deallocator>
+    ScopedAllocation(const AllocationType& allocation, Deallocator&& deallocator)
+        : mAllocation(allocation), mDeallocator(std::forward<Deallocator>(deallocator)) {}
+
+    // Defer the implementation to the underlying mAllocation
+
+    virtual sp<IMemoryHeap> getMemory(ssize_t* offset = nullptr,
+                                      size_t* size = nullptr) const override {
+        return mAllocation->getMemory(offset, size);
+    }
+
+  private:
+    ~ScopedAllocation() override { mDeallocator(mAllocation); }
+
+    const AllocationType mAllocation;
+    const std::function<void(const AllocationType&)> mDeallocator;
+};
+
+// Allocations are only deallocated when going out of scope.
+// This should almost always be the outermost allocator.
+template <typename Allocator>
+class ScopedAllocator {
+  public:
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
+
+    ScopedAllocator() : mAllocator(std::make_shared<Allocator>()) {}
+
+    template <typename T>
+    auto allocate(T&& request) {
+        std::lock_guard l{*mLock};
+        const auto allocation = mAllocator->allocate(std::forward<T>(request));
+        if (!allocation) {
+            return sp<ScopedAllocation>{};
+        }
+        return sp<ScopedAllocation>::make(allocation,
+                [allocator = mAllocator, lock = mLock] (const AllocationType& allocation) {
+                    std::lock_guard l{*lock};
+                    allocator->deallocate(allocation);
+                });
+    }
+
+    // Deallocate and deallocate_all are implicitly unsafe due to double
+    // deallocates upon ScopedAllocation destruction. We can protect against this
+    // efficiently with a gencount (for deallocate_all) or inefficiently (for
+    // deallocate), but we choose not to.
+    //
+    // owns() is only safe to pseudo-implement due to static cast requirements.
+    template <typename Enable = bool>
+    auto owns(const sp<ScopedAllocation>& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+        std::lock_guard l{*mLock};
+        return mAllocator->owns(allocation->mAllocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+        std::lock_guard l{*mLock};
+        return mAllocator->dump();
+    }
+
+  private:
+    // We store a shared pointer in order to ensure that the allocator outlives
+    // allocations (whose destructors call back into the allocator).
+    const std::shared_ptr<Allocator> mAllocator;
+    const std::shared_ptr<std::mutex> mLock = std::make_shared<std::mutex>();
+};
+
+// A simple policy for PolicyAllocator which enforces a pool size and an allocation
+// size range.
+template <size_t PoolSize, size_t MinAllocSize = 0,
+          size_t MaxAllocSize = std::numeric_limits<size_t>::max()>
+class SizePolicy {
+    static_assert(PoolSize > 0);
+
+  public:
+    template <typename T>
+    bool isValid(T&& request) const {
+        static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+        return !(request.size > kMaxAllocSize || request.size < kMinAllocSize ||
+                 mPoolSize + request.size > kPoolSize);
+    }
+
+    void allocated(const AllocationType& alloc) { mPoolSize += alloc->size(); }
+
+    void deallocated(const AllocationType& alloc) { mPoolSize -= alloc->size(); }
+
+    void deallocated_all() { mPoolSize = 0; }
+
+    static constexpr size_t kPoolSize = PoolSize;
+    static constexpr size_t kMinAllocSize = MinAllocSize;
+    static constexpr size_t kMaxAllocSize = MaxAllocSize;
+
+  private:
+    size_t mPoolSize = 0;
+};
+
+// An allocator which accepts or rejects allocation requests by a parametrized
+// policy (which can carry state).
+template <typename Allocator, typename Policy>
+class PolicyAllocator {
+  public:
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    PolicyAllocator(Allocator allocator, Policy policy)
+        : mAllocator(allocator), mPolicy(std::move(policy)) {}
+
+    // Default initialize the allocator and policy
+    PolicyAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<android::mediautils::BasicAllocRequest, std::decay_t<T>>);
+        request.size = shared_allocator_impl::roundup(request.size, alignment());
+        if (!mPolicy.isValid(request)) {
+            return {};
+        }
+        AllocationType val = mAllocator.allocate(std::forward<T>(request));
+        if (val == nullptr) return val;
+        mPolicy.allocated(val);
+        return val;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mPolicy.deallocated(allocation);
+        mAllocator.deallocate(allocation);
+    }
+
+    template <typename Enable = void>
+    auto deallocate_all()
+            -> std::enable_if_t<shared_allocator_impl::has_deallocate_all<Allocator>, Enable> {
+        mAllocator.deallocate_all();
+        mPolicy.deallocated_all();
+    }
+
+    template <typename Enable = bool>
+    auto owns(const AllocationType& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+        return mAllocator.owns(allocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+        return mAllocator.dump();
+    }
+
+  private:
+    [[no_unique_address]] Allocator mAllocator;
+    [[no_unique_address]] Policy mPolicy;
+};
+
+// An allocator which keeps track of outstanding allocations for logging and
+// querying ownership.
+template <class Allocator>
+class SnoopingAllocator {
+  public:
+    struct AllocationData {
+        std::string name;
+        size_t allocation_number;
+    };
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    SnoopingAllocator(Allocator allocator, std::string_view name)
+        : mName(name), mAllocator(std::move(allocator)) {}
+
+    explicit SnoopingAllocator(std::string_view name) : mName(name), mAllocator(Allocator{}) {}
+
+    explicit SnoopingAllocator(Allocator allocator) : mAllocator(std::move(allocator)) {}
+
+    // Default construct allocator and name
+    SnoopingAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<NamedAllocRequest, std::decay_t<T>>);
+        AllocationType allocation = mAllocator.allocate(request);
+        if (allocation)
+            mAllocations.insert({WeakAllocationType{allocation},
+                                 {std::string{request.name}, mAllocationNumber++}});
+        return allocation;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mAllocations.erase(WeakAllocationType{allocation});
+        mAllocator.deallocate(allocation);
+    }
+
+    void deallocate_all() {
+        if constexpr (shared_allocator_impl::has_deallocate_all<Allocator>) {
+            mAllocator.deallocate_all();
+        } else {
+            for (auto& [mem, value] : mAllocations) {
+                mAllocator.deallocate(mem);
+            }
+        }
+        mAllocations.clear();
+    }
+
+    bool owns(const AllocationType& allocation) const {
+        return (mAllocations.count(WeakAllocationType{allocation}) > 0);
+    }
+
+    std::string dump() const {
+        std::ostringstream dump;
+        dump << mName << " Allocator Dump:\n";
+        dump << std::setw(8) << "HeapID" << std::setw(8) << "Size" << std::setw(8) << "Offset"
+             << std::setw(8) << "Order"
+             << "   Name\n";
+        for (auto& [mem, value] : mAllocations) {
+            // TODO Imem size and offset
+            const AllocationType handle = mem.promote();
+            if (!handle) {
+                dump << "Invalid memory lifetime!";
+                continue;
+            }
+            const auto heap = handle->getMemory();
+            dump << std::setw(8) << heap->getHeapID() << std::setw(8) << heap->getSize()
+                 << std::setw(8) << heap->getOffset() << std::setw(8) << value.allocation_number
+                 << "   " << value.name << "\n";
+        }
+        return dump.str();
+    }
+
+    const std::unordered_map<WeakAllocationType, AllocationData>& getAllocations() {
+        return mAllocations;
+    }
+
+  private:
+    const std::string mName;
+    [[no_unique_address]] Allocator mAllocator;
+    // We don't take copies of the underlying information in an allocation,
+    // rather, the allocation information is put on the heap and referenced via
+    // a ref-counted solution. So, the address of the allocation information is
+    // appropriate to hash. In order for this block to be freed, the underlying
+    // allocation must be referenced by no one (thus deallocated).
+    std::unordered_map<WeakAllocationType, AllocationData> mAllocations;
+    // For debugging purposes, monotonic
+    size_t mAllocationNumber = 0;
+};
+
+// An allocator which passes a failed allocation request to a backup allocator.
+template <class PrimaryAllocator, class SecondaryAllocator>
+class FallbackAllocator {
+  public:
+    static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
+    static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
+
+    static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+
+    FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
+        : mPrimary(primary), mSecondary(secondary) {}
+
+    // Default construct primary and secondary allocator
+    FallbackAllocator() = default;
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        AllocationType allocation = mPrimary.allocate(std::forward<T>(request));
+        if (!allocation) allocation = mSecondary.allocate(std::forward<T>(request));
+        return allocation;
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        if (mPrimary.owns(allocation)) {
+            mPrimary.deallocate(allocation);
+        } else {
+            mSecondary.deallocate(allocation);
+        }
+    }
+
+    template <typename Enable = void>
+    auto deallocate_all() -> std::enable_if_t<
+            shared_allocator_impl::has_deallocate_all<PrimaryAllocator> &&
+                    shared_allocator_impl::has_deallocate_all<SecondaryAllocator>,
+            Enable> {
+        mPrimary.deallocate_all();
+        mSecondary.deallocate_all();
+    }
+
+    template <typename Enable = bool>
+    auto owns(const AllocationType& allocation) const
+            -> std::enable_if_t<shared_allocator_impl::has_owns<SecondaryAllocator>, Enable> {
+        return mPrimary.owns(allocation) || mSecondary.owns(allocation);
+    }
+
+    template <typename Enable = std::string>
+    auto dump() const
+            -> std::enable_if_t<shared_allocator_impl::has_dump<PrimaryAllocator> &&
+                                        shared_allocator_impl::has_dump<SecondaryAllocator>,
+                                Enable> {
+        return std::string("Primary: \n") + mPrimary.dump() + std::string("Secondary: \n") +
+               mSecondary.dump();
+    }
+
+  private:
+    [[no_unique_address]] PrimaryAllocator mPrimary;
+    [[no_unique_address]] SecondaryAllocator mSecondary;
+};
+
+// An allocator which is backed by a shared_ptr to an allocator, so multiple
+// allocators can share the same backing allocator (and thus the same state).
+template <typename Allocator>
+class IndirectAllocator {
+  public:
+    static constexpr size_t alignment() { return Allocator::alignment(); }
+
+    explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
+        : mAllocator(allocator) {}
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        return mAllocator->allocate(std::forward<T>(request));
+    }
+
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        mAllocator->deallocate(allocation);
+    }
+
+    // We can't implement deallocate_all/dump/owns, since we may not be the only allocator with
+    // access to the underlying allocator (making it not well-defined). If these
+    // methods are necessary, we need to wrap with a snooping allocator.
+  private:
+    const std::shared_ptr<Allocator> mAllocator;
+};
+
+// Stateless. This allocator allocates full page-aligned MemoryHeapBases (backed by
+// a shared memory mapped anonymous file) as allocations.
+class MemoryHeapBaseAllocator {
+  public:
+    static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+    static constexpr unsigned FLAGS = 0;  // default flags
+
+    template <typename T>
+    AllocationType allocate(T&& request) {
+        static_assert(std::is_base_of_v<BasicAllocRequest, std::decay_t<T>>);
+        auto heap =
+                sp<MemoryHeapBase>::make(shared_allocator_impl::roundup(request.size, alignment()));
+        if (!shared_allocator_impl::isHeapValid(heap)) {
+            return {};
+        }
+        return sp<MemoryBase>::make(heap, 0, heap->getSize());
+    }
+
+    // Passing a block not allocated by this allocator is undefined.
+    void deallocate(const AllocationType& allocation) {
+        if (!allocation) return;
+        const auto heap = allocation->getMemory();
+        if (!heap) return;
+        // This causes future mapped accesses (even across process boundaries)
+        // to receive SIGBUS.
+        ftruncate(heap->getHeapID(), 0);
+        // This static cast is safe, since as long as the block was originally
+        // allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
+        static_cast<MemoryHeapBase&>(*heap).dispose();
+    }
+};
+}  // namespace android::mediautils
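
A hedged composition sketch (illustration only, not part of the patch) showing how
the allocators above are intended to stack; the pool size, allocation size, and
allocation name are hypothetical:

#include <mediautils/SharedMemoryAllocator.h>

void exampleSharedMemoryAllocation() {
    using namespace android::mediautils;
    // Page-aligned shared memory capped at 1 MiB by SizePolicy, with named
    // allocations tracked by SnoopingAllocator and RAII handles from ScopedAllocator.
    using PooledHeap = PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<1 << 20>>;
    ScopedAllocator<SnoopingAllocator<PooledHeap>> allocator;
    const auto allocation = allocator.allocate(NamedAllocRequest{{4096}, "exampleBuffer"});
    if (allocation) {
        // allocation is an sp<ScopedAllocation>; the shared memory is released when
        // the last reference is dropped.
    }
}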
diff --git a/media/utils/include/mediautils/StaticStringView.h b/media/utils/include/mediautils/StaticStringView.h
new file mode 100644
index 0000000..14be240
--- /dev/null
+++ b/media/utils/include/mediautils/StaticStringView.h
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string_view>
+#include <type_traits>
+
+#pragma push_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
+#undef EXPLICIT_CONVERSION_GENERATE_OPERATOR
+#define EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, op)               \
+    friend constexpr bool operator op(T lhs, T rhs) {                 \
+        return operator op(static_cast<U>(lhs), static_cast<U>(rhs)); \
+    }                                                                 \
+    friend constexpr bool operator op(T lhs, U rhs) {                 \
+        return operator op(static_cast<U>(lhs), rhs);                 \
+    }                                                                 \
+    friend constexpr bool operator op(U lhs, T rhs) {                 \
+        return operator op(lhs, static_cast<U>(rhs));                 \
+    }
+
+#pragma push_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
+#undef EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS
+// Generate comparison operator friend functions for types (appropriately
+// const/ref qualified) where T is **explicitly** convertible to U.
+#define EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS(T, U)      \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, ==)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, !=)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, <)                   \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, <=)                  \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, >)                   \
+    EXPLICIT_CONVERSION_GENERATE_OPERATOR(T, U, >=)
+
+namespace android::mediautils {
+
+// This class represents a reference to a const string with static storage
+// duration (i.e. a string view). We expose an API identical to string_view,
+// but we do not publicly inherit, to avoid potential misuse of non-virtual
+// dtors/methods.
+//
+// We can create APIs which consume only static strings, which
+// avoids allocation/deallocation of the string locally, as well as potential
+// lifetime issues caused by consuming raw pointers (or string_views).
+// Equivalently, a string_view which is always valid, and whose underlying data
+// can never change.
+//
+// In most cases, the string_view should be initialized at compile time (and there are
+// helpers to do so below). In order to initialize from a non-constexpr array,
+// the second template parameter of create() must be false (i.e. opt-in).
+// Construction/usage is as follows (constexpr is required unless that parameter is false):
+//
+//     constexpr static std::array<char, 12> debugString =
+//             StaticStringView::toStdArray("MyMethodName");
+//     constexpr auto myStaticStringView = StaticStringView::create<debugString>();
+//     const size_t length = myStaticStringView.length();  // can call any string_view method
+//     globalLog(myStaticStringView, ...);  // pass to APIs consuming StaticStringViews
+//
+struct StaticStringView final : private std::string_view {
+    template <typename T>
+    struct is_const_char_array : std::false_type {};
+
+    // Use templated value helper
+    template <size_t N>
+    struct is_const_char_array<const std::array<char, N>> : std::true_type {};
+
+    template <typename T>
+    static constexpr bool is_const_char_array_v =
+            is_const_char_array<std::remove_reference_t<T>>::value;
+
+    template <auto& val, std::enable_if_t<is_const_char_array_v<decltype(val)>, bool> Check = true>
+    static constexpr StaticStringView create() {
+        if constexpr (Check) {
+            // If this static_assert fails to compile, this method was called
+            // with a non-constexpr
+            static_assert(val[0]);
+        }
+        return StaticStringView{val.data(), val.size()};
+    }
+
+    // We can copy/move assign/construct from other StaticStringViews as their validity is already
+    // ensured
+    constexpr StaticStringView(const StaticStringView& other) = default;
+    constexpr StaticStringView& operator=(const StaticStringView& other) = default;
+    constexpr StaticStringView(StaticStringView&& other) = default;
+    constexpr StaticStringView& operator=(StaticStringView&& other) = default;
+
+    // Explicitly convert to a std::string_view (this is a strict loss of
+    // information so should only be used across APIs which intend to consume
+    // any std::string_view).
+    constexpr std::string_view getStringView() const { return *this; }
+
+    // The following methods expose an identical API to std::string_view
+    using std::string_view::begin;
+    using std::string_view::cbegin;
+    using std::string_view::cend;
+    using std::string_view::crbegin;
+    using std::string_view::crend;
+    using std::string_view::end;
+    using std::string_view::rbegin;
+    using std::string_view::rend;
+    using std::string_view::operator[];
+    using std::string_view::at;
+    using std::string_view::back;
+    using std::string_view::data;
+    using std::string_view::empty;
+    using std::string_view::front;
+    using std::string_view::length;
+    using std::string_view::max_size;
+    using std::string_view::size;
+    // These modifiers are valid because the resulting view is a
+    // substring of the original static string
+    using std::string_view::remove_prefix;
+    using std::string_view::remove_suffix;
+    // Skip swap
+    using std::string_view::compare;
+    using std::string_view::copy;
+    using std::string_view::find;
+    using std::string_view::find_first_not_of;
+    using std::string_view::find_first_of;
+    using std::string_view::find_last_not_of;
+    using std::string_view::find_last_of;
+    using std::string_view::rfind;
+    using std::string_view::substr;
+#if __cplusplus >= 202002L  // C++20
+    using std::string_view::ends_with;
+    using std::string_view::starts_with;
+#endif
+    using std::string_view::npos;
+
+    // Non-member friend functions to follow. Identical API to std::string_view
+    template <class CharT, class Traits>
+    friend std::basic_ostream<CharT, Traits>& operator<<(std::basic_ostream<CharT, Traits>& os,
+                                                         StaticStringView v) {
+        return os << static_cast<std::string_view&>(v);
+    }
+
+    EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS(const StaticStringView&,
+                                                      const std::string_view&)
+
+  private:
+    constexpr StaticStringView(const char* ptr, size_t sz) : std::string_view(ptr, sz){};
+
+  public:
+    // The next two functions are logically consteval (only available in C++20).
+    // We can't take the arrays as template parameters, as that would require references
+    // to objects with static storage duration, which would unnecessarily bloat executable size.
+    template <typename T, size_t N, size_t M>
+    static constexpr std::array<T, N + M> concatArray(const std::array<T, N>& a,
+                                                      const std::array<T, M>& b) {
+        std::array<T, N + M> res{};
+        for (size_t i = 0; i < N; i++) {
+            res[i] = a[i];
+        }
+        for (size_t i = 0; i < M; i++) {
+            res[N + i] = b[i];
+        }
+        return res;
+    }
+
+    static void arrayIsNotNullTerminated();
+
+    // This method should only be called on C-style char arrays which are
+    // null-terminated. Calling this method on a char array with intermediate null
+    // characters (e.g. "hello\0" or "hel\0lo") will result in a std::array containing
+    // null characters, which is most likely not intended.
+    // We attempt to detect a non-null-terminated char array at link time, but
+    // this is best effort. A consequence of this approach is that this method
+    // will fail to link for extern args, or when not inlined. Since this method
+    // is intended to be used in constexpr contexts, this is not an issue.
+    template <size_t N>
+    static constexpr std::array<char, N - 1> toStdArray(const char (&input)[N]) {
+        std::array<char, N - 1> res{};
+        for (size_t i = 0; i < N - 1; i++) {
+            res[i] = input[i];
+        }
+        // A workaround to generate a link-time error if toStdArray is not called on
+        // a null-terminated char array.
+        if (input[N - 1] != 0) arrayIsNotNullTerminated();
+        return res;
+    }
+};
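+
+// A usage sketch for the helpers above (illustrative only): build a compile-time
+// string from two literals and wrap it in a StaticStringView.
+//
+//     constexpr static auto fullName = StaticStringView::concatArray(
+//             StaticStringView::toStdArray("Class::"),
+//             StaticStringView::toStdArray("method"));
+//     constexpr auto view = StaticStringView::create<fullName>();
+//     static_assert(view.size() == 13);  // "Class::" (7) + "method" (6)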
+}  // namespace android::mediautils
+
+// Specialization of std::hash for use with std::unordered_map
+namespace std {
+template <>
+struct hash<android::mediautils::StaticStringView> {
+    constexpr size_t operator()(const android::mediautils::StaticStringView& val) const {
+        return std::hash<std::string_view>{}(val.getStringView());
+    }
+};
+}  // namespace std
+
+#pragma pop_macro("EXPLICIT_CONVERSION_GENERATE_OPERATOR")
+#pragma pop_macro("EXPLICIT_CONVERSION_GENERATE_COMPARISON_OPERATORS")
diff --git a/media/utils/include/mediautils/TidWrapper.h b/media/utils/include/mediautils/TidWrapper.h
new file mode 100644
index 0000000..aeefa01
--- /dev/null
+++ b/media/utils/include/mediautils/TidWrapper.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#if defined(__linux__)
+#include <signal.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+#endif
+
+namespace android::mediautils {
+
+// The library wrapper for gettid is only available on bionic. When not building
+// against bionic, we issue the syscall directly.
+inline pid_t getThreadIdWrapper() {
+#if defined(__BIONIC__)
+    return ::gettid();
+#else
+    return syscall(SYS_gettid);
+#endif
+}
+
+// Send an abort signal to a (linux) thread id.
+inline int abortTid(int tid) {
+#if defined(__linux__)
+    const pid_t pid = getpid();
+    siginfo_t siginfo = {
+        .si_code = SI_QUEUE,
+        .si_pid = pid,
+        .si_uid = getuid(),
+    };
+    return syscall(SYS_rt_tgsigqueueinfo, pid, tid, SIGABRT, &siginfo);
+#else
+    errno = ENODEV;
+    return -1;
+#endif
+}
+
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index ef03aef..f1d572f 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <chrono>
 #include <vector>
 
 #include <mediautils/TimerThread.h>
@@ -27,10 +28,33 @@
 
 class TimeCheck {
   public:
+
+    // Duration for TimeCheck is based on steady_clock, typically nanoseconds.
+    using Duration = std::chrono::steady_clock::duration;
+
+    // Duration for printing is in milliseconds, using float for additional precision.
+    using FloatMs = std::chrono::duration<float, std::milli>;
+
+    // OnTimerFunc is the callback function with 2 parameters.
+    //  bool timeout  (which is true when the TimeCheck object
+    //                 times out, false when the TimeCheck object is
+    //                 destroyed or leaves scope before the timer expires.)
+    //  float elapsedMs (the elapsed time to this event).
     using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
     // The default timeout is chosen to be less than system server watchdog timeout
-    static constexpr uint32_t kDefaultTimeOutMs = 5000;
+    // Note: kDefaultTimeoutDuration should be no less than 2 seconds, otherwise spurious
+    // timeouts may occur with system suspend.
+    static constexpr TimeCheck::Duration kDefaultTimeoutDuration = std::chrono::milliseconds(3000);
+
+    // Because suspend abort does not increment the monotonic clock,
+    // we allow an additional second-chance timeout after the first timeout expires.
+    //
+    // The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
+    // and the result is more stable when the monotonic clock increments during suspend.
+    //
+    static constexpr TimeCheck::Duration kDefaultSecondChanceDuration =
+            std::chrono::milliseconds(2000);
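+
+    // Worked example with the defaults above: the effective deadline is
+    // kDefaultTimeoutDuration + kDefaultSecondChanceDuration = 3000 ms + 2000 ms = 5000 ms,
+    // which remains below the system server watchdog timeout.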
 
     /**
      * TimeCheck is a RAII object which will notify a callback
@@ -44,24 +68,24 @@
      * the deallocation.
      *
      * \param tag       string associated with the TimeCheck object.
-     * \param onTimer   callback function with 2 parameters
-     *                      bool timeout  (which is true when the TimeCheck object
-     *                                    times out, false when the TimeCheck object is
-     *                                    destroyed or leaves scope before the timer expires.)
-     *                      float elapsedMs (the elapsed time to this event).
+     * \param onTimer   callback function with 2 parameters (described above in OnTimerFunc).
      *                  The callback when timeout is true will be called on a different thread.
      *                  This will cancel the callback on the destructor but is not guaranteed
      *                  to block for callback completion if it is already in progress
      *                  (for maximum concurrency and reduced deadlock potential), so use proper
      *                  lifetime analysis (e.g. shared or weak pointers).
-     * \param timeoutMs timeout in milliseconds.
+     * \param requestedTimeoutDuration timeout duration (steady clock based).
      *                  A zero timeout means no timeout is set -
      *                  the callback is called only when
      *                  the TimeCheck object is destroyed or leaves scope.
+     * \param secondChanceDuration additional time to wait if the first timeout expires.
+     *                  This is used to prevent false timeouts if the steady (monotonic)
+     *                  clock advances on aborted suspend.
      * \param crashOnTimeout true if the object issues an abort on timeout.
      */
-    explicit TimeCheck(std::string tag, OnTimerFunc&& onTimer = {},
-            uint32_t timeoutMs = kDefaultTimeOutMs, bool crashOnTimeout = true);
+    explicit TimeCheck(std::string_view tag, OnTimerFunc&& onTimer,
+            Duration requestedTimeoutDuration, Duration secondChanceDuration,
+            bool crashOnTimeout);
 
     TimeCheck() = default;
     // Remove copy constructors as there should only be one call to the destructor.
@@ -79,16 +103,35 @@
     // The usage here is const safe.
     class TimeCheckHandler {
     public:
-        const std::string tag;
+        template <typename S, typename F>
+        TimeCheckHandler(S&& _tag, F&& _onTimer, bool _crashOnTimeout,
+            Duration _timeoutDuration, Duration _secondChanceDuration,
+            std::chrono::system_clock::time_point _startSystemTime,
+            pid_t _tid)
+            : tag(std::forward<S>(_tag))
+            , onTimer(std::forward<F>(_onTimer))
+            , crashOnTimeout(_crashOnTimeout)
+            , timeoutDuration(_timeoutDuration)
+            , secondChanceDuration(_secondChanceDuration)
+            , startSystemTime(_startSystemTime)
+            , tid(_tid)
+            {}
+        const FixedString62 tag;
         const OnTimerFunc onTimer;
         const bool crashOnTimeout;
-        const std::chrono::system_clock::time_point startTime;
+        const Duration timeoutDuration;
+        const Duration secondChanceDuration;
+        const std::chrono::system_clock::time_point startSystemTime;
         const pid_t tid;
-
         void onCancel(TimerThread::Handle handle) const;
-        void onTimeout() const;
+        void onTimeout(TimerThread::Handle handle) const;
     };
 
+    // Returns a string that represents the timeout vs elapsed time,
+    // and diagnostics if there are any potential issues.
+    static std::string analyzeTimeouts(
+            float timeoutMs, float elapsedSteadyMs, float elapsedSystemMs);
+
     static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
 
@@ -104,4 +147,9 @@
 TimeCheck makeTimeCheckStatsForClassMethod(
         std::string_view className, std::string_view methodName);
 
+// A handy statement-like macro to put at the beginning of almost every method
+// which calls into HAL. Note that it requires the class to implement 'getClassName'.
+#define TIME_CHECK() auto timeCheck = \
+            mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
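+
+// A usage sketch (illustrative only); "MyHalWrapper", its getClassName(), and mHal
+// are hypothetical:
+//
+//     status_t MyHalWrapper::setParameter(int value) {
+//         TIME_CHECK();  // monitors this method call for the rest of its scope
+//         return mHal->setParameter(value);
+//     }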
+
 }  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
index ffee602..d84d682 100644
--- a/media/utils/include/mediautils/TimerThread.h
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -21,12 +21,16 @@
 #include <deque>
 #include <functional>
 #include <map>
+#include <memory>
 #include <mutex>
 #include <string>
 #include <thread>
+#include <vector>
 
 #include <android-base/thread_annotations.h>
 
+#include <mediautils/FixedString.h>
+
 namespace android::mediautils {
 
 /**
@@ -34,19 +38,93 @@
  */
 class TimerThread {
   public:
-    // A Handle is a time_point that serves as a unique key.  It is ordered.
+    // A Handle is a time_point that serves as a unique key to access a queued
+    // request to the TimerThread.
     using Handle = std::chrono::steady_clock::time_point;
 
+    // Duration is based on steady_clock (typically nanoseconds)
+    // vs the system_clock duration (typically microseconds).
+    using Duration = std::chrono::steady_clock::duration;
+
     static inline constexpr Handle INVALID_HANDLE =
             std::chrono::steady_clock::time_point::min();
 
+    // Handle implementation details:
+    // A Handle represents the timer expiration time based on std::chrono::steady_clock
+    // (clock monotonic).  This Handle is computed as now() + timeout.
+    //
+    // The lsb of the Handle time_point is adjusted to indicate whether there is
+    // a timeout action (1) or not (0).
+    //
+
+    template <size_t COUNT>
+    static constexpr bool is_power_of_2_v = COUNT > 0 && (COUNT & (COUNT - 1)) == 0;
+
+    template <size_t COUNT>
+    static constexpr size_t mask_from_count_v = COUNT - 1;
+
+    static constexpr size_t HANDLE_TYPES = 2;
+    // HANDLE_TYPES must be a power of 2.
+    static_assert(is_power_of_2_v<HANDLE_TYPES>);
+
+    // The handle types
+    enum class HANDLE_TYPE : size_t {
+        NO_TIMEOUT = 0,
+        TIMEOUT = 1,
+    };
+
+    static constexpr size_t HANDLE_TYPE_MASK = mask_from_count_v<HANDLE_TYPES>;
+
+    template <typename T>
+    static constexpr auto enum_as_value(T x) {
+        return static_cast<std::underlying_type_t<T>>(x);
+    }
+
+    static inline bool isNoTimeoutHandle(Handle handle) {
+        return (handle.time_since_epoch().count() & HANDLE_TYPE_MASK) ==
+                enum_as_value(HANDLE_TYPE::NO_TIMEOUT);
+    }
+
+    static inline bool isTimeoutHandle(Handle handle) {
+        return (handle.time_since_epoch().count() & HANDLE_TYPE_MASK) ==
+                enum_as_value(HANDLE_TYPE::TIMEOUT);
+    }
+
+    // Returns a unique Handle that doesn't exist in the container.
+    template <size_t MAX_TYPED_HANDLES, size_t HANDLE_TYPE_AS_VALUE, typename C, typename T>
+    static Handle getUniqueHandleForHandleType_l(const C& container, T timeout) {
+        static_assert(MAX_TYPED_HANDLES > 0 && HANDLE_TYPE_AS_VALUE < MAX_TYPED_HANDLES
+                && is_power_of_2_v<MAX_TYPED_HANDLES>,
+                "handles must be a power of two");
+
+        // Our initial handle is the deadline as computed from steady_clock.
+        auto deadline = std::chrono::steady_clock::now() + timeout;
+
+        // We adjust the lsbs by the minimum increment to have the correct
+        // HANDLE_TYPE in the least significant bits.
+        auto remainder = deadline.time_since_epoch().count() & HANDLE_TYPE_MASK;
+        size_t offset = HANDLE_TYPE_AS_VALUE > remainder ? HANDLE_TYPE_AS_VALUE - remainder :
+                     MAX_TYPED_HANDLES + HANDLE_TYPE_AS_VALUE - remainder;
+        deadline += std::chrono::steady_clock::duration(offset);
+
+        // To avoid key collisions, advance the handle by MAX_TYPED_HANDLES (the modulus factor)
+        // until the key is unique.
+        while (container.find(deadline) != container.end()) {
+            deadline += std::chrono::steady_clock::duration(MAX_TYPED_HANDLES);
+        }
+        return deadline;
+    }
+
+    // TimerCallback invoked on timeout or cancel.
+    using TimerCallback = std::function<void(Handle)>;
+
     /**
      * Schedules a task to be executed in the future (`timeout` duration from now).
      *
      * \param tag     string associated with the task.  This need not be unique,
      *                as the Handle returned is used for cancelling.
      * \param func    callback function that is invoked at the timeout.
-     * \param timeout timeout duration which is converted to milliseconds with at
+     * \param timeoutDuration timeout duration which is converted to milliseconds with at
      *                least 45 integer bits.
      *                A timeout of 0 (or negative) means the timer never expires
      *                so func() is never called. These tasks are stored internally
@@ -54,7 +132,8 @@
      * \returns       a handle that can be used for cancellation.
      */
     Handle scheduleTask(
-            std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout);
+            std::string_view tag, TimerCallback&& func,
+            Duration timeoutDuration, Duration secondChanceDuration);
 
     /**
      * Tracks a task that shows up on toString() until cancelled.
@@ -62,7 +141,7 @@
      * \param tag     string associated with the task.
      * \returns       a handle that can be used for cancellation.
      */
-    Handle trackTask(std::string tag);
+    Handle trackTask(std::string_view tag);
 
     /**
      * Cancels a task previously scheduled with scheduleTask()
@@ -74,7 +153,15 @@
      */
     bool cancelTask(Handle handle);
 
-    std::string toString(size_t retiredCount = SIZE_MAX) const;
+    struct SnapshotAnalysis;
+    /**
+     * Take a snapshot of the current state of the TimerThread and determine the
+     * potential cause of a deadlock.
+     * \param retiredCount The number of successfully retired calls to capture
+     *                      (may be many).
+     * \return See below for a description of a SnapshotAnalysis object.
+     */
+    SnapshotAnalysis getSnapshotAnalysis(size_t retiredCount = SIZE_MAX) const;
 
     /**
      * Returns a string representation of the TimerThread queue.
@@ -125,20 +212,69 @@
         return s;
     }
 
-  private:
     // To minimize movement of data, we pass around shared_ptrs to Requests.
     // These are allocated and deallocated outside of the lock.
+    // TODO(b/243839867) consider options to merge Request with the
+    // TimeCheck::TimeCheckHandler struct.
     struct Request {
+        Request(std::chrono::system_clock::time_point _scheduled,
+                std::chrono::system_clock::time_point _deadline,
+                Duration _secondChanceDuration,
+                pid_t _tid,
+                std::string_view _tag)
+            : scheduled(_scheduled)
+            , deadline(_deadline)
+            , secondChanceDuration(_secondChanceDuration)
+            , tid(_tid)
+            , tag(_tag)
+            {}
+
         const std::chrono::system_clock::time_point scheduled;
-        const std::chrono::system_clock::time_point deadline; // deadline := scheduled + timeout
+        const std::chrono::system_clock::time_point deadline; // deadline := scheduled
+                                                              // + timeoutDuration
+                                                              // + secondChanceDuration
                                                               // if deadline == scheduled, no
                                                               // timeout, task not executed.
+        Duration secondChanceDuration;
         const pid_t tid;
-        const std::string tag;
-
+        const FixedString62 tag;
         std::string toString() const;
     };
 
+
+    // SnapshotAnalysis contains the info deduced by getSnapshotAnalysis().
+
+    struct SnapshotAnalysis {
+        // If we were unable to determine any applicable thread ids,
+        // we leave their value as INVALID_PID.
+        // Note, we use the linux thread id (not pthread), so its type is pid_t.
+        static constexpr pid_t INVALID_PID = -1;
+        // Description of likely issue and/or blocked method.
+        // Empty if no actionable info.
+        std::string description;
+        // Tid of the (latest) monitored thread which has timed out.
+        // This is the thread which the suspect is deduced with respect to.
+        // Most often, this is the thread which an abort is being triggered
+        // from.
+        pid_t timeoutTid = INVALID_PID;
+        // Tid of the (HAL) thread which has likely halted progress, selected
+        // from pendingRequests. May be the same as timeoutTid, if the timed-out
+        // thread directly called into the HAL.
+        pid_t suspectTid = INVALID_PID;
+        // Number of second chances given by the timer thread
+        size_t secondChanceCount;
+        // List of pending requests
+        std::vector<std::shared_ptr<const Request>> pendingRequests;
+        // List of timed-out requests
+        std::vector<std::shared_ptr<const Request>> timeoutRequests;
+        // List of retired requests
+        std::vector<std::shared_ptr<const Request>> retiredRequests;
+        // Dumps the information contained above as well as additional call
+        // stacks where applicable.
+        std::string toString() const;
+    };
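+
+    // Usage sketch (illustrative only): capture and log the current state.
+    //
+    //     const auto analysis = timerThread.getSnapshotAnalysis();
+    //     ALOGW("%s", analysis.toString().c_str());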
+
+  private:
     // Deque of requests, in order of add().
     // This class is thread-safe.
     class RequestQueue {
@@ -160,15 +296,17 @@
                 mRequestQueue GUARDED_BY(mRQMutex);
     };
 
-    // A storage map of tasks without timeouts.  There is no std::function<void()>
+    // A storage map of tasks without timeouts.  There is no TimerCallback
     // required, it just tracks the tasks with the tag, scheduled time and the tid.
     // These tasks show up on a pendingToString() until manually cancelled.
     class NoTimeoutMap {
-        // This a counter of the requests that have no timeout (timeout == 0).
-        std::atomic<size_t> mNoTimeoutRequests{};
-
         mutable std::mutex mNTMutex;
         std::map<Handle, std::shared_ptr<const Request>> mMap GUARDED_BY(mNTMutex);
+        Handle getUniqueHandle_l() REQUIRES(mNTMutex) {
+            return getUniqueHandleForHandleType_l<
+                    HANDLE_TYPES, enum_as_value(HANDLE_TYPE::NO_TIMEOUT)>(
+                mMap, Duration{} /* timeout */);
+        }
 
       public:
         bool isValidHandle(Handle handle) const; // lock free
@@ -182,15 +320,27 @@
     // call on timeout.
     // This class is thread-safe.
     class MonitorThread {
+        std::atomic<size_t> mSecondChanceCount{};
         mutable std::mutex mMutex;
-        mutable std::condition_variable mCond;
+        mutable std::condition_variable mCond GUARDED_BY(mMutex);
 
         // Ordered map of requests based on time of deadline.
         //
-        std::map<Handle, std::pair<std::shared_ptr<const Request>, std::function<void()>>>
+        std::map<Handle, std::pair<std::shared_ptr<const Request>, TimerCallback>>
                 mMonitorRequests GUARDED_BY(mMutex);
 
-        RequestQueue& mTimeoutQueue; // locked internally, added to when request times out.
+        // Due to monotonic/steady clock inaccuracies during suspend,
+        // we allow an additional second chance waiting time to prevent
+        // false removal.
+
+        // This mSecondChanceRequests queue is almost always empty.
+        // Using a pair with the original handle allows lookup and keeps
+        // the Key unique.
+        std::map<std::pair<Handle /* new */, Handle /* original */>,
+                std::pair<std::shared_ptr<const Request>, TimerCallback>>
+                        mSecondChanceRequests GUARDED_BY(mMutex);
+
+        RequestQueue& mTimeoutQueue GUARDED_BY(mMutex); // added to when request times out.
 
         // Worker thread variables
         bool mShouldExit GUARDED_BY(mMutex) = false;
@@ -200,60 +350,32 @@
         std::thread mThread;
 
         void threadFunc();
-        Handle getUniqueHandle_l(std::chrono::milliseconds timeout) REQUIRES(mMutex);
+        Handle getUniqueHandle_l(Duration timeout) REQUIRES(mMutex) {
+            return getUniqueHandleForHandleType_l<
+                    HANDLE_TYPES, enum_as_value(HANDLE_TYPE::TIMEOUT)>(
+                mMonitorRequests, timeout);
+        }
 
       public:
         MonitorThread(RequestQueue &timeoutQueue);
         ~MonitorThread();
 
-        Handle add(std::shared_ptr<const Request> request, std::function<void()>&& func,
-                std::chrono::milliseconds timeout);
+        Handle add(std::shared_ptr<const Request> request, TimerCallback&& func,
+                Duration timeout);
         std::shared_ptr<const Request> remove(Handle handle);
         void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+        size_t getSecondChanceCount() const {
+            return mSecondChanceCount.load(std::memory_order_relaxed);
+        }
     };
 
-    // Analysis contains info deduced by analysisTimeout().
-    //
-    // Summary is the result string from checking timeoutRequests to see if
-    // any might be caused by blocked calls in pendingRequests.
-    //
-    // Summary string is empty if there is no automatic actionable info.
-    //
-    // timeoutTid is the tid selected from timeoutRequests (if any).
-    //
-    // HALBlockedTid is the tid that is blocked from pendingRequests believed
-    // to cause the timeout.
-    // HALBlockedTid may be INVALID_PID if no suspected tid is found,
-    // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
-    //
-    static constexpr pid_t INVALID_PID = -1;
-    struct Analysis {
-        std::string summary;
-        pid_t timeoutTid = INVALID_PID;
-        pid_t HALBlockedTid = INVALID_PID;
-    };
 
     // A HAL method is where the substring "Hidl" is in the class name.
     // The tag should look like: ... Hidl ... :: ...
     static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
 
-    // Returns analysis from the requests.
-    static Analysis analyzeTimeout(
-        const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
-        const std::vector<std::shared_ptr<const Request>>& pendingRequests);
-
     std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
 
-    // A no-timeout request is represented by a handles at the end of steady_clock time,
-    // counting down by the number of no timeout requests previously requested.
-    // We manage them on the NoTimeoutMap, but conceptually they could be scheduled
-    // on the MonitorThread because those time handles won't expire in
-    // the lifetime of the device.
-    static inline Handle getIndexedHandle(size_t index) {
-        return std::chrono::time_point<std::chrono::steady_clock>::max() -
-                    std::chrono::time_point<std::chrono::steady_clock>::duration(index);
-    }
-
     static constexpr size_t kRetiredQueueMax = 16;
     RequestQueue mRetiredQueue{kRetiredQueueMax};  // locked internally
 
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 1024018..0689083 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -7,76 +7,107 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_test_library {
-    name: "libsharedtest",
+// general test config
+cc_defaults {
+    name: "libmediautils_tests_config",
+
+    host_supported: true,
+
     cflags: [
         "-Wall",
         "-Werror",
         "-Wextra",
     ],
 
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
+    sanitize: {
+        undefined: true,
+        misc_undefined: [
+            "float-divide-by-zero",
+            "local-bounds",
+        ],
+        integer_overflow: true,
+        cfi: true,
+        memtag_heap: true,
+        diag: {
+            undefined: true,
+            misc_undefined: [
+                "float-divide-by-zero",
+                "local-bounds",
+            ],
+            integer_overflow: true,
+            cfi: true,
+            memtag_heap: true,
+        },
     },
+    target: {
+        host: {
+            sanitize: {
+                cfi: false,
+                diag: {
+                    cfi: false,
+                },
+            },
+        },
+    },
+}
+
+cc_defaults {
+    name: "libmediautils_tests_defaults",
+
+    defaults: ["libmediautils_tests_config"],
+
+    host_supported: true,
 
     shared_libs: [
+        "libbinder",
         "liblog",
+        "libmediautils",
+        "libutils",
     ],
+}
+
+cc_test_library {
+    name: "libsharedtest",
+
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "sharedtest.cpp",
-    ]
+    ],
 }
 
 cc_test {
     name: "library_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "libbase",
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     data_libs: [
         "libsharedtest",
     ],
 
+    shared_libs: [
+        "libbase",
+    ],
+
     srcs: [
         "library_tests.cpp",
     ],
 }
 
 cc_test {
+    name: "libmediautils_test",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "memory-test.cpp",
+    ],
+}
+
+cc_test {
     name: "media_process_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_process_tests.cpp",
@@ -86,17 +117,7 @@
 cc_test {
     name: "media_synchronization_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_synchronization_tests.cpp",
@@ -106,17 +127,7 @@
 cc_test {
     name: "media_threadsnapshot_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "media_threadsnapshot_tests.cpp",
@@ -124,19 +135,26 @@
 }
 
 cc_test {
-    name: "mediautils_scopedstatistics_tests",
+    name: "mediautils_fixedstring_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
+    ],
+
+    srcs: [
+        "mediautils_fixedstring_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "mediautils_scopedstatistics_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    shared_libs: [
+        "libaudioutils",
     ],
 
     srcs: [
@@ -147,17 +165,10 @@
 cc_test {
     name: "methodstatistics_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     shared_libs: [
         "libaudioutils",
-        "liblog",
-        "libmediautils",
-        "libutils",
     ],
 
     srcs: [
@@ -166,28 +177,59 @@
 }
 
 cc_test {
+    name: "static_string_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "static_string_view_tests.cpp",
+    ],
+}
+
+cc_test {
     name: "timecheck_tests",
 
-    cflags: [
-        "-Wall",
-        "-Werror",
-        "-Wextra",
-    ],
-
-    sanitize:{
-       address: true,
-       cfi: true,
-       integer_overflow: true,
-       memtag_heap: true,
-    },
-
-    shared_libs: [
-        "liblog",
-        "libmediautils",
-        "libutils",
-    ],
+    defaults: ["libmediautils_tests_defaults"],
 
     srcs: [
         "timecheck_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "timerthread_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "TimerThread-test.cpp",
+    ],
+}
+
+cc_test {
+    name: "extended_accumulator_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "extended_accumulator_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "inplace_function_tests",
+
+    defaults: ["libmediautils_tests_defaults"],
+
+    srcs: [
+        "inplace_function_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "shared_memory_allocator_tests",
+    defaults: ["libmediautils_tests_defaults"],
+    srcs: [
+        "shared_memory_allocator_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/TimerThread-test.cpp b/media/utils/tests/TimerThread-test.cpp
new file mode 100644
index 0000000..468deed
--- /dev/null
+++ b/media/utils/tests/TimerThread-test.cpp
@@ -0,0 +1,277 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <array>
+#include <atomic>
+#include <chrono>
+#include <thread>
+
+#include <gtest/gtest.h>
+#include <mediautils/TimerThread.h>
+
+using namespace std::chrono_literals;
+using namespace android::mediautils;
+
+namespace {
+
+constexpr auto kJitter = 10ms;
+
+// Each task written by *ToString() will start with a left brace.
+constexpr char REQUEST_START = '{';
+
+inline size_t countChars(std::string_view s, char c) {
+    return std::count(s.begin(), s.end(), c);
+}
+
+
+// Split msec time between timeout and second chance time
+// This tests expiration times weighted between timeout and the second chance time.
+#define DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(msec, frac) \
+    std::chrono::milliseconds(int((msec) * (frac)) + 1), \
+    std::chrono::milliseconds(int((msec) * (1.f - (frac))))
+
+// The TimerThreadTest is parameterized on a fraction between 0.f and 1.f which
+// is how the total timeout time is split between the first timeout and the second chance time.
+//
+class TimerThreadTest : public ::testing::TestWithParam<float> {
+protected:
+
+static void testBasic() {
+    const auto frac = GetParam();
+
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    TimerThread::Handle handle =
+            thread.scheduleTask("Basic", [&taskRan](TimerThread::Handle) {
+                    taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
+    ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
+    std::this_thread::sleep_for(100ms - kJitter);
+    ASSERT_FALSE(taskRan);
+    std::this_thread::sleep_for(2 * kJitter);
+    ASSERT_TRUE(taskRan); // timed-out called.
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+static void testCancel() {
+    const auto frac = GetParam();
+
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    TimerThread::Handle handle =
+            thread.scheduleTask("Cancel", [&taskRan](TimerThread::Handle) {
+                    taskRan = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
+    ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
+    std::this_thread::sleep_for(100ms - kJitter);
+    ASSERT_FALSE(taskRan);
+    ASSERT_TRUE(thread.cancelTask(handle));
+    std::this_thread::sleep_for(2 * kJitter);
+    ASSERT_FALSE(taskRan); // timed-out did not call.
+    ASSERT_EQ(0ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // task cancelled.
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+static void testCancelAfterRun() {
+    const auto frac = GetParam();
+
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    TimerThread::Handle handle =
+            thread.scheduleTask("CancelAfterRun",
+                    [&taskRan](TimerThread::Handle) {
+                            taskRan = true; },
+                            DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
+    ASSERT_TRUE(TimerThread::isTimeoutHandle(handle));
+    std::this_thread::sleep_for(100ms + kJitter);
+    ASSERT_TRUE(taskRan); //  timed-out called.
+    ASSERT_FALSE(thread.cancelTask(handle));
+    ASSERT_EQ(1ul, countChars(thread.timeoutToString(), REQUEST_START));
+    // nothing actually cancelled
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+static void testMultipleTasks() {
+    const auto frac = GetParam();
+
+    std::array<std::atomic<bool>, 6> taskRan{};
+    TimerThread thread;
+
+    auto startTime = std::chrono::steady_clock::now();
+
+    thread.scheduleTask("0", [&taskRan](TimerThread::Handle) {
+            taskRan[0] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(300, frac));
+    thread.scheduleTask("1", [&taskRan](TimerThread::Handle) {
+            taskRan[1] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(100, frac));
+    thread.scheduleTask("2", [&taskRan](TimerThread::Handle) {
+            taskRan[2] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
+    thread.scheduleTask("3", [&taskRan](TimerThread::Handle) {
+            taskRan[3] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(400, frac));
+    auto handle4 = thread.scheduleTask("4", [&taskRan](TimerThread::Handle) {
+            taskRan[4] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
+    thread.scheduleTask("5", [&taskRan](TimerThread::Handle) {
+            taskRan[5] = true; }, DISTRIBUTE_TIMEOUT_SECONDCHANCE_MS_FRAC(200, frac));
+
+    // 6 tasks pending
+    ASSERT_EQ(6ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks completed
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    // None of the tasks are expected to have finished at the start.
+    std::array<std::atomic<bool>, 6> expected{};
+
+    // Task 1 should trigger around 100ms.
+    std::this_thread::sleep_until(startTime + 100ms - kJitter);
+
+    ASSERT_EQ(expected, taskRan);
+
+
+    std::this_thread::sleep_until(startTime + 100ms + kJitter);
+
+    expected[1] = true;
+    ASSERT_EQ(expected, taskRan);
+
+    // Cancel task 4 before it gets a chance to run.
+    thread.cancelTask(handle4);
+
+    // Tasks 2 and 5 should trigger around 200ms.
+    std::this_thread::sleep_until(startTime + 200ms - kJitter);
+
+    ASSERT_EQ(expected, taskRan);
+
+
+    std::this_thread::sleep_until(startTime + 200ms + kJitter);
+
+    expected[2] = true;
+    expected[5] = true;
+    ASSERT_EQ(expected, taskRan);
+
+    // Task 0 should trigger around 300ms.
+    std::this_thread::sleep_until(startTime + 300ms - kJitter);
+
+    ASSERT_EQ(expected, taskRan);
+
+    std::this_thread::sleep_until(startTime + 300ms + kJitter);
+
+    expected[0] = true;
+    ASSERT_EQ(expected, taskRan);
+
+    // 1 task pending
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Task 3 should trigger around 400ms.
+    std::this_thread::sleep_until(startTime + 400ms - kJitter);
+
+    ASSERT_EQ(expected, taskRan);
+
+    // 4 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(4ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    std::this_thread::sleep_until(startTime + 400ms + kJitter);
+
+    expected[3] = true;
+    ASSERT_EQ(expected, taskRan);
+
+    // 0 tasks pending
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 5 tasks called on timeout and 1 cancelled
+    ASSERT_EQ(5ul, countChars(thread.timeoutToString(), REQUEST_START));
+    ASSERT_EQ(1ul, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+}; // class TimerThreadTest
+
+TEST_P(TimerThreadTest, Basic) {
+    testBasic();
+}
+
+TEST_P(TimerThreadTest, Cancel) {
+    testCancel();
+}
+
+TEST_P(TimerThreadTest, CancelAfterRun) {
+    testCancelAfterRun();
+}
+
+TEST_P(TimerThreadTest, MultipleTasks) {
+    testMultipleTasks();
+}
+
+INSTANTIATE_TEST_CASE_P(
+        TimerThread,
+        TimerThreadTest,
+        ::testing::Values(0.f, 0.5f, 1.f)
+        );
+
+TEST(TimerThread, TrackedTasks) {
+    TimerThread thread;
+
+    auto handle0 = thread.trackTask("0");
+    auto handle1 = thread.trackTask("1");
+    auto handle2 = thread.trackTask("2");
+
+    ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle0));
+    ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle1));
+    ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle2));
+
+    // 3 tasks pending
+    ASSERT_EQ(3ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks retired
+    ASSERT_EQ(0ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle0));
+    ASSERT_TRUE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    // handle1 is stale, cancel returns false.
+    ASSERT_FALSE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Add another tracked task.
+    auto handle3 = thread.trackTask("3");
+    ASSERT_TRUE(TimerThread::isNoTimeoutHandle(handle3));
+
+    // 2 tasks pending
+    ASSERT_EQ(2ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle2));
+
+    // 1 task pending
+    ASSERT_EQ(1ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 3 tasks retired
+    ASSERT_EQ(3ul, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle3));
+
+    // 0 tasks pending
+    ASSERT_EQ(0ul, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks retired
+    ASSERT_EQ(4ul, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+}  // namespace
diff --git a/media/utils/tests/extended_accumulator_tests.cpp b/media/utils/tests/extended_accumulator_tests.cpp
new file mode 100644
index 0000000..e243e7e
--- /dev/null
+++ b/media/utils/tests/extended_accumulator_tests.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "extended_accumulator_tests"
+
+#include <mediautils/ExtendedAccumulator.h>
+
+#include <type_traits>
+#include <cstdint>
+#include <limits.h>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+// Conditionally choose a base accumulating counter value in order to prevent
+// unsigned underflow on the accumulator from aborting the tests.
+template <typename TType, typename CType>
+static constexpr CType getBase() {
+  static_assert(sizeof(TType) < sizeof(CType));
+  if constexpr (std::is_unsigned_v<CType>) {
+      return std::numeric_limits<TType>::max() + 1;
+  } else {
+      return 0;
+  }
+}
+
+// Since the entire state of this utility is the previous value, and the
+// behavior is isomorphic mod the underlying type on the previous value, we can
+// test combinations of the previous value of the underlying type and a
+// hypothetical signed update to that type and ensure the accumulator moves
+// correctly and reports overflow correctly.
+template <typename TestUInt, typename CType>
+void testPair(TestUInt prevVal, std::make_signed_t<TestUInt> delta) {
+    using TestDetect = ExtendedAccumulator<TestUInt, CType>;
+    using TestInt = typename TestDetect::SignedInt;
+    static_assert(std::is_same_v<typename TestDetect::UnsignedInt, TestUInt>);
+    static_assert(std::is_same_v<TestInt, std::make_signed_t<TestUInt>>);
+    static_assert(sizeof(TestUInt) < sizeof(CType));
+
+    // To safely detect underflow/overflow for testing
+    // Should be 0 mod TestUInt, max + 1 is convenient
+    static constexpr CType base = getBase<TestUInt, CType>();
+    const CType prev = base + prevVal;
+    TestDetect test{prev};
+    EXPECT_EQ(test.getValue(), prev);
+    // Prevent unsigned wraparound abort
+    CType next;
+    const auto err =  __builtin_add_overflow(prev, delta, &next);
+    LOG_ALWAYS_FATAL_IF(err, "Unexpected wrap in tests");
+    const auto [result, status] = test.poll(static_cast<TestUInt>(next));
+    EXPECT_EQ(test.getValue(), next);
+    EXPECT_EQ(result, delta);
+
+    // Test overflow/underflow event reporting.
+    if (next < base) EXPECT_EQ(TestDetect::Wrap::UNDERFLOW, status);
+    else if (next > base + std::numeric_limits<TestUInt>::max())
+        EXPECT_EQ(TestDetect::Wrap::OVERFLOW, status);
+    else EXPECT_EQ(TestDetect::Wrap::NORMAL, status);
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with an unsigned containing type.
+TEST(wraparound_tests, cover_u8_u64) {
+    using TType = uint8_t;
+    using CType = uint64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
+
+// Test this utility on every combination of prior and update value for the
+// type uint8_t, with a signed containing type.
+TEST(wraparound_tests, cover_u8_s64) {
+    using TType = uint8_t;
+    using CType = int64_t;
+    static constexpr CType max = std::numeric_limits<TType>::max();
+    for (CType i = 0; i <= max; i++) {
+        for (CType j = 0; j <= max; j++) {
+            testPair<TType, CType>(i, static_cast<int64_t>(j));
+        }
+    }
+}
diff --git a/media/utils/tests/inplace_function_tests.cpp b/media/utils/tests/inplace_function_tests.cpp
new file mode 100644
index 0000000..6172aa4
--- /dev/null
+++ b/media/utils/tests/inplace_function_tests.cpp
@@ -0,0 +1,493 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "inplace_function_tests"
+
+#include <mediautils/InPlaceFunction.h>
+
+#include <type_traits>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+struct BigCallable {
+    BigCallable(size_t* x, size_t val1, size_t val2) : ptr(x), a(val1), b(val2) {}
+    size_t* ptr;
+    size_t a;
+    size_t b;
+    size_t operator()(size_t input) const {
+        *ptr += a * 100 + b * 10 + input;
+        return 8;
+    }
+};
+
+TEST(InPlaceFunctionTests, Basic) {
+    size_t x = 5;
+    InPlaceFunction<size_t(size_t)> func;
+    {
+        BigCallable test{&x, 2, 3};
+        func = test;
+    }
+    EXPECT_EQ(func(2), 8ull);
+    EXPECT_EQ(x, 232ull + 5);
+}
+
+TEST(InPlaceFunctionTests, Invalid) {
+    InPlaceFunction<size_t(size_t)> func;
+    EXPECT_TRUE(!func);
+    InPlaceFunction<size_t(size_t)> func2{nullptr};
+    EXPECT_TRUE(!func2);
+    InPlaceFunction<size_t(size_t)> func3 = [](size_t x) { return x; };
+    EXPECT_TRUE(!(!func3));
+    func3 = nullptr;
+    EXPECT_TRUE(!func3);
+}
+
+TEST(InPlaceFunctionTests, MultiArg) {
+    InPlaceFunction<size_t(size_t, size_t, size_t)> func = [](size_t a, size_t b, size_t c) {
+        return a + b + c;
+    };
+    EXPECT_EQ(func(2, 3, 5), 2ull + 3 + 5);
+}
+struct Record {
+    Record(size_t m, size_t c, size_t d) : move_called(m), copy_called(c), dtor_called(d) {}
+    Record() {}
+    size_t move_called = 0;
+    size_t copy_called = 0;
+    size_t dtor_called = 0;
+    friend std::ostream& operator<<(std::ostream& os, const Record& record) {
+        return os << "Record, moves: " << record.move_called << ", copies: " << record.copy_called
+                  << ", dtor: " << record.dtor_called << '\n';
+    }
+};
+
+bool operator==(const Record& lhs, const Record& rhs) {
+    return lhs.move_called == rhs.move_called && lhs.copy_called == rhs.copy_called &&
+           lhs.dtor_called == rhs.dtor_called;
+}
+
+struct Noisy {
+    Record& ref;
+    size_t state;
+    Noisy(Record& record, size_t val) : ref(record), state(val) {}
+    Noisy(const Noisy& other) : ref(other.ref), state(other.state) { ref.copy_called++; }
+
+    Noisy(Noisy&& other) : ref(other.ref), state(other.state) { ref.move_called++; }
+    ~Noisy() { ref.dtor_called++; }
+
+    size_t operator()() { return state; }
+};
+
+TEST(InPlaceFunctionTests, CtorForwarding) {
+    Record record;
+    Noisy noisy{record, 17};
+    InPlaceFunction<size_t()> func{noisy};
+    EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func(), 17ull);
+    Record record2;
+    Noisy noisy2{record2, 13};
+    InPlaceFunction<size_t()> func2{std::move(noisy2)};
+    EXPECT_EQ(record2, Record(1, 0, 0));  // move, copy, dtor
+    EXPECT_EQ(func2(), 13ull);
+}
+
+TEST(InPlaceFunctionTests, FunctionCtorForwarding) {
+    {
+        Record record;
+        Noisy noisy{record, 17};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 17ull);
+        InPlaceFunction<size_t()> func2{func};
+        EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+        EXPECT_EQ(func2(), 17ull);
+    }
+    Record record;
+    Noisy noisy{record, 13};
+    InPlaceFunction<size_t()> func{noisy};
+    EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func(), 13ull);
+    InPlaceFunction<size_t()> func2{std::move(func)};
+    EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+    EXPECT_EQ(func2(), 13ull);
+    // We expect moved-from functions to still be valid
+    EXPECT_TRUE(!(!func));
+    EXPECT_EQ(static_cast<bool>(func), static_cast<bool>(func2));
+    EXPECT_EQ(func(), 13ull);
+}
+
+TEST(InPlaceFunctionTests, Dtor) {
+    Record record;
+    {
+        InPlaceFunction<size_t()> func;
+        {
+            Noisy noisy{record, 17};
+            func = noisy;
+        }
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 1ull);
+    }
+    EXPECT_EQ(record.dtor_called, 2ull);
+}
+
+TEST(InPlaceFunctionTests, Assignment) {
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 5};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 0ull);
+        func = noisy2;
+        EXPECT_EQ(record.dtor_called, 1ull);
+        EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 5ull);
+    }
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 5};
+        InPlaceFunction<size_t()> func{noisy};
+        EXPECT_EQ(func(), 17ull);
+        EXPECT_EQ(record.dtor_called, 0ull);
+        func = std::move(noisy2);
+        EXPECT_EQ(record.dtor_called, 1ull);
+        EXPECT_EQ(record2, Record(1, 0, 0));  // move, copy, dtor
+        EXPECT_EQ(func(), 5ull);
+    }
+
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 13};
+        {
+            InPlaceFunction<size_t()> func{noisy};
+            EXPECT_EQ(func(), 17ull);
+            InPlaceFunction<size_t()> func2{noisy2};
+            EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(record.dtor_called, 0ull);
+            func = func2;
+            EXPECT_EQ(record.dtor_called, 1ull);
+            EXPECT_EQ(func(), 13ull);
+            EXPECT_EQ(record2, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_TRUE(static_cast<bool>(func2));
+            EXPECT_EQ(func2(), 13ull);
+        }
+        EXPECT_EQ(record2, Record(0, 2, 2));  // move, copy, dtor
+    }
+
+    {
+        Record record;
+        Record record2;
+        Noisy noisy{record, 17};
+        Noisy noisy2{record2, 13};
+        {
+            InPlaceFunction<size_t()> func{noisy};
+            EXPECT_EQ(func(), 17ull);
+            InPlaceFunction<size_t()> func2{noisy2};
+            EXPECT_EQ(record.dtor_called, 0ull);
+            EXPECT_EQ(record2, Record(0, 1, 0));  // move, copy, dtor
+            func = std::move(func2);
+            EXPECT_EQ(record.dtor_called, 1ull);
+            EXPECT_EQ(func(), 13ull);
+            EXPECT_EQ(record2, Record(1, 1, 0));  // move, copy, dtor
+            // The moved-from function is still valid
+            EXPECT_TRUE(static_cast<bool>(func2));
+            EXPECT_EQ(func2(), 13ull);
+        }
+        EXPECT_EQ(record2, Record(1, 1, 2));  // move, copy, dtor
+    }
+}
+
+TEST(InPlaceFunctionTests, Swap) {
+    Record record1;
+    Record record2;
+    InPlaceFunction<size_t()> func1 = Noisy{record1, 5};
+    InPlaceFunction<size_t()> func2 = Noisy{record2, 7};
+    EXPECT_EQ(record1, Record(1, 0, 1));  // move, copy, dtor
+    EXPECT_EQ(record2, Record(1, 0, 1));  // move, copy, dtor
+    EXPECT_EQ(func1(), 5ull);
+    EXPECT_EQ(func2(), 7ull);
+    func1.swap(func2);
+    EXPECT_EQ(record1, Record(2, 0, 2));  // move, copy, dtor
+    // An additional move and destroy into the temporary object
+    EXPECT_EQ(record2, Record(3, 0, 3));  // move, copy, dtor
+    EXPECT_EQ(func1(), 7ull);
+    EXPECT_EQ(func2(), 5ull);
+}
+
+TEST(InPlaceFunctionTests, Conversion) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 16> func2 = noisy;
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        {
+            InPlaceFunction<size_t(), 32> func{func2};
+            EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(0, 2, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(0, 2, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionMove) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 16> func2 = noisy;
+        EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+        {
+            InPlaceFunction<size_t(), 32> func{std::move(func2)};
+            EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(1, 1, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionAssign) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 32> func;
+        {
+            InPlaceFunction<size_t(), 16> func2 = noisy;
+            EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+            func = func2;
+            EXPECT_EQ(record, Record(0, 2, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(0, 2, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(0, 2, 2));  // move, copy, dtor
+}
+
+TEST(InPlaceFunctionTests, ConversionAssignMove) {
+    Record record;
+    Noisy noisy{record, 15};
+    {
+        InPlaceFunction<size_t(), 32> func;
+        {
+            InPlaceFunction<size_t(), 16> func2 = noisy;
+            EXPECT_EQ(record, Record(0, 1, 0));  // move, copy, dtor
+            func = std::move(func2);
+            EXPECT_EQ(record, Record(1, 1, 0));  // move, copy, dtor
+            EXPECT_EQ(func2(), func());
+        }
+        EXPECT_EQ(record, Record(1, 1, 1));  // move, copy, dtor
+    }
+    EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+}
+
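+// NoMoveCopy can be neither copied nor moved, so passing it through an InPlaceFunction
+// verifies that arguments and return values are forwarded by reference rather than by value.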
+struct NoMoveCopy {
+    NoMoveCopy() = default;
+    NoMoveCopy(const NoMoveCopy&) = delete;
+    NoMoveCopy(NoMoveCopy&&) = delete;
+};
+struct TestCallable {
+    NoMoveCopy& operator()(NoMoveCopy& x) { return x; }
+};
+
+TEST(InPlaceFunctionTests, ArgumentForwarding) {
+    const auto lambd = [](NoMoveCopy& x) -> NoMoveCopy& { return x; };
+    InPlaceFunction<NoMoveCopy&(NoMoveCopy&)> func = lambd;
+    const auto lambd2 = [](NoMoveCopy&& x) -> NoMoveCopy&& { return std::move(x); };
+    InPlaceFunction<NoMoveCopy && (NoMoveCopy &&)> func2 = lambd2;
+    auto lvalue = NoMoveCopy{};
+    func(lvalue);
+    func2(NoMoveCopy{});
+    InPlaceFunction<void(NoMoveCopy&)> func3 = [](const NoMoveCopy&) {};
+    func3(lvalue);
+    InPlaceFunction<void(NoMoveCopy &&)> func4 = [](const NoMoveCopy&) {};
+    func4(std::move(lvalue));
+    InPlaceFunction<void(const NoMoveCopy&)> func5 = [](const NoMoveCopy&) {};
+    func5(lvalue);
+    InPlaceFunction<void(const NoMoveCopy&&)> func6 = [](const NoMoveCopy&) {};
+    func6(std::move(lvalue));
+    InPlaceFunction<void(const NoMoveCopy&&)> func7 = [](const NoMoveCopy&&) {};
+    func7(std::move(lvalue));
+    InPlaceFunction<void(NoMoveCopy &&)> func8 = [](const NoMoveCopy&&) {};
+    func8(std::move(lvalue));
+
+    {
+        Record record;
+        Noisy noisy{record, 5};
+        const auto lambd3 = [](Noisy) {};
+        InPlaceFunction<void(Noisy)> func3{lambd3};
+        EXPECT_EQ(record, Record(0, 0, 0));  // move, copy, dtor
+        func3(std::move(noisy));
+        EXPECT_EQ(record, Record(2, 0, 2));  // move, copy, dtor
+    }
+
+    {
+        Record record;
+        Noisy noisy{record, 5};
+        const auto lambd3 = [](Noisy) {};
+        InPlaceFunction<void(Noisy)> func3{lambd3};
+        EXPECT_EQ(record, Record(0, 0, 0));  // move, copy, dtor
+        func3(noisy);
+        EXPECT_EQ(record, Record(1, 1, 2));  // move, copy, dtor
+    }
+}
+
+TEST(InPlaceFunctionTests, VoidFunction) {
+    InPlaceFunction<void(size_t)> func = [](size_t x) -> size_t { return x; };
+    func(5);
+    InPlaceFunction<void(void)> func2 = []() -> size_t { return 5; };
+    func2();
+}
+NoMoveCopy foo() {
+    return NoMoveCopy();
+}
+struct Test {
+    NoMoveCopy operator()() { return NoMoveCopy{}; }
+};
+
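+// Wrapping a callable that returns a type which can be neither moved nor copied must still
+// compile; the return value is expected to be elided all the way to the caller.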
+TEST(InPlaceFunctionTests, FullElision) {
+    InPlaceFunction<NoMoveCopy()> func = foo;
+}
+
+TEST(InPlaceFunctionTests, ReturnConversion) {
+    const auto lambd = [](int&& x) -> int&& { return std::move(x); };
+    InPlaceFunction<int && (int&& x)> func = lambd;
+    func(5);
+    InPlaceFunction<void(int)> func3 = [](double) {};
+    func3(5);
+    InPlaceFunction<double()> func4 = []() -> int { return 5; };
+    func4();
+}
+
+struct Overloaded {
+    int operator()() & { return 2; }
+    int operator()() const& { return 3; }
+    int operator()() && { return 4; }
+    int operator()() const&& { return 5; }
+};
+
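+// The stored callable is expected to be invoked as a non-const lvalue, so the & overload
+// (returning 2) is selected in both calls below.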
+TEST(InPlaceFunctionTests, OverloadResolution) {
+    InPlaceFunction<int()> func = Overloaded{};
+    EXPECT_EQ(func(), 2);
+    EXPECT_EQ(std::move(func()), 2);
+}
+
+template <class T, class U, class = void>
+struct can_assign : std::false_type {};
+
+template <class T, class U>
+struct can_assign<T, U, typename std::void_t<decltype(T().operator=(U()))>> : std::true_type {};
+
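+// Convertible<From, To, Expected> verifies that constructibility and assignability of To
+// from From agree with each other and both equal Expected.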
+template <class From, class To, bool Expected>
+static constexpr bool Convertible =
+        (can_assign<To, From>::value ==
+         std::is_constructible_v<To, From>)&&(std::is_constructible_v<To, From> == Expected);
+
+struct TooBig {
+    std::array<uint64_t, 5> big = {1, 2, 3, 4, 5};
+    size_t operator()() { return static_cast<size_t>(big[0] + big[1] + big[2] + big[3] + big[4]); }
+};
+static_assert(sizeof(TooBig) == 40);
+struct NotCallable {};
+struct WrongArg {
+    void operator()(NotCallable) {}
+};
+struct WrongRet {
+    NotCallable operator()(size_t) { return NotCallable{}; }
+};
+
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 32>, true>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(size_t), 32>, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<InPlaceFunction<void(), 32>, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<TooBig, InPlaceFunction<size_t(), 32>, false>);
+static_assert(Convertible<TooBig, InPlaceFunction<size_t(), 40>, true>);
+static_assert(Convertible<NotCallable, InPlaceFunction<size_t(), 40>, false>);
+static_assert(Convertible<WrongArg, InPlaceFunction<void(size_t), 40>, false>);
+static_assert(Convertible<WrongRet, InPlaceFunction<size_t(size_t), 40>, false>);
+// A void-returning function signature can be modelled by a callable with any return type
+static_assert(Convertible<WrongRet, InPlaceFunction<void(size_t), 40>, true>);
+
+// Check constructibility/assignability between function types with different storage sizes
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 24>, false>);
+static_assert(Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(), 40>, true>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<size_t(size_t), 40>, false>);
+static_assert(
+        Convertible<InPlaceFunction<size_t(), 32>, InPlaceFunction<NotCallable(), 40>, false>);
+
+struct BadLambd {
+    int operator()(int&& x) { return std::move(x); }
+};
+
+static_assert(Convertible<BadLambd, InPlaceFunction<int(int&&), 32>, true>);
+static_assert(Convertible<BadLambd, InPlaceFunction<int&(int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<const int&(int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<int && (int&&), 32>, false>);
+static_assert(Convertible<BadLambd, InPlaceFunction<const int && (int&&), 32>, false>);
+
+struct Base {};
+struct Derived : Base {};
+struct Converted {
+    Converted(const Derived&) {}
+};
+
+struct ConvertCallable {
+    Derived operator()() { return Derived{}; }
+    Derived& operator()(Derived& x) { return x; }
+    Derived&& operator()(Derived&& x) { return std::move(x); }
+    const Derived& operator()(const Derived& x) { return x; }
+    const Derived&& operator()(const Derived&& x) { return std::move(x); }
+};
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted()>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Converted && ()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Converted&()>, false>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Converted && ()>, false>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived&(Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base&(Derived&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Derived && (Derived &&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<Base && (Derived &&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(const Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(const Derived&)>, true>);
+
+static_assert(
+        Convertible<ConvertCallable, InPlaceFunction<const Derived && (const Derived&&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base && (const Derived&&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(Derived&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(Derived&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived && (Derived &&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base && (Derived &&)>, true>);
+
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Derived&(Derived&&)>, true>);
+static_assert(Convertible<ConvertCallable, InPlaceFunction<const Base&(Derived&&)>, true>);
diff --git a/media/utils/tests/library_tests.cpp b/media/utils/tests/library_tests.cpp
index c5c500c..f15f7f9 100644
--- a/media/utils/tests/library_tests.cpp
+++ b/media/utils/tests/library_tests.cpp
@@ -26,8 +26,9 @@
 
 namespace {
 
-static int32_t here = 0;  // accessed on same thread.
+[[maybe_unused]] static int32_t here = 0;  // accessed on same thread.
 
+#ifdef __ANDROID__
 TEST(library_tests, basic) {
     std::string path = android::base::GetExecutableDirectory() + "/libsharedtest.so";
     // The flags to loadLibrary should not include  RTLD_GLOBAL or RTLD_NODELETE
@@ -64,6 +65,7 @@
     // will prevent unloading libraries.
     ASSERT_EQ(1, here);
 }
+#endif
 
 TEST(library_tests, sad_library) {
     std::string path = android::base::GetExecutableDirectory()
diff --git a/media/utils/tests/media_process_tests.cpp b/media/utils/tests/media_process_tests.cpp
index 2ae3f70..391c6a7 100644
--- a/media/utils/tests/media_process_tests.cpp
+++ b/media/utils/tests/media_process_tests.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <mediautils/Process.h>
+#include <mediautils/TidWrapper.h>
 
 #define LOG_TAG "media_process_tests"
 
@@ -24,8 +25,16 @@
 using namespace android;
 using namespace android::mediautils;
 
+// Disables false positives from base::Split()
+//
+// See mismatched sanitized libraries here:
+// https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
+extern "C" const char* __asan_default_options() {
+  return "detect_container_overflow=0";
+}
+
 TEST(media_process_tests, basic) {
-  const std::string schedString = getThreadSchedAsString(gettid());
+  const std::string schedString = getThreadSchedAsString(getThreadIdWrapper());
 
   (void)schedString;
   // We don't test schedString, only that we haven't crashed.
diff --git a/media/utils/tests/media_threadsnapshot_tests.cpp b/media/utils/tests/media_threadsnapshot_tests.cpp
index c7a45e2..57cf698 100644
--- a/media/utils/tests/media_threadsnapshot_tests.cpp
+++ b/media/utils/tests/media_threadsnapshot_tests.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <mediautils/ThreadSnapshot.h>
+#include <mediautils/TidWrapper.h>
 
 #define LOG_TAG "media_threadsnapshot_tests"
 
@@ -27,10 +28,18 @@
 using namespace android;
 using namespace android::mediautils;
 
+// Disables false positives from base::Split()
+//
+// See mismatched sanitized libraries here:
+// https://github.com/google/sanitizers/wiki/AddressSanitizerContainerOverflow
+extern "C" const char* __asan_default_options() {
+  return "detect_container_overflow=0";
+}
+
 TEST(media_threadsnapshot_tests, basic) {
   using namespace std::chrono_literals;
 
-  ThreadSnapshot threadSnapshot(gettid());
+  ThreadSnapshot threadSnapshot(getThreadIdWrapper());
 
   threadSnapshot.onBegin();
 
diff --git a/media/utils/tests/mediautils_fixedstring_tests.cpp b/media/utils/tests/mediautils_fixedstring_tests.cpp
new file mode 100644
index 0000000..7ab9a9f
--- /dev/null
+++ b/media/utils/tests/mediautils_fixedstring_tests.cpp
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "mediautils_fixedstring_tests"
+
+#include <mediautils/FixedString.h>
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+
+TEST(mediautils_fixedstring_tests, ctor) {
+    FixedString<8> s0("abcde");
+
+    ASSERT_FALSE(s0.empty());
+    ASSERT_EQ(8U, s0.capacity());
+
+    ASSERT_EQ(5U, s0.size());
+    ASSERT_EQ(3U, s0.remaining());
+    ASSERT_EQ(0, strcmp(s0.c_str(), "abcde"));
+
+    ASSERT_EQ(0, strcmp(s0.data(), "abcde"));
+
+    // overflow
+    FixedString<8> s1("abcdefghijk");
+    ASSERT_EQ(8U, s1.size());
+    ASSERT_TRUE(s1.full());
+    ASSERT_EQ(0U, s1.remaining());
+    ASSERT_EQ(0, strcmp(s1.c_str(), "abcdefgh"));
+
+    // overflow
+    FixedString<8> s2(std::string("abcdefghijk"));
+    ASSERT_TRUE(s2.full());
+
+    ASSERT_EQ(8U, s2.size());
+    ASSERT_EQ(0, strcmp(s2.c_str(), "abcdefgh"));
+
+    // complex
+    ASSERT_EQ(s1, s2);
+    ASSERT_EQ(FixedString<12>().append(s1), s2);
+    ASSERT_NE(s1, "bcd");
+
+    // string and stringview
+    ASSERT_EQ(s1.asString(), s1.asStringView());
+
+    FixedString30 s3;
+    s3 = std::string("abcd");
+    ASSERT_EQ(s3, "abcd");
+
+    s3.clear();
+    ASSERT_EQ(s3, "");
+    ASSERT_NE(s3, "abcd");
+    ASSERT_EQ(0U, s3.size());
+}
+
+TEST(mediautils_fixedstring_tests, append) {
+    FixedString<8> s0;
+    ASSERT_EQ(0U, s0.size());
+    ASSERT_EQ(0, strcmp(s0.c_str(), ""));
+    ASSERT_TRUE(s0.empty());
+    ASSERT_FALSE(s0.full());
+
+    s0.append("abc");
+    ASSERT_EQ(3U, s0.size());
+    ASSERT_EQ(0, strcmp(s0.c_str(), "abc"));
+
+    s0.append(std::string("d"));
+    ASSERT_EQ(4U, s0.size());
+    ASSERT_EQ(0, strcmp(s0.c_str(), "abcd"));
+
+    // overflow
+    s0.append("efghijk");
+    ASSERT_EQ(8U, s0.size());
+    ASSERT_EQ(0, strcmp(s0.c_str(), "abcdefgh"));
+    ASSERT_TRUE(s0.full());
+
+    // concatenated
+    ASSERT_EQ(FixedString62("abcd"),
+            FixedString<8>("ab").append("c").append(FixedString<8>("d")));
+    ASSERT_EQ(FixedString<12>("a").append(FixedString<12>("b")), "ab");
+}
+
+TEST(mediautils_fixedstring_tests, plus_equals) {
+    FixedString<8> s0;
+    ASSERT_EQ(0U, s0.size());
+    ASSERT_EQ(0, strcmp(s0.c_str(), ""));
+
+    s0 += "abc";
+    s0 += "def";
+    ASSERT_EQ(s0, "abcdef");
+}
+
+TEST(mediautils_fixedstring_tests, stream_operator) {
+    FixedString<8> s0('a');
+
+    s0 << 'b' << "c" << "d" << '\n';
+    ASSERT_EQ(s0, "abcd\n");
+}
+
+TEST(mediautils_fixedstring_tests, examples) {
+    FixedString30 s1(std::string("a"));
+    s1 << "bc" << 'd' << '\n';
+    s1 += "hello";
+
+    ASSERT_EQ(s1, "abcd\nhello");
+
+    FixedString30 s2;
+    for (const auto &c : s1.asStringView()) {
+        s2.append(c);
+    }
+    ASSERT_EQ(s1, s2);
+
+    FixedString30 s3(std::move(s1));
+}
+
+// Ensure type alias works fine as well.
+using FixedString1024 = FixedString<1024>;
+
+TEST(mediautils_fixedstring_tests, copy) {
+    FixedString1024 s0("abc");
+    FixedString62 s1(s0);
+
+    ASSERT_EQ(3U, s1.size());
+    ASSERT_EQ(0, strcmp(s1.c_str(), "abc"));
+    ASSERT_EQ(s0, s1);
+
+    FixedString<1024> s2(s1);
+    ASSERT_EQ(3U, s2.size());
+    ASSERT_EQ(0, strcmp(s2.c_str(), "abc"));
+    ASSERT_EQ(s2, "abc");
+    ASSERT_NE(s2, "def");
+    ASSERT_EQ(s2, std::string("abc"));
+    ASSERT_NE(s2, std::string("def"));
+    ASSERT_EQ(s1, s2);
+    ASSERT_EQ(s0, s2);
+    ASSERT_EQ(s2, FixedString62(FixedString1024("abc")));
+}
diff --git a/media/utils/memory-test.cpp b/media/utils/tests/memory-test.cpp
similarity index 100%
rename from media/utils/memory-test.cpp
rename to media/utils/tests/memory-test.cpp
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
new file mode 100644
index 0000000..11bc72a
--- /dev/null
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "shared_memory_allocator_tests"
+
+#include <gtest/gtest.h>
+#include <mediautils/SharedMemoryAllocator.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+namespace {
+void validate_block(const AllocationType& block) {
+    ASSERT_TRUE(block != nullptr);
+    memset(block->unsecurePointer(), 10, 4096);
+    EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
+}
+
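+// Allocator stub that records forwarding of deallocate_all() and exposes sentinel
+// allocations, so tests can check that wrapper allocators forward owns(), dump() and
+// alignment() to the allocator they wrap.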
+template <size_t N = 0, bool FatalOwn = true>
+struct ValidateForwarding {
+    static constexpr size_t alignment() { return 1337; }
+
+    bool owns(const AllocationType& allocation) const {
+        if (allocation == owned) return true;
+        if constexpr (FatalOwn) {
+            LOG_ALWAYS_FATAL_IF(allocation != not_owned, "Invalid allocation passed to allocator");
+        }
+        return false;
+    }
+
+    void deallocate_all() { deallocate_all_count++; }
+    std::string dump() const { return dump_string; }
+
+    static inline size_t deallocate_all_count = 0;
+    static inline const AllocationType owned =
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+    static inline const AllocationType not_owned =
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+    static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
+};
+
+};  // namespace
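+// Compile-time checks that the has_owns / has_dump / has_deallocate_all traits detect the
+// optional allocator interface only on the allocators that actually provide it.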
+static_assert(shared_allocator_impl::has_owns<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_dump<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_deallocate_all<MemoryHeapBaseAllocator> == false);
+static_assert(shared_allocator_impl::has_owns<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(shared_allocator_impl::has_dump<SnoopingAllocator<MemoryHeapBaseAllocator>> == true);
+static_assert(
+        shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_owns<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_dump<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(
+        shared_allocator_impl::has_deallocate_all<
+                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
+        true);
+static_assert(shared_allocator_impl::has_owns<
+                      FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                        SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
+
+TEST(shared_memory_allocator_tests, roundup) {
+    using namespace shared_allocator_impl;
+    EXPECT_EQ(roundup(1023, 1024), 1024ul);
+    EXPECT_EQ(roundup(1024, 1024), 1024ul);
+    EXPECT_EQ(roundup(1025, 1024), 2048ul);
+    EXPECT_DEATH(roundup(1023, 1023), "");
+    EXPECT_DEATH(roundup(1023, 0), "");
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator) {
+    MemoryHeapBaseAllocator allocator;
+    const auto memory = allocator.allocate(BasicAllocRequest{500});
+    ASSERT_TRUE(memory != nullptr);
+    const auto fd = dup(memory->getMemory()->getHeapID());
+    EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+    EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
+    validate_block(memory);
+    allocator.deallocate(memory);
+    // Ensures we have closed the fd
+    EXPECT_EQ(memory->unsecurePointer(), nullptr);
+    EXPECT_EQ(memory->getMemory()->getBase(), nullptr);
+    struct stat st;
+    const auto err = fstat(fd, &st);
+    EXPECT_EQ(err, 0);
+    // Ensure we reclaim pages (overly-zealous)
+    EXPECT_EQ(st.st_size, 0);
+}
+
+TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
+    static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+    MemoryHeapBaseAllocator allocator;
+    const auto first_memory = allocator.allocate(BasicAllocRequest{500});
+    const auto second_memory = allocator.allocate(BasicAllocRequest{500});
+    ASSERT_TRUE(first_memory != nullptr && second_memory != nullptr);
+    EXPECT_NE(first_memory->getMemory()->getHeapID(), second_memory->getMemory()->getHeapID());
+    allocator.deallocate(first_memory);
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator) {
+    static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+
+    SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
+    const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
+    auto second_memory = first_memory;
+    {
+        const auto tmp = allocator.allocate(NamedAllocRequest{{5000}, "allocate_2"});
+        // Test copying handle around
+        second_memory = tmp;
+    }
+    ASSERT_TRUE(first_memory && second_memory);
+    EXPECT_TRUE(allocator.owns(first_memory) && allocator.owns(second_memory));
+    const auto first_allocations = allocator.getAllocations();
+    EXPECT_EQ(first_allocations.size(), 2ull);
+    for (const auto& [key, val] : allocator.getAllocations()) {
+        if (val.allocation_number == 0) {
+            EXPECT_EQ(val.name, "allocate_1");
+            EXPECT_TRUE(first_memory == key);
+        }
+        if (val.allocation_number == 1) {
+            EXPECT_EQ(val.name, "allocate_2");
+            EXPECT_TRUE(second_memory == key);
+        }
+    }
+    // TODO test dump and deallocate forwarding
+    // EXPECT_EQ(allocator.dump(), std::string{});
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+    EXPECT_EQ(second_memory->unsecurePointer(), nullptr);
+    EXPECT_FALSE(allocator.owns(second_memory));
+    EXPECT_TRUE(allocator.owns(first_memory));
+    const auto second_allocations = allocator.getAllocations();
+    EXPECT_EQ(second_allocations.size(), 1ul);
+    for (const auto& [key, val] : second_allocations) {
+        EXPECT_EQ(val.name, "allocate_1");
+        EXPECT_TRUE(first_memory == key);
+    }
+    // EXPECT_EQ(allocator.dump(), std::string{});
+    // TODO test deallocate_all O(1)
+}
+
+// TODO generic policy test
+TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
+    PolicyAllocator allocator{MemoryHeapBaseAllocator{},
+                              SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+    // Violate max size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+    // Violate min alloc size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
+    const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+    validate_block(first_memory);
+    // Violate pool size
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
+    const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+    validate_block(second_memory);
+    allocator.deallocate(second_memory);
+    // Check pool size update after deallocation
+    const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+    validate_block(new_second_memory);
+}
+
+TEST(shared_memory_allocator_tests, indirect_allocator) {
+    static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+    const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
+    IndirectAllocator allocator{allocator_handle};
+    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    EXPECT_TRUE(allocator_handle->owns(memory));
+    EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
+    allocator.deallocate(memory);
+    EXPECT_FALSE(allocator_handle->owns(memory));
+    EXPECT_TRUE(allocator_handle->getAllocations().size() == 0);
+}
+
+TEST(shared_memory_allocator_tests, policy_allocator_forwarding) {
+    // Test appropriate forwarding of allocator, deallocate
+    const auto primary_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
+    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    EXPECT_TRUE(primary_allocator->owns(memory));
+    const auto& allocations = primary_allocator->getAllocations();
+    EXPECT_TRUE(allocations.size() == 1);
+    allocator.deallocate(memory);
+    EXPECT_TRUE(allocations.size() == 0);
+    const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+    EXPECT_TRUE(allocations.size() == 1);
+    EXPECT_TRUE(primary_allocator->owns(memory2));
+    allocator.deallocate(memory2);
+    EXPECT_FALSE(primary_allocator->owns(memory2));
+    EXPECT_TRUE(allocations.size() == 0);
+    // Test appropriate forwarding of own, dump, alignment, deallocate_all
+    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+    EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
+    EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
+    EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
+    static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+    size_t prev = ValidateForwarding<0>::deallocate_all_count;
+    allocator2.deallocate_all();
+    EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
+}
+
+TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
+    SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    validate_block(memory);
+    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+    const auto& allocations = allocator.getAllocations();
+    EXPECT_EQ(allocations.size(), 1ul);
+    for (const auto& [key, val] : allocations) {
+        EXPECT_EQ(val.name, "allocation_1");
+        EXPECT_EQ(val.allocation_number, 0ul);
+        EXPECT_TRUE(key == memory);
+    }
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator) {
+    // Construct Fallback Allocator
+    const auto primary_allocator = std::make_shared<
+            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+    const auto secondary_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
+
+    FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
+                                         SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
+    static_assert(decltype(fallback_allocator)::alignment() == 4096);
+    // Basic Allocation Test
+    const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    validate_block(memory);
+    // Correct allocator selected
+    EXPECT_TRUE(fallback_allocator.owns(memory));
+    EXPECT_TRUE(primary_allocator->owns(memory));
+    EXPECT_FALSE(secondary_allocator->owns(memory));
+    // Test fallback allocation
+    const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+    validate_block(memory2);
+    // Correct allocator selected
+    EXPECT_TRUE(fallback_allocator.owns(memory2));
+    EXPECT_FALSE(primary_allocator->owns(memory2));
+    EXPECT_TRUE(secondary_allocator->owns(memory2));
+    // Allocations ended up in the correct allocators
+    const auto& primary_allocations = primary_allocator->getAllocations();
+    EXPECT_TRUE(primary_allocations.size() == 1ul);
+    ASSERT_TRUE(primary_allocations.find(memory) != primary_allocations.end());
+    EXPECT_EQ(primary_allocations.find(memory)->second.name, std::string{"allocation_1"});
+    const auto& secondary_allocations = secondary_allocator->getAllocations();
+    EXPECT_TRUE(secondary_allocations.size() == 1ul);
+    ASSERT_TRUE(secondary_allocations.find(memory2) != secondary_allocations.end());
+    EXPECT_EQ(secondary_allocations.find(memory2)->second.name, std::string{"allocation_2"});
+    // Test deallocate appropriate forwarding
+    fallback_allocator.deallocate(memory);
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    // Appropriate fallback after deallocation
+    const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+    EXPECT_TRUE(fallback_allocator.owns(memory3));
+    EXPECT_TRUE(primary_allocator->owns(memory3));
+    EXPECT_FALSE(secondary_allocator->owns(memory3));
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+    // Test deallocate appropriate forwarding
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    fallback_allocator.deallocate(memory2);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
+    const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+    EXPECT_TRUE(fallback_allocator.owns(memory4));
+    EXPECT_FALSE(primary_allocator->owns(memory4));
+    EXPECT_TRUE(secondary_allocator->owns(memory4));
+    // Allocations ended up in the correct allocators
+    EXPECT_TRUE(primary_allocator->getAllocations().size() == 1ul);
+    EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
+    ASSERT_TRUE(primary_allocations.find(memory3) != primary_allocations.end());
+    EXPECT_EQ(primary_allocations.find(memory3)->second.name, std::string{"allocation_3"});
+    ASSERT_TRUE(secondary_allocations.find(memory4) != secondary_allocations.end());
+    EXPECT_EQ(secondary_allocations.find(memory4)->second.name, std::string{"allocation_4"});
+}
+
+TEST(shared_memory_allocator_tests, fallback_allocator_forwarding) {
+    // Test forwarding
+    using Alloc1 = ValidateForwarding<0, false>;
+    using Alloc2 = ValidateForwarding<1, false>;
+    FallbackAllocator forward_test{Alloc1{}, Alloc2{}};
+    EXPECT_TRUE(forward_test.dump().find(Alloc1::dump_string) != std::string::npos);
+    EXPECT_TRUE(forward_test.dump().find(Alloc2::dump_string) != std::string::npos);
+    // Test owned forwarding
+    EXPECT_TRUE(forward_test.owns(Alloc1::owned));
+    EXPECT_TRUE(forward_test.owns(Alloc2::owned));
+    EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
+    EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
+    // Test alignment forwarding
+    static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+    // Test deallocate_all forwarding
+    size_t prev1 = Alloc1::deallocate_all_count;
+    size_t prev2 = Alloc2::deallocate_all_count;
+    forward_test.deallocate_all();
+    EXPECT_EQ(prev1 + 1, Alloc1::deallocate_all_count);
+    EXPECT_EQ(prev2 + 1, Alloc2::deallocate_all_count);
+}
+
+TEST(shared_memory_allocator_tests, scoped_allocator) {
+    const auto underlying_allocator =
+            std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("Allocator");
+    ScopedAllocator allocator{underlying_allocator};
+    const auto& allocations = underlying_allocator->getAllocations();
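+    // ScopedAllocator ties deallocation to handle lifetime: the allocation below is released
+    // automatically once every copy of the returned handle has gone out of scope.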
+    {
+        decltype(allocator.allocate(NamedAllocRequest{})) copy;
+        {
+            EXPECT_EQ(allocations.size(), 0ul);
+            const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+            copy = memory;
+            EXPECT_EQ(allocations.size(), 1ul);
+            EXPECT_TRUE(allocator.owns(copy));
+            EXPECT_TRUE(allocator.owns(memory));
+        }
+        EXPECT_TRUE(allocator.owns(copy));
+        EXPECT_EQ(allocations.size(), 1ul);
+        for (const auto& [key, value] : allocations) {
+            EXPECT_EQ(value.name, std::string{"allocation_1"});
+        }
+    }
+    EXPECT_EQ(allocations.size(), 0ul);
+    // Test forwarding
+    static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
+                  ValidateForwarding<0>::alignment());
+    ScopedAllocator<ValidateForwarding<0>> forwarding{};
+    EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
+}
diff --git a/media/utils/tests/static_string_view_tests.cpp b/media/utils/tests/static_string_view_tests.cpp
new file mode 100644
index 0000000..c00de68
--- /dev/null
+++ b/media/utils/tests/static_string_view_tests.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "StaticStringViewTests"
+
+#include <mediautils/StaticStringView.h>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android::mediautils;
+
+template <auto& T, class = void>
+struct CanCreate : std::false_type {};
+
+template <auto& T>
+struct CanCreate<T, typename std::void_t<decltype(StaticStringView::create<T>)>> : std::true_type {
+};
+
+static constexpr std::array<char, 2> global = {'a', 'b'};
+
+TEST(StaticStringViewTests, CreateTicket) {
+    // This will always fail due to template param binding rules
+    // const std::array<char,2> nonstatic = {'a', 'b'};
+    // static_assert(can_assign<nonstatic>::value == false);
+    static std::array<char, 2> nonconst = {'a', 'b'};
+    static const std::array<char, 2> nonconstexpr = {'a', 'b'};
+    static constexpr std::array<int, 2> nonchar = {1, 2};
+    static constexpr size_t nonarray = 2;
+
+    static_assert(CanCreate<nonconst>::value == false);
+    static_assert(CanCreate<nonarray>::value == false);
+    static_assert(CanCreate<nonchar>::value == false);
+    static_assert(CanCreate<nonconstexpr>::value == false);
+
+    static constexpr std::array<char, 2> scoped = {'a', 'b'};
+    constexpr StaticStringView Ticket1 = StaticStringView::create<global>();
+    constexpr StaticStringView Ticket2 = StaticStringView::create<scoped>();
+    const StaticStringView Ticket3 = StaticStringView::create<scoped>();
+    EXPECT_EQ(Ticket3, Ticket2);
+    EXPECT_EQ(Ticket1.getStringView(), Ticket2.getStringView());
+    EXPECT_EQ(std::string_view{"ab"}, Ticket1.getStringView());
+}
+TEST(StaticStringViewTests, CompileTimeConvert) {
+    static constexpr std::array<char, 4> converted = StaticStringView::toStdArray("test");
+    constexpr StaticStringView ticket = StaticStringView::create<converted>();
+    EXPECT_EQ(ticket, std::string_view{"test"});
+    // Unchecked constexpr construction
+    static const std::array<char, 5> converted2 = StaticStringView::toStdArray("test2");
+    constexpr auto ticket2 = StaticStringView::create<converted2, false>();
+    EXPECT_EQ(ticket2, std::string_view{"test2"});
+    constexpr char stack_array[4] = {'a', 'b', 'c', '\0'};
+    static constexpr auto converted3 = StaticStringView::toStdArray(stack_array);
+    constexpr auto ticket3 = StaticStringView::create<converted3>();
+    EXPECT_EQ(ticket3, std::string_view{"abc"});
+}
+
+TEST(StaticStringViewTests, CompileTimeConcat) {
+    // temporaries should not be static, to prevent ODR-use
+    constexpr std::array<char, 3> arr1 = {'a', 'b', 'c'};
+    constexpr std::array<char, 4> arr2 = {'d', 'e', 'f', 'g'};
+    static constexpr std::array<char, 7> res = StaticStringView::concatArray(arr1, arr2);
+    static constexpr std::array<char, 7> expected = {'a', 'b', 'c', 'd', 'e', 'f', 'g'};
+    EXPECT_EQ(res, expected);
+}
+
+TEST(StaticStringViewTests, StringViewForwarding) {
+    static constexpr auto converted = StaticStringView::toStdArray("test");
+    constexpr auto ticket = StaticStringView::create<converted>();
+    EXPECT_EQ(ticket.length(), ticket.getStringView().length());
+    EXPECT_TRUE(ticket == ticket.getStringView());
+    EXPECT_TRUE(ticket == ticket);
+    EXPECT_TRUE(ticket.getStringView() == ticket);
+    EXPECT_TRUE(ticket > "abc");
+    EXPECT_TRUE("abc" < ticket);
+}
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
index 6ebf44d..bd91efa 100644
--- a/media/utils/tests/timecheck_tests.cpp
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -26,7 +26,6 @@
 using namespace std::chrono_literals;
 
 namespace {
-
 TEST(timecheck_tests, success) {
     bool timeoutRegistered = false;
     float elapsedMsRegistered = 0.f;
@@ -39,7 +38,7 @@
             timeoutRegistered = timeout;
             elapsedMsRegistered = elapsedMs;
             event = true;
-        }, 1000 /* msec */, false /* crash */);
+        }, 1000ms /* timeoutDuration */, {} /* secondChanceDuration */, false /* crash */);
     }
     ASSERT_TRUE(event);
     ASSERT_FALSE(timeoutRegistered);
@@ -58,7 +57,7 @@
             timeoutRegistered = timeout;
             elapsedMsRegistered = elapsedMs;
             event = true; // store-release, must be last.
-        }, 1 /* msec */, false /* crash */);
+        }, 1ms /* timeoutDuration */, {} /* secondChanceDuration */, false /* crash */);
         std::this_thread::sleep_for(100ms);
     }
     ASSERT_TRUE(event); // load-acquire, must be first.
@@ -69,4 +68,33 @@
 // Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
 // EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
 
-} // namespace
\ No newline at end of file
+// Note: the following test is used to manually verify that the correct thread is aborted.
+// Because gtest's EXPECT_EXIT interacts poorly with TimeCheck's multithreading, this is
+// hard to verify automatically. TODO(b/246446561) Attempt to use EXPECT_EXIT
+
+#if 0
+void threadFunction() {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    std::atomic_bool event = false;  // seq-cst implies acquire-release
+    {
+        TimeCheck timeCheck("timeout",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true; // store-release, must be last.
+        }, 1ms /* timeoutDuration */, {} /* secondChanceDuration */, true /* crash */);
+        std::this_thread::sleep_for(100ms);
+        ADD_FAILURE();
+    }
+}
+
+TEST(timecheck_tests, death) {
+  std::thread mthread{threadFunction};
+  mthread.join();
+}
+#endif
+
+} // namespace
+
diff --git a/services/OWNERS b/services/OWNERS
deleted file mode 100644
index 17e605d..0000000
--- a/services/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-elaurent@google.com
-essick@google.com
-etalvala@google.com
-hunga@google.com
-nchalko@google.com
-quxiangfang@google.com
diff --git a/services/audioflinger/AllocatorFactory.h b/services/audioflinger/AllocatorFactory.h
new file mode 100644
index 0000000..7534607
--- /dev/null
+++ b/services/audioflinger/AllocatorFactory.h
@@ -0,0 +1,95 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include <mediautils/SharedMemoryAllocator.h>
+
+#pragma once
+
+// TODO how do we appropriately restrict visibility of this header?
+// It should only be included in AudioFlinger.h
+// We will make everything internal linkage for now.
+namespace android {
+namespace AllocatorFactory {
+namespace {
+// TODO make sure these are appropriate
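+//
+// The total budget is split into a dedicated pool and a shared pool; the shared pool is in
+// turn split into a large region and a small region reserved for requests up to SMALL_THRESHOLD.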
+constexpr inline size_t MAX_MEMORY_SIZE = 1024 * 1024 * 100;                  // 100 MiB
+constexpr inline size_t DED_SIZE = (MAX_MEMORY_SIZE * 4) / 10;                // 40 MiB
+constexpr inline size_t SHARED_SIZE = MAX_MEMORY_SIZE - DED_SIZE;             // 60 MiB
+constexpr inline size_t SHARED_SIZE_LARGE = (SHARED_SIZE * 4) / 6;            // 40 MiB
+constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE;  // 20 MiB
+constexpr inline size_t SMALL_THRESHOLD = 1024 * 40;                          // 40 KiB
+
+inline auto getDedicated() {
+    using namespace mediautils;
+    static const auto allocator =
+            std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+    return allocator;
+}
+
+inline auto getSharedLarge() {
+    using namespace mediautils;
+    static const auto allocator = std::make_shared<
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+    return allocator;
+}
+
+inline auto getSharedSmall() {
+    using namespace mediautils;
+    static const auto allocator =
+            std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
+                                             SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+    return allocator;
+}
+
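+// Wraps a shared allocator handle so that each client gets its own named, size-limited and
+// allocation-tracking view onto the underlying pool.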
+template <typename Policy, typename Allocator>
+inline auto wrapWithPolicySnooping(Allocator allocator, std::string_view name) {
+    using namespace mediautils;
+    return SnoopingAllocator{PolicyAllocator{IndirectAllocator{allocator}, Policy{}}, name};
+}
+
+// A reasonable upper bound on how many clients we expect, and how many pieces to slice
+// the dedicated pool into.
+constexpr inline size_t CLIENT_BOUND = 32;
+// Maximum fraction of a shared pool that a single client can take (1/2, i.e. 50%).
+constexpr inline size_t ADV_THRESHOLD_INV = 2;
+
+inline auto getClientAllocator() {
+    using namespace mediautils;
+    const auto makeDedPool = []() {
+        return wrapWithPolicySnooping<SizePolicy<DED_SIZE / CLIENT_BOUND>>(getDedicated(),
+                                                                           "Dedicated Pool");
+    };
+    const auto makeLargeShared = []() {
+        return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_LARGE / ADV_THRESHOLD_INV>>(
+                getSharedLarge(), "Large Shared");
+    };
+    const auto makeSmallShared = []() {
+        return wrapWithPolicySnooping<
+                SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+                getSharedSmall(), "Small Shared");
+    };
+
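+    // Per-client allocation order: the client's slice of the dedicated pool first, then the
+    // large shared pool, and finally the small shared pool.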
+    return ScopedAllocator{std::make_shared<
+            FallbackAllocator<decltype(makeDedPool()),
+                              decltype(FallbackAllocator(makeLargeShared(), makeSmallShared()))>>(
+            makeDedPool(), FallbackAllocator{makeLargeShared(), makeSmallShared()})};
+}
+
+using ClientAllocator = decltype(getClientAllocator());
+}  // namespace
+}  // namespace AllocatorFactory
+}  // namespace android
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 763c070..663df69 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -19,9 +19,130 @@
     ],
 }
 
+tidy_errors = [
+    // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+    // For many categories, the checks are too many to specify individually.
+    // Feel free to disable as needed - as warnings are generally ignored,
+    // we treat warnings as errors.
+    "android-*",
+    "bugprone-*",
+    "cert-*",
+    "clang-analyzer-security*",
+    "google-*",
+    "misc-*",
+    //"modernize-*",  // explicitly list the modernize as they can be subjective.
+    "modernize-avoid-bind",
+    //"modernize-avoid-c-arrays", // std::array<> can be verbose
+    "modernize-concat-nested-namespaces",
+    //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+    "modernize-deprecated-ios-base-aliases",
+    "modernize-loop-convert",
+    "modernize-make-shared",
+    "modernize-make-unique",
+    // "modernize-pass-by-value",
+    "modernize-raw-string-literal",
+    "modernize-redundant-void-arg",
+    "modernize-replace-auto-ptr",
+    "modernize-replace-random-shuffle",
+    "modernize-return-braced-init-list",
+    "modernize-shrink-to-fit",
+    "modernize-unary-static-assert",
+    // "modernize-use-auto",  // found in MediaMetricsService.h, debatable - auto can obscure type
+    "modernize-use-bool-literals",
+    "modernize-use-default-member-init",
+    "modernize-use-emplace",
+    "modernize-use-equals-default",
+    "modernize-use-equals-delete",
+    // "modernize-use-nodiscard",
+    "modernize-use-noexcept",
+    "modernize-use-nullptr",
+    "modernize-use-override",
+    //"modernize-use-trailing-return-type", // not necessarily more readable
+    "modernize-use-transparent-functors",
+    "modernize-use-uncaught-exceptions",
+    "modernize-use-using",
+    "performance-*",
+
+    // Remove some pedantic stylistic requirements.
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo",    // do not require TODO(info)
+
+    "-bugprone-unhandled-self-assignment",
+    "-bugprone-suspicious-string-compare",
+    "-cert-oop54-cpp", // found in TransactionLog.h
+    "-bugprone-narrowing-conversions", // b/182410845
+
+    // TODO(b/275642749) Reenable these warnings
+    "-bugprone-assignment-in-if-condition",
+    "-bugprone-forward-declaration-namespace",
+    "-bugprone-parent-virtual-call",
+    "-cert-dcl59-cpp",
+    "-cert-err34-c",
+    "-google-build-namespaces",
+    "-google-build-using-namespace",
+    "-google-default-arguments",
+    "-google-runtime-int",
+    "-misc-const-correctness",
+    "-misc-non-private-member-variables-in-classes",
+    "-modernize-concat-nested-namespaces",
+    "-modernize-loop-convert",
+    "-modernize-use-default-member-init",
+    "-modernize-use-equals-default",
+    "-modernize-use-nullptr",
+    "-modernize-use-override",
+    "-modernize-use-using",
+    "-performance-no-int-to-ptr",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+    name: "audioflinger_flags_defaults",
+    // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+    // https://clang.llvm.org/docs/DiagnosticsReference.html
+    cflags: [
+        "-Wall",
+        "-Wdeprecated",
+        "-Werror",
+        "-Werror=implicit-fallthrough",
+        "-Werror=sometimes-uninitialized",
+        "-Werror=conditional-uninitialized",
+        "-Wextra",
+
+        // suppress some warning chatter.
+        "-Wno-deprecated-copy-with-dtor",
+        "-Wno-deprecated-copy-with-user-provided-dtor",
+
+        "-Wredundant-decls",
+        "-Wshadow",
+        "-Wstrict-aliasing",
+        "-fstrict-aliasing",
+        "-Wthread-safety",
+        //"-Wthread-safety-negative", // experimental - looks broken in R.
+        "-Wunreachable-code",
+        "-Wunreachable-code-break",
+        "-Wunreachable-code-return",
+        "-Wunused",
+        "-Wused-but-marked-unused",
+        "-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS",
+    ],
+    // https://clang.llvm.org/extra/clang-tidy/
+    tidy: true,
+    tidy_checks: tidy_errors,
+    tidy_checks_as_errors: tidy_errors,
+    tidy_flags: [
+      "-format-style=file",
+    ],
+}
+
 cc_library_shared {
     name: "libaudioflinger",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "audioflinger_flags_defaults",
+    ],
+
     srcs: [
         "AudioFlinger.cpp",
         "AudioHwDevice.cpp",
@@ -39,7 +160,9 @@
         "FastThread.cpp",
         "FastThreadDumpState.cpp",
         "FastThreadState.cpp",
+        "MelReporter.cpp",
         "NBAIO_Tee.cpp",
+        "PatchCommandThread.cpp",
         "PatchPanel.cpp",
         "PropertyUtils.cpp",
         "SpdifStreamOut.cpp",
@@ -55,12 +178,13 @@
     ],
 
     shared_libs: [
-        "android.media.audio.common.types-V1-cpp",
         "audioflinger-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
         "effect-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
+        "libaudioflinger_timing",
         "libaudiofoundation",
         "libaudiohal",
         "libaudioprocessing",
@@ -70,7 +194,9 @@
         "libutils",
         "liblog",
         "libbinder",
+        "libbinder_ndk",
         "libaudioclient",
+        "libaudiomanager",
         "libmedialogservice",
         "libmediametrics",
         "libmediautils",
@@ -78,10 +204,10 @@
         "libnblog",
         "libpermission",
         "libpowermanager",
-        "libmediautils",
         "libmemunreachable",
         "libmedia_helper",
         "libshmemcompat",
+        "libsounddose",
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
@@ -96,11 +222,13 @@
         "libaaudio_headers",
         "libaudioclient_headers",
         "libaudiohal_headers",
+        "libaudioutils_headers",
         "libmedia_headers",
     ],
 
     export_shared_lib_headers: [
         "libpermission",
+        "android.hardware.audio.core.sounddose-V1-ndk",
     ],
 
     cflags: [
@@ -114,3 +242,8 @@
     },
 
 }
+
+cc_library_headers {
+    name: "libaudioflinger_headers",
+    export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f7576f6..325adfa 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -39,6 +39,7 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <binder/Parcel.h>
+#include <media/audiohal/AudioHalVersionInfo.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -54,9 +55,10 @@
 #include <cutils/properties.h>
 
 #include <system/audio.h>
-#include <audiomanager/AudioManager.h>
+#include <audiomanager/IAudioManager.h>
 
 #include "AudioFlinger.h"
+#include "EffectConfiguration.h"
 #include "NBAIO_Tee.h"
 #include "PropertyUtils.h"
 
@@ -106,19 +108,23 @@
 
 namespace android {
 
-#define MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION 7.1
-
 using ::android::base::StringPrintf;
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
+using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
+using android::detail::AudioHalVersionInfo;
 
-static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
-static const char kHardwareLockedString[] = "Hardware lock is taken\n";
-static const char kClientLockedString[] = "Client lock is taken\n";
-static const char kNoEffectsFactory[] = "Effects Factory is absent\n";
+static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
+        AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
 
+static constexpr char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
+static constexpr char kHardwareLockedString[] = "Hardware lock is taken\n";
+static constexpr char kClientLockedString[] = "Client lock is taken\n";
+static constexpr char kNoEffectsFactory[] = "Effects Factory is absent\n";
+
+static constexpr char kAudioServiceName[] = "audio";
 
 nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
 
@@ -190,7 +196,6 @@
 BINDER_METHOD_ENTRY(restoreOutput) \
 BINDER_METHOD_ENTRY(openInput) \
 BINDER_METHOD_ENTRY(closeInput) \
-BINDER_METHOD_ENTRY(invalidateStream) \
 BINDER_METHOD_ENTRY(setVoiceVolume) \
 BINDER_METHOD_ENTRY(getRenderPosition) \
 BINDER_METHOD_ENTRY(getInputFramesLost) \
@@ -226,6 +231,14 @@
 BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
 BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
 BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+BINDER_METHOD_ENTRY(setSimulateDeviceConnections) \
+BINDER_METHOD_ENTRY(setRequestedLatencyMode) \
+BINDER_METHOD_ENTRY(getSupportedLatencyModes) \
+BINDER_METHOD_ENTRY(setBluetoothVariableLatencyEnabled) \
+BINDER_METHOD_ENTRY(isBluetoothVariableLatencyEnabled) \
+BINDER_METHOD_ENTRY(supportsBluetoothVariableLatency) \
+BINDER_METHOD_ENTRY(getSoundDoseInterface) \
+BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -280,7 +293,6 @@
                 return opPackageLegacy == package; }) == packages.end()) {
             ALOGW("The package name(%s) provided does not correspond to the uid %d",
                     attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
-            checkedAttributionSource.packageName = std::optional<std::string>();
         }
     }
     return checkedAttributionSource;
@@ -319,8 +331,11 @@
       mClientSharedHeapSize(kMinimumClientSharedHeapSizeBytes),
       mGlobalEffectEnableTime(0),
       mPatchPanel(this),
-      mDeviceEffectManager(this),
-      mSystemReady(false)
+      mPatchCommandThread(sp<PatchCommandThread>::make()),
+      mDeviceEffectManager(sp<DeviceEffectManager>::make(*this)),
+      mMelReporter(sp<MelReporter>::make(*this)),
+      mSystemReady(false),
+      mBluetoothLatencyModesEnabled(true)
 {
     // Move the audio session unique ID generator start base as time passes to limit risk of
     // generating the same ID again after an audioserver restart.
@@ -358,7 +373,7 @@
     BatteryNotifier::getInstance().noteResetAudio();
 
     mDevicesFactoryHal = DevicesFactoryHalInterface::create();
-    mEffectsFactoryHal = EffectsFactoryHalInterface::create();
+    mEffectsFactoryHal = audioflinger::EffectConfiguration::getEffectsFactoryHal();
 
     mMediaLogNotifier->run("MediaLogNotifier");
     std::vector<pid_t> halPids;
@@ -396,7 +411,7 @@
     mDevicesFactoryHalCallback = new DevicesFactoryHalCallbackImpl;
     mDevicesFactoryHal->setCallbackOnce(mDevicesFactoryHalCallback);
 
-    if (mDevicesFactoryHal->getHalVersion() <= MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
+    if (mDevicesFactoryHal->getHalVersion() <= kMaxAAudioPropertyDeviceHalVersion) {
         mAAudioBurstsPerBuffer = getAAudioMixerBurstCountFromSystemProperty();
         mAAudioHwBurstMinMicros = getAAudioHardwareBurstMinUsecFromSystemProperty();
     }
@@ -442,7 +457,7 @@
         *policyInfos = it->second;
         return NO_ERROR;
     }
-    if (mDevicesFactoryHal->getHalVersion() > MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
+    if (mDevicesFactoryHal->getHalVersion() > kMaxAAudioPropertyDeviceHalVersion) {
         AutoMutex lock(mHardwareLock);
         for (size_t i = 0; i < mAudioHwDevs.size(); ++i) {
             AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
@@ -473,14 +488,17 @@
     return mAAudioHwBurstMinMicros;
 }
 
-status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) {
+status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port,
+                                               media::DeviceConnectedState state) {
     status_t final_result = NO_INIT;
     Mutex::Autolock _l(mLock);
     AutoMutex lock(mHardwareLock);
     mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
-        status_t result = dev->setConnectedState(port, connected);
+        status_t result = state == media::DeviceConnectedState::PREPARE_TO_DISCONNECT
+                ? dev->prepareToDisconnectExternalDevice(port)
+                : dev->setConnectedState(port, state == media::DeviceConnectedState::CONNECTED);
         // Same logic as with setParameter: it's a success if at least one
         // HAL module accepts the update.
         if (final_result != NO_ERROR) {
@@ -491,6 +509,25 @@
     return final_result;
 }
 
+status_t AudioFlinger::setSimulateDeviceConnections(bool enabled) {
+    bool at_least_one_succeeded = false;
+    status_t last_error = INVALID_OPERATION;
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    mHardwareStatus = AUDIO_HW_SET_SIMULATE_CONNECTIONS;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->setSimulateDeviceConnections(enabled);
+        if (result == OK) {
+            at_least_one_succeeded = true;
+        } else {
+            last_error = result;
+        }
+    }
+    mHardwareStatus = AUDIO_HW_IDLE;
+    return at_least_one_succeeded ? OK : last_error;
+}
+
 // getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
 std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
     if (mAudioVibratorInfos.empty()) {
@@ -579,6 +616,33 @@
     audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     audio_attributes_t localAttr = *attr;
+
+    // TODO b/182392553: refactor or make clearer
+    pid_t clientPid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+    bool updatePid = (clientPid == (pid_t)-1);
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+
+    AttributionSourceState adjAttributionSource = client.attributionSource;
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        uid_t clientUid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        updatePid = true;
+    }
+    if (updatePid) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, clientPid);
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+    }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
+
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
         fullConfig.sample_rate = config->sample_rate;
@@ -586,20 +650,27 @@
         fullConfig.format = config->format;
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
+        bool isBitPerfect;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
-                                            &streamType, client.attributionSource,
+                                            &streamType, adjAttributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
-                                            deviceId, &portId, &secondaryOutputs, &isSpatialized);
+                                            deviceId, &portId, &secondaryOutputs, &isSpatialized,
+                                            &isBitPerfect);
+        if (ret != NO_ERROR) {
+            config->sample_rate = fullConfig.sample_rate;
+            config->channel_mask = fullConfig.channel_mask;
+            config->format = fullConfig.format;
+        }
         ALOGW_IF(!secondaryOutputs.empty(),
                  "%s does not support secondary outputs, ignoring them", __func__);
     } else {
         ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
-                                              client.attributionSource,
+                                              adjAttributionSource,
                                               config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
     }
@@ -633,33 +704,35 @@
 }
 
 /* static */
-int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+os::HapticScale AudioFlinger::onExternalVibrationStart(
+        const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
         int32_t ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
             ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
-            return ret;
+            return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
         }
     }
     ALOGD("%s, start external vibration with intensity as MUTE due to %s",
             __func__,
             evs == nullptr ? "external vibration service not found"
                            : "error when querying intensity");
-    return static_cast<int>(os::HapticScale::MUTE);
+    return os::HapticScale::MUTE;
 }
 
 /* static */
 void AudioFlinger::onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
     sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != 0) {
+        ALOGD("%s, stopping external vibration", __func__);
         evs->onExternalVibrationStop(*externalVibration);
     }
 }
 
 status_t AudioFlinger::addEffectToHal(audio_port_handle_t deviceId,
-        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+        audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
     AutoMutex lock(mHardwareLock);
     AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
     if (audioHwDevice == nullptr) {
@@ -669,7 +742,7 @@
 }
 
 status_t AudioFlinger::removeEffectFromHal(audio_port_handle_t deviceId,
-        audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+        audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
     AutoMutex lock(mHardwareLock);
     AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
     if (audioHwDevice == nullptr) {
@@ -721,15 +794,14 @@
 {
     String8 result;
 
-    result.append("Clients:\n");
-    result.append("   pid    heap_size\n");
+    result.append("Client Allocators:\n");
     for (size_t i = 0; i < mClients.size(); ++i) {
         sp<Client> client = mClients.valueAt(i).promote();
         if (client != 0) {
-            result.appendFormat("%6d %12zu\n", client->pid(),
-                    client->heap()->getMemoryHeap()->getSize());
+            result.appendFormat("Client: %d\n", client->pid());
+            result.append(client->allocator().dump().c_str());
         }
     }
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
@@ -765,6 +837,13 @@
                             (uint32_t)(mStandbyTimeInNsecs / 1000000));
     result.append(buffer);
     write(fd, result.string(), result.size());
+
+    dprintf(fd, "Vibrator infos(size=%zu):\n", mAudioVibratorInfos.size());
+    for (const auto& vibratorInfo : mAudioVibratorInfos) {
+        dprintf(fd, "  - %s\n", vibratorInfo.toString().c_str());
+    }
+    dprintf(fd, "Bluetooth latency modes are %senabled\n",
+            mBluetoothLatencyModesEnabled ? "" : "not ");
 }
 
 void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
@@ -787,6 +866,7 @@
 }
 
 status_t AudioFlinger::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     if (!dumpAllowed()) {
         dumpPermissionDenial(fd, args);
@@ -858,7 +938,10 @@
 
         mPatchPanel.dump(fd);
 
-        mDeviceEffectManager.dump(fd);
+        mDeviceEffectManager->dump(fd);
+
+        std::string melOutput = mMelReporter->dump();
+        write(fd, melOutput.c_str(), melOutput.size());
 
         // dump external setParameters
         auto dumpLogger = [fd](SimpleLog& logger, const char* name) {
@@ -889,7 +972,6 @@
         // to lookup the service if it's not running, as it will block for a second
         if (sMediaLogServiceAsBinder != 0) {
             dprintf(fd, "\nmedia.log:\n");
-            Vector<String16> args;
             sMediaLogServiceAsBinder->dump(fd, args);
         }
 
@@ -1034,7 +1116,8 @@
     audio_stream_type_t streamType;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     std::vector<audio_io_handle_t> secondaryOutputs;
-    bool isSpatialized = false;;
+    bool isSpatialized = false;
+    bool isBitPerfect = false;
 
     // TODO b/182392553: refactor or make clearer
     pid_t clientPid =
@@ -1048,7 +1131,7 @@
     audio_attributes_t localAttr = input.attr;
 
     AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, clientUid);
@@ -1064,6 +1147,8 @@
         clientPid = callingPid;
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1079,7 +1164,7 @@
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs,
-                                            &isSpatialized);
+                                            &isSpatialized, &isBitPerfect);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1145,31 +1230,34 @@
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
-                                      &lStatus, portId, input.audioTrackCallback, isSpatialized);
+                                      &lStatus, portId, input.audioTrackCallback, isSpatialized,
+                                      isBitPerfect);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
         output.afFrameCount = thread->frameCount();
         output.afSampleRate = thread->sampleRate();
+        output.afChannelMask = static_cast<audio_channel_mask_t>(thread->channelMask() |
+                                                                 thread->hapticChannelMask());
+        output.afFormat = thread->format();
         output.afLatencyMs = thread->latency();
         output.portId = portId;
 
         if (lStatus == NO_ERROR) {
+            // no risk of deadlock because AudioFlinger::mLock is held
+            Mutex::Autolock _dl(thread->mLock);
             // Connect secondary outputs. Failure on a secondary output must not impede the primary
             // Any secondary output setup failure will lead to a desync between the AP and AF until
             // the track is destroyed.
             updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
-        }
-
-        // move effect chain to this output thread if an effect on same session was waiting
-        // for a track to be created
-        if (lStatus == NO_ERROR && effectThread != NULL) {
-            // no risk of deadlock because AudioFlinger::mLock is held
-            Mutex::Autolock _dl(thread->mLock);
-            Mutex::Autolock _sl(effectThread->mLock);
-            if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
-                effectThreadId = thread->id();
-                effectIds = thread->getEffectIds_l(sessionId);
+            // move effect chain to this output thread if an effect on same session was waiting
+            // for a track to be created
+            if (effectThread != nullptr) {
+                Mutex::Autolock _sl(effectThread->mLock);
+                if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
+                    effectThreadId = thread->id();
+                    effectIds = thread->getEffectIds_l(sessionId);
+                }
             }
         }
 
@@ -1384,8 +1472,9 @@
     if (NO_ERROR == ret) {
         Mutex::Autolock _l(mLock);
         mMode = mode;
-        for (size_t i = 0; i < mPlaybackThreads.size(); i++)
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
             mPlaybackThreads.valueAt(i)->setMode(mode);
+        }
     }
 
     mediametrics::LogItem(mMetricsId)
@@ -1583,6 +1672,80 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::setRequestedLatencyMode(
+        audio_io_handle_t output, audio_latency_mode_t mode) {
+    if (output == AUDIO_IO_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    PlaybackThread *thread = checkPlaybackThread_l(output);
+    if (thread == nullptr) {
+        return BAD_VALUE;
+    }
+    return thread->setRequestedLatencyMode(mode);
+}
+
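+// Returns the latency modes supported by the playback thread attached to the given output handle.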
+status_t AudioFlinger::getSupportedLatencyModes(audio_io_handle_t output,
+            std::vector<audio_latency_mode_t>* modes) {
+    if (output == AUDIO_IO_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    AutoMutex lock(mLock);
+    PlaybackThread *thread = checkPlaybackThread_l(output);
+    if (thread == nullptr) {
+        return BAD_VALUE;
+    }
+    return thread->getSupportedLatencyModes(modes);
+}
+
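+// Applies the Bluetooth variable latency setting to all playback threads and records it for outputs opened later; succeeds if at least one thread accepts it.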
+status_t AudioFlinger::setBluetoothVariableLatencyEnabled(bool enabled) {
+    Mutex::Autolock _l(mLock);
+    status_t status = INVALID_OPERATION;
+    for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+        // Success if at least one PlaybackThread supports Bluetooth latency modes
+        if (mPlaybackThreads.valueAt(i)->setBluetoothVariableLatencyEnabled(enabled) == NO_ERROR) {
+            status = NO_ERROR;
+        }
+    }
+    if (status == NO_ERROR) {
+        mBluetoothLatencyModesEnabled.store(enabled);
+    }
+    return status;
+}
+
+status_t AudioFlinger::isBluetoothVariableLatencyEnabled(bool *enabled) {
+    if (enabled == nullptr) {
+        return BAD_VALUE;
+    }
+    *enabled = mBluetoothLatencyModesEnabled.load();
+    return NO_ERROR;
+}
+
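+// Reports whether any loaded audio HAL module advertises support for Bluetooth variable latency.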
+status_t AudioFlinger::supportsBluetoothVariableLatency(bool* support) {
+    if (support == nullptr) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    *support = false;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        if (mAudioHwDevs.valueAt(i)->supportsBluetoothVariableLatency()) {
+             *support = true;
+             break;
+        }
+    }
+    return NO_ERROR;
+}
+
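+// Obtains the sound dose interface from the MEL reporter for the given callback.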
+status_t AudioFlinger::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                             sp<media::ISoundDose>* soundDose) {
+    if (soundDose == nullptr) {
+        return BAD_VALUE;
+    }
+
+    *soundDose = mMelReporter->getSoundDoseInterface(callback);
+    return NO_ERROR;
+}
+
 status_t AudioFlinger::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     // check calling permissions
@@ -1659,7 +1822,7 @@
 // forwardAudioHwSyncToDownstreamPatches_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::forwardParametersToDownstreamPatches_l(
         audio_io_handle_t upStream, const String8& keyValuePairs,
-        std::function<bool(const sp<PlaybackThread>&)> useThread)
+        const std::function<bool(const sp<PlaybackThread>&)>& useThread)
 {
     std::vector<PatchPanel::SoftwarePatch> swPatches;
     if (mPatchPanel.getDownstreamSoftwarePatches(upStream, &swPatches) != OK) return;
@@ -1675,7 +1838,7 @@
 
 // Update downstream patches for all playback threads attached to an MSD module
 void AudioFlinger::updateDownStreamPatches_l(const struct audio_patch *patch,
-                                             const std::set<audio_io_handle_t> streams)
+                                             const std::set<audio_io_handle_t>& streams)
 {
     for (const audio_io_handle_t stream : streams) {
         PlaybackThread *playbackThread = checkPlaybackThread_l(stream);
@@ -1705,6 +1868,8 @@
         String8(AudioParameter::keyStreamSupportedFormats),
         String8(AudioParameter::keyStreamSupportedChannels),
         String8(AudioParameter::keyStreamSupportedSamplingRates),
+        String8(AudioParameter::keyClosing),
+        String8(AudioParameter::keyExiting),
     };
 
     if (isAudioServerUid(callingUid)) {
@@ -1881,24 +2046,29 @@
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
     sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
-    if (channelMask != AUDIO_CHANNEL_IN_MONO)
+    if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
-    if (channelMask != AUDIO_CHANNEL_IN_STEREO)
+    }
+    if (channelMask != AUDIO_CHANNEL_IN_STEREO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_STEREO);
+    }
 
     std::vector<audio_format_t> formats = {format};
-    if (format != AUDIO_FORMAT_PCM_16_BIT)
+    if (format != AUDIO_FORMAT_PCM_16_BIT) {
         formats.push_back(AUDIO_FORMAT_PCM_16_BIT);
+    }
 
     std::vector<uint32_t> sampleRates = {sampleRate};
     static const uint32_t SR_44100 = 44100;
     static const uint32_t SR_48000 = 48000;
-
-    if (sampleRate != SR_48000)
+    if (sampleRate != SR_48000) {
         sampleRates.push_back(SR_48000);
-    if (sampleRate != SR_44100)
+    }
+    if (sampleRate != SR_44100) {
         sampleRates.push_back(SR_44100);
+    }
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
@@ -2082,6 +2252,21 @@
     }
 }
 
+void AudioFlinger::onSupportedLatencyModesChanged(
+        audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) {
+    int32_t outputAidl = VALUE_OR_FATAL(legacy2aidl_audio_io_handle_t_int32_t(output));
+    std::vector<media::audio::common::AudioLatencyMode> modesAidl = VALUE_OR_FATAL(
+                convertContainer<std::vector<media::audio::common::AudioLatencyMode>>(
+                        modes, legacy2aidl_audio_latency_mode_t_AudioLatencyMode));
+
+    Mutex::Autolock _l(mClientLock);
+    size_t size = mNotificationClients.size();
+    for (size_t i = 0; i < size; i++) {
+        mNotificationClients.valueAt(i)->audioFlingerClient()
+                ->onSupportedLatencyModesChanged(outputAidl, modesAidl);
+    }
+}
+
 // removeClient_l() must be called with AudioFlinger::mClientLock held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2130,12 +2315,8 @@
 AudioFlinger::Client::Client(const sp<AudioFlinger>& audioFlinger, pid_t pid)
     :   RefBase(),
         mAudioFlinger(audioFlinger),
-        mPid(pid)
-{
-    mMemoryDealer = new MemoryDealer(
-            audioFlinger->getClientSharedHeapSize(),
-            (std::string("AudioFlinger::Client(") + std::to_string(pid) + ")").c_str());
-}
+        mPid(pid),
+        mClientAllocator(AllocatorFactory::getClientAllocator()) {}
 
 // Client destructor must be called with AudioFlinger::mClientLock held
 AudioFlinger::Client::~Client()
@@ -2143,9 +2324,9 @@
     mAudioFlinger->removeClient_l(mPid);
 }
 
-sp<MemoryDealer> AudioFlinger::Client::heap() const
+AllocatorFactory::ClientAllocator& AudioFlinger::Client::allocator()
 {
-    return mMemoryDealer;
+    return mClientAllocator;
 }
 
 // ----------------------------------------------------------------------------
@@ -2229,7 +2410,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
            adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(currentUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, currentUid);
@@ -2245,7 +2426,8 @@
                  __func__, callingUid, callingPid, currentPid);
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
-
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
     // we don't yet support anything other than linear PCM
     if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -2347,11 +2529,17 @@
             };
         }
 
+        output.halConfig = {
+                thread->sampleRate(),
+                thread->channelMask(),
+                thread->format()
+        };
+
         // Check if one effect chain was awaiting for an AudioRecord to be created on this
         // session and move it to this thread.
         sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
         if (chain != 0) {
-            Mutex::Autolock _l(thread->mLock);
+            Mutex::Autolock _l2(thread->mLock);
             thread->addEffectChain_l(chain);
         }
         break;
@@ -2390,6 +2578,47 @@
 
 // ----------------------------------------------------------------------------
 
+status_t AudioFlinger::getAudioPolicyConfig(media::AudioPolicyConfig *config)
+{
+    if (config == nullptr) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    RETURN_STATUS_IF_ERROR(
+            mDevicesFactoryHal->getSurroundSoundConfig(&config->surroundSoundConfig));
+    RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getEngineConfig(&config->engineConfig));
+    std::vector<std::string> hwModuleNames;
+    RETURN_STATUS_IF_ERROR(mDevicesFactoryHal->getDeviceNames(&hwModuleNames));
+    std::set<AudioMode> allSupportedModes;
+    for (const auto& name : hwModuleNames) {
+        AudioHwDevice* module = loadHwModule_l(name.c_str());
+        if (module == nullptr) continue;
+        media::AudioHwModule aidlModule;
+        if (module->hwDevice()->getAudioPorts(&aidlModule.ports) == OK &&
+                module->hwDevice()->getAudioRoutes(&aidlModule.routes) == OK) {
+            aidlModule.handle = module->handle();
+            aidlModule.name = module->moduleName();
+            config->modules.push_back(std::move(aidlModule));
+        }
+        std::vector<AudioMode> supportedModes;
+        if (module->hwDevice()->getSupportedModes(&supportedModes) == OK) {
+            allSupportedModes.insert(supportedModes.begin(), supportedModes.end());
+        }
+    }
+    if (!allSupportedModes.empty()) {
+        config->supportedModes.insert(config->supportedModes.end(),
+                allSupportedModes.begin(), allSupportedModes.end());
+    } else {
+        ALOGW("%s: The HAL does not provide telephony functionality", __func__);
+        config->supportedModes = { media::audio::common::AudioMode::NORMAL,
+            media::audio::common::AudioMode::RINGTONE,
+            media::audio::common::AudioMode::IN_CALL,
+            media::audio::common::AudioMode::IN_COMMUNICATION };
+    }
+    return OK;
+}
+
 audio_module_handle_t AudioFlinger::loadHwModule(const char *name)
 {
     if (name == NULL) {
@@ -2400,16 +2629,17 @@
     }
     Mutex::Autolock _l(mLock);
     AutoMutex lock(mHardwareLock);
-    return loadHwModule_l(name);
+    AudioHwDevice* module = loadHwModule_l(name);
+    return module != nullptr ? module->handle() : AUDIO_MODULE_HANDLE_NONE;
 }
 
 // loadHwModule_l() must be called with AudioFlinger::mLock and AudioFlinger::mHardwareLock held
-audio_module_handle_t AudioFlinger::loadHwModule_l(const char *name)
+AudioHwDevice* AudioFlinger::loadHwModule_l(const char *name)
 {
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
         if (strncmp(mAudioHwDevs.valueAt(i)->moduleName(), name, strlen(name)) == 0) {
             ALOGW("loadHwModule() module %s already loaded", name);
-            return mAudioHwDevs.keyAt(i);
+            return mAudioHwDevs.valueAt(i);
         }
     }
 
@@ -2418,7 +2648,10 @@
     int rc = mDevicesFactoryHal->openDevice(name, &dev);
     if (rc) {
         ALOGE("loadHwModule() error %d loading module %s", rc, name);
-        return AUDIO_MODULE_HANDLE_NONE;
+        return nullptr;
+    }
+    if (!mMelReporter->activateHalSoundDoseComputation(name, dev)) {
+        ALOGW("loadHwModule() sound dose reporting is not available");
     }
 
     mHardwareStatus = AUDIO_HW_INIT;
@@ -2426,7 +2659,7 @@
     mHardwareStatus = AUDIO_HW_IDLE;
     if (rc) {
         ALOGE("loadHwModule() init check error %d for module %s", rc, name);
-        return AUDIO_MODULE_HANDLE_NONE;
+        return nullptr;
     }
 
     // Check and cache this HAL's level of support for master mute and master
@@ -2468,6 +2701,13 @@
         flags = static_cast<AudioHwDevice::Flags>(flags | AudioHwDevice::AHWD_IS_INSERT);
     }
 
+
+    if (bool supports = false;
+            dev->supportsBluetoothVariableLatency(&supports) == NO_ERROR && supports) {
+        flags = static_cast<AudioHwDevice::Flags>(flags |
+                AudioHwDevice::AHWD_SUPPORTS_BT_LATENCY_MODES);
+    }
+
     audio_module_handle_t handle = (audio_module_handle_t) nextUniqueId(AUDIO_UNIQUE_ID_USE_MODULE);
     AudioHwDevice *audioDevice = new AudioHwDevice(handle, name, dev, flags);
     if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
@@ -2477,7 +2717,7 @@
         mHardwareStatus = AUDIO_HW_IDLE;
     }
 
-    if (mDevicesFactoryHal->getHalVersion() > MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
+    if (mDevicesFactoryHal->getHalVersion() > kMaxAAudioPropertyDeviceHalVersion) {
         if (int32_t mixerBursts = dev->getAAudioMixerBurstCount();
             mixerBursts > 0 && mixerBursts > mAAudioBurstsPerBuffer) {
             mAAudioBurstsPerBuffer = mixerBursts;
@@ -2493,8 +2733,7 @@
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
-    return handle;
-
+    return audioDevice;
 }
 
 // ----------------------------------------------------------------------------
@@ -2672,23 +2911,47 @@
         ThreadBase *thread = (ThreadBase *)mMmapThreads.valueAt(i).get();
         thread->systemReady();
     }
+
+    // Java services are ready, so we can create a reference to AudioService
+    getOrCreateAudioManager();
+
     return NO_ERROR;
 }
 
-status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
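+// Lazily binds to the "audio" service (AudioService) via checkService() so the lookup does not block, and caches the resulting interface in mAudioManager.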
+sp<IAudioManager> AudioFlinger::getOrCreateAudioManager()
+{
+    if (mAudioManager.load() == nullptr) {
+        // use checkService() to avoid blocking
+        sp<IBinder> binder =
+            defaultServiceManager()->checkService(String16(kAudioServiceName));
+        if (binder != nullptr) {
+            mAudioManager = interface_cast<IAudioManager>(binder);
+        } else {
+            ALOGE("%s(): binding to audio service failed.", __func__);
+        }
+    }
+    return mAudioManager.load();
+}
+
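+// Collects microphone characteristics from each HAL module and converts them to media::MicrophoneInfoFw entries; succeeds if at least one module supports the query.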
+status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfoFw> *microphones)
 {
     AutoMutex lock(mHardwareLock);
     status_t status = INVALID_OPERATION;
 
     for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
-        std::vector<media::MicrophoneInfo> mics;
+        std::vector<audio_microphone_characteristic_t> mics;
         AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
         mHardwareStatus = AUDIO_HW_GET_MICROPHONES;
         status_t devStatus = dev->hwDevice()->getMicrophones(&mics);
         mHardwareStatus = AUDIO_HW_IDLE;
         if (devStatus == NO_ERROR) {
-            microphones->insert(microphones->begin(), mics.begin(), mics.end());
             // report success if at least one HW module supports the function.
+            std::transform(mics.begin(), mics.end(), std::back_inserter(*microphones), [](auto& mic)
+            {
+                auto microphone =
+                        legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(mic);
+                return microphone.ok() ? microphone.value() : media::MicrophoneInfoFw{};
+            });
             status = NO_ERROR;
         }
     }
@@ -2719,7 +2982,7 @@
 sp<AudioFlinger::ThreadBase> AudioFlinger::openOutput_l(audio_module_handle_t module,
                                                         audio_io_handle_t *output,
                                                         audio_config_t *halConfig,
-                                                        audio_config_base_t *mixerConfig __unused,
+                                                        audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
                                                         audio_output_flags_t flags)
@@ -2790,19 +3053,25 @@
             return thread;
         } else {
             sp<PlaybackThread> thread;
-            if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+            if (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+                thread = sp<BitPerfectThread>::make(this, outputStream, *output, mSystemReady);
+                ALOGV("%s() created bit-perfect output: ID %d thread %p",
+                      __func__, *output, thread.get());
+            } else if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 thread = new SpatializerThread(this, outputStream, *output,
                                                     mSystemReady, mixerConfig);
                 ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
                       *output, thread.get());
             } else if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
-                thread = new OffloadThread(this, outputStream, *output, mSystemReady);
+                thread = new OffloadThread(this, outputStream, *output,
+                        mSystemReady, halConfig->offload_info);
                 ALOGV("openOutput_l() created offload output: ID %d thread %p",
                       *output, thread.get());
             } else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT)
                     || !isValidPcmSinkFormat(halConfig->format)
                     || !isValidPcmSinkChannelMask(halConfig->channel_mask)) {
-                thread = new DirectOutputThread(this, outputStream, *output, mSystemReady);
+                thread = new DirectOutputThread(this, outputStream, *output,
+                        mSystemReady, halConfig->offload_info);
                 ALOGV("openOutput_l() created direct output: ID %d thread %p",
                       *output, thread.get());
             } else {
@@ -2816,6 +3085,7 @@
             if (thread->isMsdDevice()) {
                 thread->setDownStreamPatch(&patch);
             }
+            thread->setBluetoothVariableLatencyEnabled(mBluetoothLatencyModesEnabled.load());
             return thread;
         }
     }
@@ -2838,7 +3108,6 @@
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
 
     audio_io_handle_t output;
-    uint32_t latencyMs;
 
     ALOGI("openOutput() this %p, module %d Device %s, SamplingRate %d, Format %#08x, "
               "Channels %#x, flags %#x",
@@ -2861,6 +3130,7 @@
     sp<ThreadBase> thread = openOutput_l(module, &output, &halConfig,
             &mixerConfig, deviceType, address, flags);
     if (thread != 0) {
+        uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
             PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
             latencyMs = playbackThread->latency();
@@ -3209,7 +3479,7 @@
                         continue;
                     }
                     if (t->hasAudioSession(chain->sessionId()) != 0) {
-                        Mutex::Autolock _l(t->mLock);
+                        Mutex::Autolock _l2(t->mLock);
                         ALOGV("closeInput() found thread %d for effect session %d",
                               t->id(), chain->sessionId());
                         t->addEffectChain_l(chain);
@@ -3261,17 +3531,23 @@
     closeInputFinish(thread);
 }
 
-status_t AudioFlinger::invalidateStream(audio_stream_type_t stream)
-{
+status_t AudioFlinger::invalidateTracks(const std::vector<audio_port_handle_t> &portIds) {
     Mutex::Autolock _l(mLock);
-    ALOGV("invalidateStream() stream %d", stream);
+    ALOGV("%s", __func__);
 
+    std::set<audio_port_handle_t> portIdSet(portIds.begin(), portIds.end());
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
         PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
-        thread->invalidateTracks(stream);
+        thread->invalidateTracks(portIdSet);
+        if (portIdSet.empty()) {
+            return NO_ERROR;
+        }
     }
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
-        mMmapThreads[i]->invalidateTracks(stream);
+        mMmapThreads[i]->invalidateTracks(portIdSet);
+        if (portIdSet.empty()) {
+            return NO_ERROR;
+        }
     }
     return NO_ERROR;
 }
@@ -3414,7 +3690,8 @@
     }
 
     for (size_t i = 0; i < chains.size(); i++) {
-        sp<EffectChain> ec = chains[i];
+        // clang-tidy suggests const ref
+        sp<EffectChain> ec = chains[i];  // NOLINT(performance-unnecessary-copy-initialization)
         int sessionid = ec->sessionId();
         sp<ThreadBase> t = ec->thread().promote();
         if (t == 0) {
@@ -3479,6 +3756,20 @@
     return thread;
 }
 
+// checkOutputThread_l() must be called with AudioFlinger::mLock held
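+// Returns the playback or MMAP thread bound to the given output handle, or nullptr if the handle does not designate an output.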
+sp<AudioFlinger::ThreadBase> AudioFlinger::checkOutputThread_l(audio_io_handle_t ioHandle) const
+{
+    if (audio_unique_id_get_use(ioHandle) != AUDIO_UNIQUE_ID_USE_OUTPUT) {
+        return nullptr;
+    }
+
+    sp<AudioFlinger::ThreadBase> thread = mPlaybackThreads.valueFor(ioHandle);
+    if (thread == nullptr) {
+        thread = mMmapThreads.valueFor(ioHandle);
+    }
+    return thread;
+}
+
 // checkPlaybackThread_l() must be called with AudioFlinger::mLock held
 AudioFlinger::PlaybackThread *AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const
 {
@@ -3583,7 +3874,7 @@
     PlaybackThread *thread = primaryPlaybackThread_l();
 
     if (thread == NULL) {
-        return DeviceTypeSet();
+        return {};
     }
 
     return thread->outDeviceTypes();
@@ -3660,6 +3951,12 @@
 
         using namespace std::chrono_literals;
         auto inChannelMask = audio_channel_mask_out_to_in(track->channelMask());
+        if (inChannelMask == AUDIO_CHANNEL_INVALID) {
+            // The downstream PatchTrack has the proper output channel mask,
+            // so if there is no input channel mask equivalent, we can just
+            // use an index mask here to create the PatchRecord.
+            inChannelMask = audio_channel_mask_out_to_in_index_mask(track->channelMask());
+        }
         sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
                                                        track->sampleRate(),
                                                        inChannelMask,
@@ -3703,7 +4000,7 @@
         patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
         patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
     }
-    track->setTeePatches(std::move(teePatches));
+    track->setTeePatchesToUpdate(std::move(teePatches));
 }
 
 sp<AudioFlinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
@@ -3862,7 +4159,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
     pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW_IF(currentPid != -1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
@@ -3870,6 +4167,7 @@
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
         currentPid = callingPid;
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
@@ -3990,7 +4288,7 @@
         if (sessionId == AUDIO_SESSION_DEVICE) {
             sp<Client> client = registerPid(currentPid);
             ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
-            handle = mDeviceEffectManager.createEffect_l(
+            handle = mDeviceEffectManager->createEffect_l(
                     &descOut, device, client, effectClient, mPatchPanel.patches_l(),
                     &enabledOut, &lStatus, probe, request.notifyFramesProcessed);
             if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
@@ -4088,7 +4386,7 @@
             // session and used it instead of creating a new one.
             sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
             if (chain != 0) {
-                Mutex::Autolock _l(thread->mLock);
+                Mutex::Autolock _l2(thread->mLock);
                 thread->addEffectChain_l(chain);
             }
         }
@@ -4206,6 +4504,7 @@
 status_t AudioFlinger::moveEffectChain_l(audio_session_t sessionId,
                                    AudioFlinger::PlaybackThread *srcThread,
                                    AudioFlinger::PlaybackThread *dstThread)
+NO_THREAD_SAFETY_ANALYSIS // requires srcThread and dstThread locks
 {
     ALOGV("moveEffectChain_l() session %d from thread %p to thread %p",
             sessionId, srcThread, dstThread);
@@ -4235,11 +4534,12 @@
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
     sp<EffectChain> dstChain;
-    sp<EffectModule> effect = chain->getEffectFromId_l(0);
     Vector< sp<EffectModule> > removed;
     status_t status = NO_ERROR;
     std::string errorString;
-    while (effect != nullptr) {
+    // process effects one by one.
+    for (sp<EffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
+            effect = chain->getEffectFromId_l(0)) {
         srcThread->removeEffect_l(effect);
         removed.add(effect);
         status = dstThread->addEffect_l(effect);
@@ -4260,7 +4560,6 @@
                 break;
             }
         }
-        effect = chain->getEffectFromId_l(0);
     }
 
     size_t restored = 0;
@@ -4364,6 +4663,7 @@
 }
 
 bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l()
+NO_THREAD_SAFETY_ANALYSIS  // thread lock for getEffectChain_l.
 {
     if (mGlobalEffectEnableTime != 0 &&
             ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
@@ -4446,12 +4746,9 @@
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
-                                         const Parcel& data,
-                                         uint32_t flags,
+                                         [[maybe_unused]] const Parcel& data,
+                                         [[maybe_unused]] uint32_t flags,
                                          const std::function<status_t()>& delegate) {
-    (void) data;
-    (void) flags;
-
     // make sure transactions reserved to AudioPolicyManager do not come from other processes
     switch (code) {
         case TransactionCode::SET_STREAM_VOLUME:
@@ -4463,7 +4760,6 @@
         case TransactionCode::RESTORE_OUTPUT:
         case TransactionCode::OPEN_INPUT:
         case TransactionCode::CLOSE_INPUT:
-        case TransactionCode::INVALIDATE_STREAM:
         case TransactionCode::SET_VOICE_VOLUME:
         case TransactionCode::MOVE_EFFECTS:
         case TransactionCode::SET_EFFECT_SUSPENDED:
@@ -4476,6 +4772,10 @@
         case TransactionCode::SET_RECORD_SILENCED:
         case TransactionCode::AUDIO_POLICY_READY:
         case TransactionCode::SET_DEVICE_CONNECTED_STATE:
+        case TransactionCode::SET_REQUESTED_LATENCY_MODE:
+        case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
+        case TransactionCode::INVALIDATE_TRACKS:
+        case TransactionCode::GET_AUDIO_POLICY_CONFIG:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -4497,18 +4797,22 @@
         case TransactionCode::SET_MASTER_VOLUME:
         case TransactionCode::SET_MASTER_MUTE:
         case TransactionCode::MASTER_MUTE:
+        case TransactionCode::GET_SOUND_DOSE_INTERFACE:
         case TransactionCode::SET_MODE:
         case TransactionCode::SET_MIC_MUTE:
         case TransactionCode::SET_LOW_RAM_DEVICE:
         case TransactionCode::SYSTEM_READY:
         case TransactionCode::SET_AUDIO_HAL_PIDS:
         case TransactionCode::SET_VIBRATOR_INFOS:
-        case TransactionCode::UPDATE_SECONDARY_OUTPUTS: {
+        case TransactionCode::UPDATE_SECONDARY_OUTPUTS:
+        case TransactionCode::SET_BLUETOOTH_VARIABLE_LATENCY_ENABLED:
+        case TransactionCode::IS_BLUETOOTH_VARIABLE_LATENCY_ENABLED:
+        case TransactionCode::SUPPORTS_BLUETOOTH_VARIABLE_LATENCY: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
                       IPCThreadState::self()->getCallingUid());
-                // return status only for non void methods
+                // return status only for non-void methods
                 switch (code) {
                     case TransactionCode::SYSTEM_READY:
                         break;
@@ -4556,7 +4860,9 @@
         } else {
             getIAudioFlingerStatistics().event(code, elapsedMs);
         }
-    });
+    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
+    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    true /* crashOnTimeout */);
 
     // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
     //  - AudioFlinger can call into Audio Policy Service with its global mutex held
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8e4383c..10bfdb9 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -75,15 +75,21 @@
 #include <media/ExtendedAudioBufferProvider.h>
 #include <media/VolumeShaper.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/SharedMemoryAllocator.h>
 #include <mediautils/Synchronization.h>
 #include <mediautils/ThreadSnapshot.h>
 
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
 #include <audio_utils/LinearMap.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
 #include <audio_utils/SimpleLog.h>
 #include <audio_utils/TimestampVerifier.h>
 
+#include <sounddose/SoundDoseManager.h>
+#include <timing/MonotonicFrameCounter.h>
+
 #include "FastCapture.h"
 #include "FastMixer.h"
 #include <media/nbaio/NBAIO.h>
@@ -94,7 +100,7 @@
 #include "NBAIO_Tee.h"
 #include "ThreadMetrics.h"
 #include "TrackMetrics.h"
-
+#include "AllocatorFactory.h"
 #include <android/os/IPowerManager.h>
 
 #include <media/nblog/NBLog.h>
@@ -117,6 +123,7 @@
 class DevicesFactoryHalInterface;
 class EffectsFactoryHalInterface;
 class FastMixer;
+class IAudioManager;
 class PassthruBufferProvider;
 class RecordBufferConverter;
 class ServerProxy;
@@ -131,6 +138,7 @@
 
 class AudioFlinger : public AudioFlingerServerAdapter::Delegate
 {
+    friend class sp<AudioFlinger>;
 public:
     static void instantiate() ANDROID_API;
 
@@ -202,8 +210,6 @@
 
     virtual status_t closeInput(audio_io_handle_t input);
 
-    virtual status_t invalidateStream(audio_stream_type_t stream);
-
     virtual status_t setVoiceVolume(float volume);
 
     virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
@@ -274,7 +280,7 @@
             bool isAudioPolicyReady() const { return mAudioPolicyReady.load(); }
 
 
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+    virtual status_t getMicrophones(std::vector<media::MicrophoneInfoFw> *microphones);
 
     virtual status_t setAudioHalPids(const std::vector<pid_t>& pids);
 
@@ -291,7 +297,29 @@
 
     virtual int32_t getAAudioHardwareBurstMinUsec();
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state);
+
+    virtual status_t setSimulateDeviceConnections(bool enabled);
+
+    virtual status_t setRequestedLatencyMode(
+            audio_io_handle_t output, audio_latency_mode_t mode);
+
+    virtual status_t getSupportedLatencyModes(audio_io_handle_t output,
+            std::vector<audio_latency_mode_t>* modes);
+
+    virtual status_t setBluetoothVariableLatencyEnabled(bool enabled);
+
+    virtual status_t isBluetoothVariableLatencyEnabled(bool* enabled);
+
+    virtual status_t supportsBluetoothVariableLatency(bool* support);
+
+    virtual status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+                                           sp<media::ISoundDose>* soundDose);
+
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* config);
 
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
@@ -312,16 +340,17 @@
                             sp<MmapStreamInterface>& interface,
                             audio_port_handle_t *handle);
 
-    static int onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration);
+    static os::HapticScale onExternalVibrationStart(
+        const sp<os::ExternalVibration>& externalVibration);
     static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
 
     status_t addEffectToHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+            audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect);
     status_t removeEffectFromHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+            audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect);
 
     void updateDownStreamPatches_l(const struct audio_patch *patch,
-                                   const std::set<audio_io_handle_t> streams);
+                                   const std::set<audio_io_handle_t>& streams);
 
     std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l();
 
@@ -364,7 +393,7 @@
                   audio_session_t triggerSession,
                   audio_session_t listenerSession,
                   sync_event_callback_t callBack,
-                  wp<RefBase> cookie)
+                  const wp<RefBase>& cookie)
         : mType(type), mTriggerSession(triggerSession), mListenerSession(listenerSession),
           mCallback(callBack), mCookie(cookie)
         {}
@@ -399,6 +428,8 @@
 
     bool        btNrecIsOff() const { return mBtNrecIsOff.load(); }
 
+    void             lock() ACQUIRE(mLock) { mLock.lock(); }
+    void             unlock() RELEASE(mLock) { mLock.unlock(); }
 
 private:
 
@@ -492,19 +523,19 @@
 
     // --- Client ---
     class Client : public RefBase {
-    public:
-                            Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
+      public:
+        Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
         virtual             ~Client();
-        sp<MemoryDealer>    heap() const;
+        AllocatorFactory::ClientAllocator& allocator();
         pid_t               pid() const { return mPid; }
         sp<AudioFlinger>    audioFlinger() const { return mAudioFlinger; }
 
     private:
         DISALLOW_COPY_AND_ASSIGN(Client);
 
-        const sp<AudioFlinger> mAudioFlinger;
-              sp<MemoryDealer> mMemoryDealer;
+        const sp<AudioFlinger>    mAudioFlinger;
         const pid_t         mPid;
+        AllocatorFactory::ClientAllocator mClientAllocator;
     };
 
     // --- Notification Client ---
@@ -574,6 +605,7 @@
     class OffloadThread;
     class DuplicatingThread;
     class AsyncCallbackThread;
+    class BitPerfectThread;
     class Track;
     class RecordTrack;
     class EffectBase;
@@ -623,10 +655,14 @@
 
 #include "PatchPanel.h"
 
+#include "PatchCommandThread.h"
+
 #include "Effects.h"
 
 #include "DeviceEffectManager.h"
 
+#include "MelReporter.h"
+
     // Find io handle by session id.
     // Preference is given to an io handle with a matching effect chain to session id.
     // If none found, AUDIO_IO_HANDLE_NONE is returned.
@@ -672,14 +708,16 @@
         binder::Status getVolumeShaperState(
                 int32_t id,
                 std::optional<media::VolumeShaperState>* _aidl_return) override;
-        binder::Status getDualMonoMode(media::AudioDualMonoMode* _aidl_return) override;
-        binder::Status setDualMonoMode(media::AudioDualMonoMode mode) override;
+        binder::Status getDualMonoMode(
+                media::audio::common::AudioDualMonoMode* _aidl_return) override;
+        binder::Status setDualMonoMode(
+                media::audio::common::AudioDualMonoMode mode) override;
         binder::Status getAudioDescriptionMixLevel(float* _aidl_return) override;
         binder::Status setAudioDescriptionMixLevel(float leveldB) override;
         binder::Status getPlaybackRateParameters(
-                media::AudioPlaybackRate* _aidl_return) override;
+                media::audio::common::AudioPlaybackRate* _aidl_return) override;
         binder::Status setPlaybackRateParameters(
-                const media::AudioPlaybackRate& playbackRate) override;
+                const media::audio::common::AudioPlaybackRate& playbackRate) override;
 
     private:
         const sp<PlaybackThread::Track> mTrack;
@@ -694,7 +732,7 @@
                 int /*audio_session_t*/ triggerSession);
         virtual binder::Status   stop();
         virtual binder::Status   getActiveMicrophones(
-                std::vector<media::MicrophoneInfoData>* activeMicrophones);
+                std::vector<media::MicrophoneInfoFw>* activeMicrophones);
         virtual binder::Status   setPreferredMicrophoneDirection(
                 int /*audio_microphone_direction_t*/ direction);
         virtual binder::Status   setPreferredMicrophoneFieldDimension(float zoom);
@@ -725,12 +763,15 @@
                                audio_port_handle_t *handle);
         virtual status_t stop(audio_port_handle_t handle);
         virtual status_t standby();
+                status_t reportData(const void* buffer, size_t frameCount) override;
 
     private:
         const sp<MmapThread> mThread;
     };
 
               ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
+              sp<AudioFlinger::ThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const
+                      REQUIRES(mLock);
               PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const;
               MixerThread *checkMixerThread_l(audio_io_handle_t output) const;
               RecordThread *checkRecordThread_l(audio_io_handle_t input) const;
@@ -764,6 +805,8 @@
               void ioConfigChanged(audio_io_config_event_t event,
                                    const sp<AudioIoDescriptor>& ioDesc,
                                    pid_t pid = 0);
+              void onSupportedLatencyModesChanged(
+                    audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes);
 
               // Allocate an audio_unique_id_t.
               // Specific types are audio_io_handle_t, audio_session_t, effect ID (int),
@@ -832,7 +875,7 @@
                 void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
                 void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
-                        std::function<bool(const sp<PlaybackThread>&)> useThread = nullptr);
+                        const std::function<bool(const sp<PlaybackThread>&)>& useThread = nullptr);
 
     // AudioStreamIn is immutable, so their fields are const.
     // For emphasis, we could also make all pointers to them be "const *",
@@ -845,7 +888,8 @@
 
         sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
 
-        AudioStreamIn(AudioHwDevice *dev, sp<StreamInHalInterface> in, audio_input_flags_t flags) :
+        AudioStreamIn(AudioHwDevice *dev, const sp<StreamInHalInterface>& in,
+                audio_input_flags_t flags) :
             audioHwDev(dev), stream(in), flags(flags) {}
         status_t read(void *buffer, size_t bytes, size_t *read) override {
             return stream->read(buffer, bytes, read);
@@ -916,6 +960,7 @@
         AUDIO_HW_GET_MASTER_MUTE,       // get_master_mute
         AUDIO_HW_GET_MICROPHONES,       // getMicrophones
         AUDIO_HW_SET_CONNECTED_STATE,   // setConnectedState
+        AUDIO_HW_SET_SIMULATE_CONNECTIONS, // setSimulateDeviceConnections
     };
 
     mutable     hardware_call_state                 mHardwareStatus;    // for dump only
@@ -947,7 +992,7 @@
                 float       masterVolume_l() const;
                 float       getMasterBalance_l() const;
                 bool        masterMute_l() const;
-                audio_module_handle_t loadHwModule_l(const char *name);
+                AudioHwDevice* loadHwModule_l(const char *name);
 
                 Vector < sp<SyncEvent> > mPendingSyncEvents; // sync events awaiting for a session
                                                              // to be created
@@ -980,6 +1025,8 @@
                                       size_t rejectedKVPSize, const String8& rejectedKVPs,
                                       uid_t callingUid);
 
+    sp<IAudioManager> getOrCreateAudioManager();
+
 public:
     // These methods read variables atomically without mLock,
     // though the variables are updated with mLock.
@@ -999,7 +1046,9 @@
     PatchPanel mPatchPanel;
     sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
 
-    DeviceEffectManager mDeviceEffectManager;
+    const sp<PatchCommandThread> mPatchCommandThread;
+    sp<DeviceEffectManager> mDeviceEffectManager;
+    sp<MelReporter> mMelReporter;
 
     bool       mSystemReady;
     std::atomic_bool mAudioPolicyReady{};
@@ -1021,6 +1070,12 @@
              std::vector<media::audio::common::AudioMMapPolicyInfo>> mPolicyInfos;
     int32_t mAAudioBurstsPerBuffer = 0;
     int32_t mAAudioHwBurstMinMicros = 0;
+
+    /** Interface for interacting with the AudioService. */
+    mediautils::atomic_sp<IAudioManager>       mAudioManager;
+
+    // Whether the Bluetooth variable latency control logic is enabled
+    std::atomic_bool mBluetoothLatencyModesEnabled;
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
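Note on the AudioFlinger.h hunks above: the new lock()/unlock() wrappers carry Clang thread-safety
annotations (ACQUIRE/RELEASE) so that -Wthread-safety can track which paths hold mLock. The sketch
below is a self-contained illustration of how such annotations behave; the macro definitions and
the Service/Lock class names are illustrative, not the AOSP ones.

#include <mutex>

#define CAPABILITY(x) __attribute__((capability(x)))
#define ACQUIRE(...)  __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)  __attribute__((release_capability(__VA_ARGS__)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))

class CAPABILITY("mutex") Lock {
public:
    void lock() ACQUIRE() { mImpl.lock(); }
    void unlock() RELEASE() { mImpl.unlock(); }
private:
    std::mutex mImpl;
};

class Service {
public:
    // Annotated wrappers let callers take the service lock while the
    // analysis still verifies access to GUARDED_BY members.
    void lock() ACQUIRE(mLock) { mLock.lock(); }
    void unlock() RELEASE(mLock) { mLock.unlock(); }
    void setReady(bool ready) {
        lock();
        mReady = ready;   // ok: mLock is held here
        unlock();
    }
private:
    Lock mLock;
    bool mReady GUARDED_BY(mLock) = false;
};

Compiling a sketch like this with clang -Wthread-safety flags any access to mReady made without
the lock held.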
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index 8c5d239..d071922 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -40,11 +40,13 @@
         // Means that this isn't a terminal module, and software patches
         // are used to transport audio data further.
         AHWD_IS_INSERT              = 0x4,
+        // This module supports BT latency mode control
+        AHWD_SUPPORTS_BT_LATENCY_MODES = 0x8,
     };
 
     AudioHwDevice(audio_module_handle_t handle,
                   const char *moduleName,
-                  sp<DeviceHalInterface> hwDevice,
+                  const sp<DeviceHalInterface>& hwDevice,
                   Flags flags)
         : mHandle(handle)
         , mModuleName(strdup(moduleName))
@@ -64,6 +66,10 @@
         return (0 != (mFlags & AHWD_IS_INSERT));
     }
 
+    bool supportsBluetoothVariableLatency() const {
+        return (0 != (mFlags & AHWD_SUPPORTS_BT_LATENCY_MODES));
+    }
+
     audio_module_handle_t handle() const { return mHandle; }
     const char *moduleName() const { return mModuleName; }
     sp<DeviceHalInterface> hwDevice() const { return mHwDevice; }
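The AHWD_SUPPORTS_BT_LATENCY_MODES flag follows the same bit-mask capability pattern as the
existing AHWD_* flags. A minimal sketch of that idiom, with illustrative names rather than the
real AudioHwDevice members:

#include <cstdint>
#include <cstdio>

enum Flags : uint32_t {
    FLAG_NONE            = 0x0,
    FLAG_IS_INSERT       = 0x4,  // mirrors AHWD_IS_INSERT
    FLAG_BT_LATENCY_MODE = 0x8,  // mirrors AHWD_SUPPORTS_BT_LATENCY_MODES
};

struct HwModule {
    uint32_t flags;
    bool supportsBtLatencyModes() const { return (flags & FLAG_BT_LATENCY_MODE) != 0; }
};

int main() {
    HwModule primary{FLAG_IS_INSERT | FLAG_BT_LATENCY_MODE};
    std::printf("BT latency modes: %s\n",
                primary.supportsBtLatencyModes() ? "yes" : "no");
    return 0;
}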
diff --git a/services/audioflinger/AutoPark.h b/services/audioflinger/AutoPark.h
index 9ac7b65..83f6b7d 100644
--- a/services/audioflinger/AutoPark.h
+++ b/services/audioflinger/AutoPark.h
@@ -58,4 +58,4 @@
     FastThreadState::Command    mPreviousCommand;
 };  // class AutoPark
 
-}   // namespace
+}   // namespace android
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 53ac5cb..4fb6138 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -23,6 +23,7 @@
 #include <audio_utils/primitives.h>
 
 #include "AudioFlinger.h"
+#include "EffectConfiguration.h"
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 
 // ----------------------------------------------------------------------------
@@ -30,18 +31,9 @@
 
 namespace android {
 
+using detail::AudioHalVersionInfo;
 using media::IEffectClient;
 
-void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
-        const PatchPanel::Patch& patch) {
-    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
-            __func__, handle, patch.mHalHandle,
-            patch.mAudioPatch.num_sinks,
-            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
-
-    mCommandThread->createAudioPatchCommand(handle, patch);
-}
-
 void AudioFlinger::DeviceEffectManager::onCreateAudioPatch(audio_patch_handle_t handle,
         const PatchPanel::Patch& patch) {
     ALOGV("%s handle %d mHalHandle %d device sink %08x",
@@ -55,11 +47,6 @@
     }
 }
 
-void AudioFlinger::DeviceEffectManager::releaseAudioPatch(audio_patch_handle_t handle) {
-    ALOGV("%s", __func__);
-    mCommandThread->releaseAudioPatchCommand(handle);
-}
-
 void AudioFlinger::DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
     Mutex::Autolock _l(mLock);
@@ -106,12 +93,17 @@
         if (lStatus == NO_ERROR) {
             lStatus = effect->addHandle(handle.get());
             if (lStatus == NO_ERROR) {
-                effect->init(patches);
-                mDeviceEffects.emplace(device, effect);
+                lStatus = effect->init(patches);
+                if (lStatus == NAME_NOT_FOUND) {
+                    lStatus = NO_ERROR;
+                }
+                if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
+                    mDeviceEffects.emplace(device, effect);
+                }
             }
         }
     }
-    if (enabled != NULL) {
+    if (enabled != nullptr) {
         *enabled = (int)effect->isEnabled();
     }
     *status = lStatus;
@@ -120,19 +112,24 @@
 
 status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
         const effect_descriptor_t *desc) {
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            audioflinger::EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory == nullptr) {
         return BAD_VALUE;
     }
 
-    static const float sMinDeviceEffectHalVersion = 6.0;
-    float halVersion = effectsFactory->getHalVersion();
+    static const AudioHalVersionInfo sMinDeviceEffectHalVersion =
+            AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0);
+    static const AudioHalVersionInfo halVersion =
+            audioflinger::EffectConfiguration::getAudioHalVersionInfo();
 
+    // The AIDL-generated AudioHalVersionInfo comparison operator (based on std::tie) can be
+    // trusted as long as the type, major, minor field order in the definition doesn't change.
     if (((desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_PRE_PROC
             && (desc->flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_POST_PROC)
             || halVersion < sMinDeviceEffectHalVersion) {
-        ALOGW("%s() non pre/post processing device effect %s or incompatible API version %f",
-                __func__, desc->name, halVersion);
+        ALOGW("%s() non pre/post processing device effect %s or incompatible API version %s",
+                __func__, desc->name, halVersion.toString().c_str());
         return BAD_VALUE;
     }
 
@@ -143,7 +140,8 @@
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            audioflinger::EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory != 0) {
         status = effectsFactory->createEffect(
                 pEffectUuid, sessionId, AUDIO_IO_HANDLE_NONE, deviceId, effect);
@@ -151,7 +149,9 @@
     return status;
 }
 
-void AudioFlinger::DeviceEffectManager::dump(int fd) {
+void AudioFlinger::DeviceEffectManager::dump(int fd)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
+{
     const bool locked = dumpTryLock(mLock);
     if (!locked) {
         String8 result("DeviceEffectManager may be deadlocked\n");
@@ -203,91 +203,4 @@
     return true;
 }
 
-// -----------  DeviceEffectManager::CommandThread implementation ----------
-
-
-AudioFlinger::DeviceEffectManager::CommandThread::~CommandThread()
-{
-    Mutex::Autolock _l(mLock);
-    mCommands.clear();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::onFirstRef()
-{
-    run("DeviceEffectManage_CommandThread", ANDROID_PRIORITY_AUDIO);
-}
-
-bool AudioFlinger::DeviceEffectManager::CommandThread::threadLoop()
-{
-    mLock.lock();
-    while (!exitPending())
-    {
-        while (!mCommands.empty() && !exitPending()) {
-            sp<Command> command = mCommands.front();
-            mCommands.pop_front();
-            mLock.unlock();
-
-            switch (command->mCommand) {
-            case CREATE_AUDIO_PATCH: {
-                CreateAudioPatchData *data = (CreateAudioPatchData *)command->mData.get();
-                ALOGV("CommandThread() processing create audio patch handle %d", data->mHandle);
-                mManager.onCreateAudioPatch(data->mHandle, data->mPatch);
-                } break;
-            case RELEASE_AUDIO_PATCH: {
-                ReleaseAudioPatchData *data = (ReleaseAudioPatchData *)command->mData.get();
-                ALOGV("CommandThread() processing release audio patch handle %d", data->mHandle);
-                mManager.onReleaseAudioPatch(data->mHandle);
-                } break;
-            default:
-                ALOGW("CommandThread() unknown command %d", command->mCommand);
-            }
-            mLock.lock();
-        }
-
-        // At this stage we have either an empty command queue or the first command in the queue
-        // has a finite delay. So unless we are exiting it is safe to wait.
-        if (!exitPending()) {
-            ALOGV("CommandThread() going to sleep");
-            mWaitWorkCV.wait(mLock);
-        }
-    }
-    mLock.unlock();
-    return false;
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::sendCommand(sp<Command> command) {
-    Mutex::Autolock _l(mLock);
-    mCommands.push_back(command);
-    mWaitWorkCV.signal();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::createAudioPatchCommand(
-        audio_patch_handle_t handle, const PatchPanel::Patch& patch)
-{
-    sp<Command> command = new Command(CREATE_AUDIO_PATCH, new CreateAudioPatchData(handle, patch));
-    ALOGV("CommandThread() adding create patch handle %d mHalHandle %d.", handle, patch.mHalHandle);
-    sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::releaseAudioPatchCommand(
-        audio_patch_handle_t handle)
-{
-    sp<Command> command = new Command(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
-    ALOGV("CommandThread() adding release patch");
-    sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::exit()
-{
-    ALOGV("CommandThread::exit");
-    {
-        AutoMutex _l(mLock);
-        requestExit();
-        mWaitWorkCV.signal();
-    }
-    // Note that we can call it from the thread loop if all other references have been released
-    // but it will safely return WOULD_BLOCK in this case
-    requestExitAndWait();
-}
-
 } // namespace android
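The compatibility check in DeviceEffectManager.cpp now compares AudioHalVersionInfo objects
instead of a float HAL version, relying on a tuple-style comparison. Below is a hedged sketch of
why that is preferable; HalVersion here is an illustration, not the AIDL-generated type.

#include <cassert>
#include <tuple>

struct HalVersion {
    enum class Type { HIDL, AIDL };   // ordering here is illustrative only
    Type type;
    int major;
    int minor;
};

inline bool operator<(const HalVersion& a, const HalVersion& b) {
    // Lexicographic comparison over (type, major, minor), mirroring the
    // std::tie-based operator mentioned in the comment above.
    return std::tie(a.type, a.major, a.minor) < std::tie(b.type, b.major, b.minor);
}

int main() {
    const HalVersion minDeviceEffectVersion{HalVersion::Type::HIDL, 6, 0};
    const HalVersion current{HalVersion::Type::HIDL, 7, 1};
    assert(!(current < minDeviceEffectVersion));   // 7.1 >= 6.0, device effect allowed
    return 0;
}

A numeric comparison would, for example, rank 7.10 below 7.9 and cannot encode the HIDL/AIDL
transport at all, which is what the tuple form avoids.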
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index d2faa70..395781d 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -20,15 +20,15 @@
 #endif
 
 // DeviceEffectManager is concealed within AudioFlinger, their lifetimes are the same.
-class DeviceEffectManager {
+class DeviceEffectManager : public PatchCommandThread::PatchCommandListener {
 public:
-    explicit DeviceEffectManager(AudioFlinger* audioFlinger)
-        : mCommandThread(new CommandThread(*this)), mAudioFlinger(*audioFlinger),
-        mMyCallback(new DeviceEffectManagerCallback(this)) {}
+    explicit DeviceEffectManager(AudioFlinger& audioFlinger)
+        : mAudioFlinger(audioFlinger),
+          mMyCallback(new DeviceEffectManagerCallback(*this)) {}
 
-            ~DeviceEffectManager() {
-                mCommandThread->exit();
-            }
+    void onFirstRef() override {
+        mAudioFlinger.mPatchCommandThread->addListener(this);
+    }
 
     sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
                 const AudioDeviceTypeAddr& device,
@@ -39,19 +39,17 @@
                 status_t *status,
                 bool probe,
                 bool notifyFramesProcessed);
-    void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
-    void releaseAudioPatch(audio_patch_handle_t handle);
 
     size_t removeEffect(const sp<DeviceEffectProxy>& effect);
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
            sp<EffectHalInterface> *effect);
     status_t addEffectToHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
-            sp<EffectHalInterface> effect) {
+            const sp<EffectHalInterface>& effect) {
         return mAudioFlinger.addEffectToHal(deviceId, hwModuleId, effect);
     };
     status_t removeEffectFromHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
-            sp<EffectHalInterface> effect) {
+            const sp<EffectHalInterface>& effect) {
         return mAudioFlinger.removeEffectFromHal(deviceId, hwModuleId, effect);
     };
 
@@ -59,94 +57,25 @@
 
     void dump(int fd);
 
+    // PatchCommandThread::PatchCommandListener implementation
+
+    void onCreateAudioPatch(audio_patch_handle_t handle,
+                            const PatchPanel::Patch& patch) override;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
 private:
-
-    // Thread to execute create and release patch commands asynchronously. This is needed because
-    // PatchPanel::createAudioPatch and releaseAudioPatch are executed from audio policy service
-    // with mutex locked and effect management requires to call back into audio policy service
-    class Command;
-    class CommandThread : public Thread {
-    public:
-
-        enum {
-            CREATE_AUDIO_PATCH,
-            RELEASE_AUDIO_PATCH,
-        };
-
-        CommandThread(DeviceEffectManager& manager)
-            : Thread(false), mManager(manager) {}
-        ~CommandThread() override;
-
-        // Thread virtuals
-        void onFirstRef() override;
-        bool threadLoop() override;
-
-                void exit();
-
-                void createAudioPatchCommand(audio_patch_handle_t handle,
-                        const PatchPanel::Patch& patch);
-                void releaseAudioPatchCommand(audio_patch_handle_t handle);
-
-    private:
-        class CommandData;
-
-        // descriptor for requested tone playback event
-        class Command: public RefBase {
-        public:
-            Command() = default;
-            Command(int command, sp<CommandData> data)
-                : mCommand(command), mData(data) {}
-
-            int mCommand = -1;
-            sp<CommandData> mData;
-        };
-
-        class CommandData: public RefBase {
-        public:
-            virtual ~CommandData() = default;
-        };
-
-        class CreateAudioPatchData : public CommandData {
-        public:
-            CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
-                :   mHandle(handle), mPatch(patch) {}
-
-            audio_patch_handle_t mHandle;
-            const PatchPanel::Patch mPatch;
-        };
-
-        class ReleaseAudioPatchData : public CommandData {
-        public:
-            ReleaseAudioPatchData(audio_patch_handle_t handle)
-                :   mHandle(handle) {}
-
-            audio_patch_handle_t mHandle;
-        };
-
-        void sendCommand(sp<Command> command);
-
-        Mutex   mLock;
-        Condition mWaitWorkCV;
-        std::deque <sp<Command>> mCommands; // list of pending commands
-        DeviceEffectManager& mManager;
-    };
-
-    void onCreateAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
-    void onReleaseAudioPatch(audio_patch_handle_t handle);
-
     status_t checkEffectCompatibility(const effect_descriptor_t *desc);
 
     Mutex mLock;
-    sp<CommandThread> mCommandThread;
     AudioFlinger &mAudioFlinger;
     const sp<DeviceEffectManagerCallback> mMyCallback;
     std::map<AudioDeviceTypeAddr, sp<DeviceEffectProxy>> mDeviceEffects;
 };
 
-class DeviceEffectManagerCallback :  public EffectCallbackInterface {
+class DeviceEffectManagerCallback : public EffectCallbackInterface {
 public:
-            DeviceEffectManagerCallback(DeviceEffectManager *manager)
-                : mManager(*manager) {}
+    explicit DeviceEffectManagerCallback(DeviceEffectManager& manager)
+        : mManager(manager) {}
 
     status_t createEffectHal(const effect_uuid_t *pEffectUuid,
            int32_t sessionId, int32_t deviceId,
@@ -176,10 +105,10 @@
     size_t    frameCount() const override  { return 0; }
     uint32_t  latency() const override  { return 0; }
 
-    status_t addEffectToHal(sp<EffectHalInterface> effect __unused) override {
+    status_t addEffectToHal(const sp<EffectHalInterface>& /* effect */) override {
         return NO_ERROR;
     }
-    status_t removeEffectFromHal(sp<EffectHalInterface> effect __unused) override {
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& /* effect */) override {
         return NO_ERROR;
     }
 
@@ -204,11 +133,11 @@
     int newEffectId() { return mManager.audioFlinger().nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT); }
 
     status_t addEffectToHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+            audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
         return mManager.addEffectToHal(deviceId, hwModuleId, effect);
     }
     status_t removeEffectFromHal(audio_port_handle_t deviceId,
-            audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+            audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
         return mManager.removeEffectFromHal(deviceId, hwModuleId, effect);
     }
 private:
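The refactor above replaces DeviceEffectManager's private CommandThread with registration on the
shared PatchCommandThread via the PatchCommandListener interface. The following is a rough sketch
of that listener pattern with simplified names; dispatch is shown synchronously where the real
code enqueues commands on a dedicated thread.

#include <cstdio>
#include <memory>
#include <vector>

struct PatchListener {
    virtual ~PatchListener() = default;
    virtual void onCreateAudioPatch(int handle) = 0;
    virtual void onReleaseAudioPatch(int handle) = 0;
};

class PatchCommandDispatcher {
public:
    void addListener(const std::shared_ptr<PatchListener>& listener) {
        mListeners.push_back(listener);
    }
    // In AOSP this enqueues a command processed on a dedicated thread;
    // dispatch is inlined here to keep the sketch short.
    void createAudioPatch(int handle) {
        for (const auto& l : mListeners) l->onCreateAudioPatch(handle);
    }
    void releaseAudioPatch(int handle) {
        for (const auto& l : mListeners) l->onReleaseAudioPatch(handle);
    }
private:
    std::vector<std::shared_ptr<PatchListener>> mListeners;
};

struct EffectManager : PatchListener {
    void onCreateAudioPatch(int handle) override { std::printf("create %d\n", handle); }
    void onReleaseAudioPatch(int handle) override { std::printf("release %d\n", handle); }
};

int main() {
    PatchCommandDispatcher dispatcher;
    auto manager = std::make_shared<EffectManager>();
    dispatcher.addListener(manager);   // analogous to addListener(this) in onFirstRef()
    dispatcher.createAudioPatch(1);
    dispatcher.releaseAudioPatch(1);
    return 0;
}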
diff --git a/services/audioflinger/EffectConfiguration.h b/services/audioflinger/EffectConfiguration.h
new file mode 100644
index 0000000..2f07fa2
--- /dev/null
+++ b/services/audioflinger/EffectConfiguration.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+namespace android::audioflinger {
+
+/**
+ * Effect Configuration abstraction and helper class.
+ */
+class EffectConfiguration {
+public:
+    static bool isHidl() {
+        static const bool isHidl = getAudioHalVersionInfo().isHidl();
+        return isHidl;
+    }
+
+    static const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() {
+        static const auto effectsFactoryHal = EffectsFactoryHalInterface::create();
+        return effectsFactoryHal;
+    }
+
+    static const detail::AudioHalVersionInfo& getAudioHalVersionInfo() {
+        static const auto audioHalVersionInfo = getEffectsFactoryHal() ?
+                getEffectsFactoryHal()->getHalVersion() : detail::AudioHalVersionInfo{
+                        detail::AudioHalVersionInfo::Type::HIDL, 0 /* major */, 0 /* minor */ };
+        return audioHalVersionInfo;
+    }
+};
+
+} // namespace android::audioflinger
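The new EffectConfiguration header caches the effects factory HAL and its version in
function-local statics, so the HAL is queried at most once and initialization is thread-safe under
C++11 "magic static" rules. A small sketch of the same idiom with a stand-in factory type:

#include <cstdio>
#include <memory>
#include <string>

struct FakeFactory {                       // stand-in for EffectsFactoryHalInterface
    std::string halVersion() const { return "HIDL 6.0"; }
};

const std::shared_ptr<FakeFactory>& getFactory() {
    // Initialized once, on first call, with thread-safe static-local semantics.
    static const auto sFactory = std::make_shared<FakeFactory>();
    return sFactory;
}

const std::string& getHalVersion() {
    static const std::string sVersion =
            getFactory() ? getFactory()->halVersion() : std::string("unknown");
    return sVersion;
}

int main() {
    std::printf("effects HAL: %s\n", getHalVersion().c_str());
    return 0;
}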
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index e6d7cf7..77aa804 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -44,6 +44,7 @@
 #include <mediautils/TimeCheck.h>
 
 #include "AudioFlinger.h"
+#include "EffectConfiguration.h"
 
 // ----------------------------------------------------------------------------
 
@@ -65,6 +66,7 @@
 namespace android {
 
 using aidl_utils::statusTFromBinderStatus;
+using audioflinger::EffectConfiguration;
 using binder::Status;
 
 namespace {
@@ -278,8 +280,8 @@
         if (!doRegister && !(registered && doEnable)) {
             return NO_ERROR;
         }
-        mPolicyLock.lock();
     }
+    mPolicyLock.lock();
     ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
         __func__, mDescriptor.name, mId, mSessionId, doRegister, registered, doEnable, enabled);
     if (doRegister) {
@@ -498,6 +500,7 @@
 }
 
 void AudioFlinger::EffectBase::dump(int fd, const Vector<String16>& args __unused)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
 {
     String8 result;
 
@@ -569,7 +572,7 @@
       mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
       mOffloaded(false),
-      mAddedToHal(false)
+      mIsOutput(false)
 #ifdef FLOAT_EFFECT_CHAIN
       , mSupportsFloat(false)
 #endif
@@ -962,6 +965,7 @@
     mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
     mConfig.inputCfg.buffer.frameCount = callback->frameCount();
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
+    mIsOutput = callback->isOutput();
 
     ALOGV("configure() %p chain %p buffer %p framecount %zu",
           this, callback->chain().promote().get(),
@@ -980,7 +984,8 @@
 
 #ifdef MULTICHANNEL_EFFECT_CHAIN
     if (status != NO_ERROR &&
-            callback->isOutput() &&
+            EffectConfiguration::isHidl() && // only HIDL effects support channel conversion
+            mIsOutput &&
             (mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
                     || mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
         // Older effects may require exact STEREO position mask.
@@ -1010,7 +1015,8 @@
         mSupportsFloat = true;
     }
 
-    if (status != NO_ERROR) {
+    // only HIDL effects support integer conversion.
+    if (status != NO_ERROR && EffectConfiguration::isHidl()) {
         ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
         mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
         mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1056,6 +1062,13 @@
                     &size,
                     &cmdStatus);
         }
+
+        if (isVolumeControl()) {
+            // Force initializing the volume as 0 for volume control effect for safer ramping
+            uint32_t left = 0;
+            uint32_t right = 0;
+            setVolumeInternal(&left, &right, true /*controller*/);
+        }
     }
 
     // mConfig.outputCfg.buffer.frameCount cannot be zero.
@@ -1094,12 +1107,12 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
          (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        if (mAddedToHal) {
+        if (mCurrentHalStream == getCallback()->io()) {
             return;
         }
 
         (void)getCallback()->addEffectToHal(mEffectInterface);
-        mAddedToHal = true;
+        mCurrentHalStream = getCallback()->io();
     }
 }
 
@@ -1195,12 +1208,11 @@
 {
     if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
              (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
-        if (!mAddedToHal) {
-            return NO_ERROR;
+        if (mCurrentHalStream != getCallback()->io()) {
+            return (mCurrentHalStream == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
         }
-
         getCallback()->removeEffectFromHal(mEffectInterface);
-        mAddedToHal = false;
+        mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
     }
     return NO_ERROR;
 }
@@ -1240,13 +1252,13 @@
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (maxReplySize < sizeof(effect_param_t) ||
+            (maxReplySize < static_cast<signed>(sizeof(effect_param_t)) ||
                    param->psize > maxReplySize - sizeof(effect_param_t))) {
         android_errorWriteLog(0x534e4554, "29251553");
         return -EINVAL;
     }
     if (cmdCode == EFFECT_CMD_GET_PARAM &&
-            (sizeof(effect_param_t) > maxReplySize
+            (static_cast<signed>(sizeof(effect_param_t)) > maxReplySize
                     || param->psize > maxReplySize - sizeof(effect_param_t)
                     || param->vsize > maxReplySize - sizeof(effect_param_t)
                             - param->psize
@@ -1429,23 +1441,24 @@
             ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
-        uint32_t volume[2];
-        uint32_t *pVolume = NULL;
-        uint32_t size = sizeof(volume);
-        volume[0] = *left;
-        volume[1] = *right;
-        if (controller) {
-            pVolume = volume;
-        }
-        status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
-                                           size,
-                                           volume,
-                                           &size,
-                                           pVolume);
-        if (controller && status == NO_ERROR && size == sizeof(volume)) {
-            *left = volume[0];
-            *right = volume[1];
-        }
+        status = setVolumeInternal(left, right, controller);
+    }
+    return status;
+}
+
+status_t AudioFlinger::EffectModule::setVolumeInternal(
+        uint32_t *left, uint32_t *right, bool controller) {
+    uint32_t volume[2] = {*left, *right};
+    uint32_t *pVolume = controller ? volume : nullptr;
+    uint32_t size = sizeof(volume);
+    status_t status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
+                                                size,
+                                                volume,
+                                                &size,
+                                                pVolume);
+    if (controller && status == NO_ERROR && size == sizeof(volume)) {
+        *left = volume[0];
+        *right = volume[1];
     }
     return status;
 }
@@ -1587,7 +1600,7 @@
     return isHapticGenerator(&mDescriptor.type);
 }
 
-status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
 {
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1603,7 +1616,7 @@
     param->vsize = sizeof(int32_t) * 2;
     *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
     *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = intensity;
+    *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1643,6 +1656,22 @@
     return status;
 }
 
+status_t AudioFlinger::EffectModule::getConfigs(
+        audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
+    Mutex::Autolock _l(mLock);
+    if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
+        return NO_INIT;
+    }
+    inputCfg->sample_rate = mConfig.inputCfg.samplingRate;
+    inputCfg->channel_mask = static_cast<audio_channel_mask_t>(mConfig.inputCfg.channels);
+    inputCfg->format = static_cast<audio_format_t>(mConfig.inputCfg.format);
+    outputCfg->sample_rate = mConfig.outputCfg.samplingRate;
+    outputCfg->channel_mask = static_cast<audio_channel_mask_t>(mConfig.outputCfg.channels);
+    outputCfg->format = static_cast<audio_format_t>(mConfig.outputCfg.format);
+    *isOutput = mIsOutput;
+    return NO_ERROR;
+}
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -1659,6 +1688,7 @@
 }
 
 void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     EffectBase::dump(fd, args);
 
@@ -1730,12 +1760,20 @@
     mNotifyFramesProcessed(notifyFramesProcessed)
 {
     ALOGV("constructor %p client %p", this, client.get());
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 
     if (client == 0) {
         return;
     }
     int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
-    mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset);
+    mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{
+            {static_cast<size_t>(EFFECT_PARAM_BUFFER_SIZE + bufOffset)},
+            std::string("Effect ID: ")
+                    .append(std::to_string(effect->id()))
+                    .append(" Session ID: ")
+                    .append(std::to_string(static_cast<int>(effect->sessionId())))
+                    .append(" \n")
+            });
     if (mCblkMemory == 0 ||
             (mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
         ALOGE("not enough memory for Effect size=%zu", EFFECT_PARAM_BUFFER_SIZE +
@@ -1760,6 +1798,7 @@
 BINDER_METHOD_ENTRY(command) \
 BINDER_METHOD_ENTRY(disconnect) \
 BINDER_METHOD_ENTRY(getCblk) \
+BINDER_METHOD_ENTRY(getConfig) \
 
 // singleton for Binder Method Statistics for IEffect
 mediautils::MethodStatistics<int>& getIEffectStatistics() {
@@ -1790,7 +1829,7 @@
         } else {
             getIEffectStatistics().event(code, elapsedMs);
         }
-    }, 0 /* timeoutMs */);
+    }, {} /* timeoutDuration */, {} /* secondChanceDuration */, false /* crashOnTimeout */);
     return BnEffect::onTransact(code, data, reply, flags);
 }
 
@@ -1803,6 +1842,13 @@
   *_aidl_return = (code); \
   return Status::ok();
 
+#define VALUE_OR_RETURN_STATUS_AS_OUT(exp)              \
+    ({                                                  \
+        auto _tmp = (exp);                              \
+        if (!_tmp.ok()) { RETURN(_tmp.error()); }       \
+        std::move(_tmp.value());                        \
+    })
+
 Status AudioFlinger::EffectHandle::enable(int32_t* _aidl_return)
 {
     AutoMutex _l(mLock);
@@ -1904,7 +1950,7 @@
         }
         mCblkMemory.clear();    // free the shared memory before releasing the heap it belongs to
         // Client destructor must run with AudioFlinger client mutex locked
-        Mutex::Autolock _l(mClient->audioFlinger()->mClientLock);
+        Mutex::Autolock _l2(mClient->audioFlinger()->mClientLock);
         mClient.clear();
     }
 }
@@ -1914,6 +1960,32 @@
     return Status::ok();
 }
 
+Status AudioFlinger::EffectHandle::getConfig(
+        media::EffectConfig* _config, int32_t* _aidl_return) {
+    AutoMutex _l(mLock);
+    sp<EffectBase> effect = mEffect.promote();
+    if (effect == nullptr || mDisconnected) {
+        RETURN(DEAD_OBJECT);
+    }
+    sp<EffectModule> effectModule = effect->asEffectModule();
+    if (effectModule == nullptr) {
+        RETURN(INVALID_OPERATION);
+    }
+    audio_config_base_t inputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
+    audio_config_base_t outputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
+    bool isOutput;
+    status_t status = effectModule->getConfigs(&inputCfg, &outputCfg, &isOutput);
+    if (status == NO_ERROR) {
+        constexpr bool isInput = false; // effects always use 'OUT' channel masks.
+        _config->inputCfg = VALUE_OR_RETURN_STATUS_AS_OUT(
+                legacy2aidl_audio_config_base_t_AudioConfigBase(inputCfg, isInput));
+        _config->outputCfg = VALUE_OR_RETURN_STATUS_AS_OUT(
+                legacy2aidl_audio_config_base_t_AudioConfigBase(outputCfg, isInput));
+        _config->isOnInputStream = !isOutput;
+    }
+    RETURN(status);
+}
+
 Status AudioFlinger::EffectHandle::command(int32_t cmdCode,
                        const std::vector<uint8_t>& cmdData,
                        int32_t maxResponseSize,
@@ -1942,14 +2014,14 @@
     }
 
     if (cmdCode == EFFECT_CMD_ENABLE) {
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < static_cast<signed>(sizeof(int))) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
         writeToBuffer(NO_ERROR, response);
         return enable(_aidl_return);
     } else if (cmdCode == EFFECT_CMD_DISABLE) {
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < static_cast<signed>(sizeof(int))) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
@@ -1973,7 +2045,7 @@
             RETURN(INVALID_OPERATION);
         }
 
-        if (maxResponseSize < sizeof(int)) {
+        if (maxResponseSize < (signed)sizeof(int)) {
             android_errorWriteLog(0x534e4554, "32095713");
             RETURN(BAD_VALUE);
         }
@@ -1982,7 +2054,7 @@
         // No need to trylock() here as this function is executed in the binder thread serving a
         // particular client process:  no risk to block the whole media server process or mixer
         // threads if we are stuck here
-        Mutex::Autolock _l(mCblk->lock);
+        Mutex::Autolock _l2(mCblk->lock);
         // keep local copy of index in case of client corruption b/32220769
         const uint32_t clientIndex = mCblk->clientIndex;
         const uint32_t serverIndex = mCblk->serverIndex;
@@ -2085,6 +2157,7 @@
 }
 
 void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
 
@@ -2339,7 +2412,7 @@
             }
         } else {
             effect->setInBuffer(mInBuffer);
-            if (idx_insert == previousSize) {
+            if (idx_insert == static_cast<ssize_t>(previousSize)) {
                 if (idx_insert != 0) {
                     mEffects[idx_insert-1]->configure();
                     mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
@@ -2399,7 +2472,7 @@
             }
             // remember position of first insert effect and by default
             // select this as insert position for new effect
-            if (idx_insert == size) {
+            if (idx_insert == static_cast<ssize_t>(size)) {
                 idx_insert = i;
             }
             // remember position of last insert effect claiming
@@ -2609,7 +2682,7 @@
     return false;
 }
 
-void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
 {
     Mutex::Autolock _l(mLock);
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2629,6 +2702,7 @@
 }
 
 void AudioFlinger::EffectChain::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     String8 result;
 
@@ -2893,6 +2967,9 @@
     if ((*flags & AUDIO_OUTPUT_FLAG_FAST) != 0 && !isFastCompatible()) {
         *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
     }
+    if ((*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0 && !isBitPerfectCompatible()) {
+        *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+    }
 }
 
 void AudioFlinger::EffectChain::checkInputFlagCompatibility(audio_input_flags_t *flags) const
@@ -2930,6 +3007,18 @@
     return true;
 }
 
+bool AudioFlinger::EffectChain::isBitPerfectCompatible() const {
+    Mutex::Autolock _l(mLock);
+    for (const auto &effect : mEffects) {
+        if (effect->isProcessImplemented()
+                && effect->isImplementationSoftware()) {
+            return false;
+        }
+    }
+    // Allow effects without processing or hw accelerated effects.
+    return true;
+}
+
 // isCompatibleWithThread_l() must be called with thread->mLock held
 bool AudioFlinger::EffectChain::isCompatibleWithThread_l(const sp<ThreadBase>& thread) const
 {
@@ -2947,7 +3036,8 @@
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
         sp<EffectHalInterface> *effect) {
     status_t status = NO_INIT;
-    sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+    const sp<EffectsFactoryHalInterface> effectsFactory =
+            EffectConfiguration::getEffectsFactoryHal();
     if (effectsFactory != 0) {
         status = effectsFactory->createEffect(pEffectUuid, sessionId, io(), deviceId, effect);
     }
@@ -2966,7 +3056,7 @@
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::addEffectToHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
@@ -2982,7 +3072,7 @@
 }
 
 status_t AudioFlinger::EffectChain::EffectCallback::removeEffectFromHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     status_t result = NO_INIT;
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
@@ -3124,15 +3214,20 @@
     return t->frameCount();
 }
 
-uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const {
+uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const
+NO_THREAD_SAFETY_ANALYSIS  // latency_l() access
+{
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return 0;
     }
+    // TODO(b/275956781) - this requires the thread lock.
     return t->latency_l();
 }
 
-void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const {
+void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const
+NO_THREAD_SAFETY_ANALYSIS  // setVolumeForOutput_l() access
+{
     sp<ThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
@@ -3285,8 +3380,18 @@
     ALOGV("%s type %d device type %d address %s device ID %d patch.isSoftware() %d",
             __func__, port->type, port->ext.device.type,
             port->ext.device.address, port->id, patch.isSoftware());
-    if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType
-        || port->ext.device.address != mDevice.address()) {
+    if (port->type != AUDIO_PORT_TYPE_DEVICE || port->ext.device.type != mDevice.mType ||
+        port->ext.device.address != mDevice.address()) {
+        return NAME_NOT_FOUND;
+    }
+    if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) &&
+        (audio_port_config_has_input_direction(port))) {
+        ALOGI("%s don't create postprocessing effect on record port", __func__);
+        return NAME_NOT_FOUND;
+    }
+    if (((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC) &&
+        (!audio_port_config_has_input_direction(port))) {
+        ALOGI("%s don't create preprocessing effect on playback port", __func__);
         return NAME_NOT_FOUND;
     }
     status_t status = NAME_NOT_FOUND;
@@ -3338,6 +3443,7 @@
     } else {
         status = BAD_VALUE;
     }
+
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
         Status bs;
         if (isEnabled()) {
@@ -3376,7 +3482,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::addEffectToHal(
-    sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
     }
@@ -3385,7 +3491,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::removeEffectFromHal(
-    sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     if (mHalEffect == nullptr) {
         return NO_INIT;
     }
@@ -3423,7 +3529,9 @@
     return audio_channel_count_from_in_mask(channelMask());
 }
 
-void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces) {
+void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
+{
     const Vector<String16> args;
     EffectBase::dump(fd, args);
 
@@ -3502,7 +3610,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::addEffectToHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return NO_INIT;
@@ -3511,7 +3619,7 @@
 }
 
 status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
-        sp<EffectHalInterface> effect) {
+        const sp<EffectHalInterface>& effect) {
     sp<DeviceEffectProxy> proxy = mProxy.promote();
     if (proxy == nullptr) {
         return NO_INIT;
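Several Effects.cpp hunks above wrap sizeof() in static_cast<signed>() before comparing against a
caller-supplied size. The reason is the usual signed/unsigned promotion trap; the standalone
example below (with an illustrative maxReplySize) shows the failure mode the casts prevent.

#include <cstdio>

int main() {
    const int maxReplySize = -1;   // hostile or corrupted caller-provided size

    // Correct form: both operands are signed, so -1 compares as less than 4.
    if (maxReplySize < static_cast<int>(sizeof(int))) {
        std::printf("rejected: reply buffer too small\n");
    }

    // Broken form: -1 is converted to a huge unsigned value, the comparison
    // is false, and a size check written this way would be bypassed.
    if (maxReplySize < sizeof(int)) {
        std::printf("never printed: the unsigned comparison is false\n");
    }
    return 0;
}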
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 42614cc..885d3e5 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -45,8 +45,8 @@
     // Non trivial methods usually implemented with help from ThreadBase:
     //   pay attention to mutex locking order
     virtual uint32_t latency() const { return 0; }
-    virtual status_t addEffectToHal(sp<EffectHalInterface> effect) = 0;
-    virtual status_t removeEffectFromHal(sp<EffectHalInterface> effect) = 0;
+    virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
+    virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
     virtual void setVolumeForOutput(float left, float right) const = 0;
     virtual bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) = 0;
     virtual void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
@@ -159,8 +159,8 @@
     bool             isPinned() const { return mPinned; }
     void             unPin() { mPinned = false; }
 
-    void             lock() { mLock.lock(); }
-    void             unlock() { mLock.unlock(); }
+    void             lock() ACQUIRE(mLock) { mLock.lock(); }
+    void             unlock() RELEASE(mLock) { mLock.unlock(); }
 
     status_t         updatePolicyState();
 
@@ -280,9 +280,13 @@
     static bool      isHapticGenerator(const effect_uuid_t* type);
     bool             isHapticGenerator() const;
 
-    status_t         setHapticIntensity(int id, int intensity);
+    status_t         setHapticIntensity(int id, os::HapticScale intensity);
     status_t         setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo);
 
+    status_t         getConfigs(audio_config_base_t* inputCfg,
+                                audio_config_base_t* outputCfg,
+                                bool* isOutput) const;
+
     void             dump(int fd, const Vector<String16>& args);
 
 private:
@@ -302,6 +306,8 @@
                 ? EFFECT_BUFFER_ACCESS_WRITE : EFFECT_BUFFER_ACCESS_ACCUMULATE;
     }
 
+    status_t setVolumeInternal(uint32_t *left, uint32_t *right, bool controller);
+
 
     effect_config_t     mConfig;    // input and output audio configuration
     sp<EffectHalInterface> mEffectInterface; // Effect module HAL
@@ -313,7 +319,9 @@
                                     // sending disable command.
     uint32_t mDisableWaitCnt;       // current process() calls count during disable period.
     bool     mOffloaded;            // effect is currently offloaded to the audio DSP
-    bool     mAddedToHal;           // effect has been added to the audio HAL
+    // effect has been added to this HAL input stream
+    audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
+    bool     mIsOutput;             // direction of the AF thread
 
 #ifdef FLOAT_EFFECT_CHAIN
     bool    mSupportsFloat;         // effect supports float processing
@@ -370,6 +378,8 @@
                                     int32_t* _aidl_return) override;
     android::binder::Status disconnect() override;
     android::binder::Status getCblk(media::SharedFileRegion* _aidl_return) override;
+    android::binder::Status getConfig(media::EffectConfig* _config,
+                                      int32_t* _aidl_return) override;
 
     sp<Client> client() const { return mClient; }
 
@@ -450,10 +460,10 @@
 
     void process_l();
 
-    void lock() {
+    void lock() ACQUIRE(mLock) {
         mLock.lock();
     }
-    void unlock() {
+    void unlock() RELEASE(mLock) {
         mLock.unlock();
     }
 
@@ -536,12 +546,15 @@
     // Is this EffectChain compatible with the FAST audio flag.
     bool isFastCompatible() const;
 
+    // Is this EffectChain compatible with the bit-perfect audio flag.
+    bool isBitPerfectCompatible() const;
+
     // isCompatibleWithThread_l() must be called with thread->mLock held
     bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
 
     bool containsHapticGeneratingEffect_l();
 
-    void setHapticIntensity_l(int id, int intensity);
+    void setHapticIntensity_l(int id, os::HapticScale intensity);
 
     sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
     wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
@@ -596,8 +609,8 @@
         size_t frameCount() const override;
         uint32_t latency() const override;
 
-        status_t addEffectToHal(sp<EffectHalInterface> effect) override;
-        status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+        status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+        status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
         bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left, float right) const override;
 
@@ -712,8 +725,8 @@
 
     size_t removeEffect(const sp<EffectModule>& effect);
 
-    status_t addEffectToHal(sp<EffectHalInterface> effect);
-    status_t removeEffectFromHal(sp<EffectHalInterface> effect);
+    status_t addEffectToHal(const sp<EffectHalInterface>& effect);
+    status_t removeEffectFromHal(const sp<EffectHalInterface>& effect);
 
     const AudioDeviceTypeAddr& device() { return mDevice; };
     bool isOutput() const;
@@ -757,8 +770,8 @@
         size_t frameCount() const override  { return 0; }
         uint32_t latency() const override  { return 0; }
 
-        status_t addEffectToHal(sp<EffectHalInterface> effect) override;
-        status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+        status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+        status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
 
         bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
         void setVolumeForOutput(float left __unused, float right __unused) const override {}
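A recurring change in this header (and the matching .cpp hunks) is passing sp<EffectHalInterface>
by const reference instead of by value; a by-value strong pointer copies and releases an atomic
reference count on every call. The toy sketch below uses a simplified counted pointer, not
Android's sp<>, to show the cost being avoided.

#include <atomic>
#include <cstdio>

template <typename T>
class CountedPtr {                 // toy stand-in for sp<T>
public:
    explicit CountedPtr(T* p) : mPtr(p), mCount(new std::atomic<int>(1)) {}
    CountedPtr(const CountedPtr& other) : mPtr(other.mPtr), mCount(other.mCount) {
        ++*mCount;                 // atomic increment on every by-value copy
    }
    CountedPtr& operator=(const CountedPtr&) = delete;
    ~CountedPtr() {
        if (--*mCount == 0) { delete mPtr; delete mCount; }
    }
    T* get() const { return mPtr; }
private:
    T* mPtr;
    std::atomic<int>* mCount;
};

struct Effect { int id = 7; };

// By value: one atomic increment and one decrement per call.
int idByValue(CountedPtr<Effect> e) { return e.get()->id; }

// By const reference: no copy and no reference-count traffic.
int idByConstRef(const CountedPtr<Effect>& e) { return e.get()->id; }

int main() {
    CountedPtr<Effect> effect(new Effect);
    std::printf("%d %d\n", idByValue(effect), idByConstRef(effect));
    return 0;
}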
diff --git a/services/audioflinger/FastCaptureDumpState.cpp b/services/audioflinger/FastCaptureDumpState.cpp
index b8b3866..243dfa5 100644
--- a/services/audioflinger/FastCaptureDumpState.cpp
+++ b/services/audioflinger/FastCaptureDumpState.cpp
@@ -51,4 +51,4 @@
                 periodSec * 1e3, mSilenced ? "true" : "false");
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastCaptureDumpState.h b/services/audioflinger/FastCaptureDumpState.h
index a1b8706..34ce456 100644
--- a/services/audioflinger/FastCaptureDumpState.h
+++ b/services/audioflinger/FastCaptureDumpState.h
@@ -38,6 +38,6 @@
     bool     mSilenced = false; // capture is silenced
 };
 
-}   // android
+}  // namespace android
 
 #endif  // ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H
diff --git a/services/audioflinger/FastCaptureState.cpp b/services/audioflinger/FastCaptureState.cpp
index c4d5e45..918ba9c 100644
--- a/services/audioflinger/FastCaptureState.cpp
+++ b/services/audioflinger/FastCaptureState.cpp
@@ -42,4 +42,4 @@
     LOG_ALWAYS_FATAL("%s", __func__);
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 26bd92d..61dd3f2 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -79,8 +79,6 @@
     mMasterMono(false),
     mThreadIoHandle(parentIoHandle)
 {
-    (void)mThreadIoHandle; // prevent unused warning, see C++17 [[maybe_unused]]
-
     // FIXME pass sInitial as parameter to base class constructor, and make it static local
     mPrevious = &sInitial;
     mCurrent = &sInitial;
diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h
index 97ab635..d71519f 100644
--- a/services/audioflinger/FastMixer.h
+++ b/services/audioflinger/FastMixer.h
@@ -107,7 +107,8 @@
     std::atomic<float> mMasterBalance{};
     std::atomic_int_fast64_t mBoottimeOffset;
 
-    const audio_io_handle_t mThreadIoHandle; // parent thread id for debugging purposes
+    // parent thread id for debugging purposes
+    [[maybe_unused]] const audio_io_handle_t mThreadIoHandle;
 #ifdef TEE_SINK
     NBAIO_Tee       mTee;
 #endif
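FastMixer switches from the old "(void)mThreadIoHandle;" trick to the C++17 [[maybe_unused]]
attribute on the member itself. A tiny sketch of the difference, with an illustrative member name:

class DebugInfo {
public:
    DebugInfo() = default;
private:
    // The attribute replaces the old "(void)mHandle;" workaround for
    // clang's -Wunused-private-field warning.
    [[maybe_unused]] const int mHandle = 0;   // kept only for debugger inspection
};

int main() {
    DebugInfo info;
    (void)info;
    return 0;
}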
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index 3f20282..d041882 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -203,4 +203,4 @@
     }
 }
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastMixerDumpState.h b/services/audioflinger/FastMixerDumpState.h
index 9b91cbc..294ef78 100644
--- a/services/audioflinger/FastMixerDumpState.h
+++ b/services/audioflinger/FastMixerDumpState.h
@@ -81,6 +81,6 @@
     TimestampVerifier<int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
 };
 
-}   // android
+}  // namespace android
 
 #endif  // ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H
diff --git a/services/audioflinger/FastThread.h b/services/audioflinger/FastThread.h
index 3f6b206..2f0f73f 100644
--- a/services/audioflinger/FastThread.h
+++ b/services/audioflinger/FastThread.h
@@ -92,6 +92,6 @@
 
 };  // class FastThread
 
-}   // android
+}  // namespace android
 
 #endif  // ANDROID_AUDIO_FAST_THREAD_H
diff --git a/services/audioflinger/FastThreadDumpState.cpp b/services/audioflinger/FastThreadDumpState.cpp
index 964a725..e91073f 100644
--- a/services/audioflinger/FastThreadDumpState.cpp
+++ b/services/audioflinger/FastThreadDumpState.cpp
@@ -56,4 +56,4 @@
 }
 #endif
 
-}   // android
+}  // namespace android
diff --git a/services/audioflinger/FastThreadDumpState.h b/services/audioflinger/FastThreadDumpState.h
index 1ce0914..0b20e55 100644
--- a/services/audioflinger/FastThreadDumpState.h
+++ b/services/audioflinger/FastThreadDumpState.h
@@ -67,6 +67,6 @@
 
 };  // struct FastThreadDumpState
 
-}   // android
+}  // namespace android
 
 #endif  // ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H
diff --git a/services/audioflinger/FastThreadState.h b/services/audioflinger/FastThreadState.h
index 54c0dc6..9fb4e06 100644
--- a/services/audioflinger/FastThreadState.h
+++ b/services/audioflinger/FastThreadState.h
@@ -50,6 +50,6 @@
     static const char *commandToString(Command command);
 };  // struct FastThreadState
 
-}   // android
+}  // namespace android
 
 #endif  // ANDROID_AUDIO_FAST_THREAD_STATE_H
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
new file mode 100644
index 0000000..39f772b
--- /dev/null
+++ b/services/audioflinger/MelReporter.cpp
@@ -0,0 +1,296 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioFlinger::MelReporter"
+
+#include "AudioFlinger.h"
+
+#include <android/media/ISoundDoseCallback.h>
+#include <audio_utils/power.h>
+#include <utils/Log.h>
+
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
+
+namespace android {
+
+bool AudioFlinger::MelReporter::activateHalSoundDoseComputation(const std::string& module,
+        const sp<DeviceHalInterface>& device) {
+    if (mSoundDoseManager->forceUseFrameworkMel()) {
+        ALOGD("%s: Forcing use of internal MEL computation.", __func__);
+        activateInternalSoundDoseComputation();
+        return false;
+    }
+
+    ndk::SpAIBinder soundDoseBinder;
+    if (device->getSoundDoseInterface(module, &soundDoseBinder) != OK) {
+        ALOGW("%s: HAL cannot provide sound dose interface for module %s, use internal MEL",
+              __func__, module.c_str());
+        activateInternalSoundDoseComputation();
+        return false;
+    }
+
+    if (soundDoseBinder == nullptr) {
+        ALOGW("%s: HAL doesn't implement a sound dose interface for module %s, use internal MEL",
+              __func__, module.c_str());
+        activateInternalSoundDoseComputation();
+        return false;
+    }
+
+    std::shared_ptr<ISoundDose> soundDoseInterface = ISoundDose::fromBinder(soundDoseBinder);
+
+    if (!mSoundDoseManager->setHalSoundDoseInterface(soundDoseInterface)) {
+        ALOGW("%s: cannot activate HAL MEL reporting for module %s", __func__, module.c_str());
+        activateInternalSoundDoseComputation();
+        return false;
+    }
+
+    stopInternalMelComputation();
+    return true;
+}
+
+void AudioFlinger::MelReporter::activateInternalSoundDoseComputation() {
+    {
+        std::lock_guard _l(mLock);
+        if (!mUseHalSoundDoseInterface) {
+            // no need to start internal MEL on active patches
+            return;
+        }
+        mUseHalSoundDoseInterface = false;
+    }
+
+    mSoundDoseManager->setHalSoundDoseInterface(nullptr);
+}
+
+void AudioFlinger::MelReporter::onFirstRef() {
+    mAudioFlinger.mPatchCommandThread->addListener(this);
+}
+
+bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
+    switch (device) {
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
+        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+            return true;
+        default:
+            return false;
+    }
+}
+
+void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
+        const std::vector<playback_track_metadata_v7_t>& metadataVec) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return;
+    }
+
+    std::lock_guard _laf(mAudioFlinger.mLock);
+    std::lock_guard _l(mLock);
+    auto activeMelPatchId = activePatchStreamHandle_l(streamHandle);
+    if (!activeMelPatchId) {
+        ALOGV("%s stream handle %d does not have an active patch", __func__, streamHandle);
+        return;
+    }
+
+    bool shouldActivateCsd = false;
+    for (const auto& metadata : metadataVec) {
+        if (metadata.base.usage == AUDIO_USAGE_GAME || metadata.base.usage == AUDIO_USAGE_MEDIA) {
+            shouldActivateCsd = true;
+        }
+    }
+
+    auto activeMelPatchIt = mActiveMelPatches.find(activeMelPatchId.value());
+    if (activeMelPatchIt != mActiveMelPatches.end()
+        && shouldActivateCsd != activeMelPatchIt->second.csdActive) {
+        if (activeMelPatchIt->second.csdActive) {
+            ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
+            stopMelComputationForPatch_l(activeMelPatchIt->second);
+        } else {
+            ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
+            startMelComputationForActivePatch_l(activeMelPatchIt->second);
+        }
+        activeMelPatchIt->second.csdActive = shouldActivateCsd;
+    }
+}
+
+void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return;
+    }
+
+    ALOGV("%s: handle %d mHalHandle %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+    if (patch.mAudioPatch.num_sources == 0
+        || patch.mAudioPatch.sources[0].type != AUDIO_PORT_TYPE_MIX) {
+        ALOGV("%s: patch does not contain any mix sources", __func__);
+        return;
+    }
+
+    audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
+    ActiveMelPatch newPatch;
+    newPatch.streamHandle = streamHandle;
+    for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
+        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
+            && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+            audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
+            newPatch.deviceHandles.push_back(deviceId);
+            AudioDeviceTypeAddr adt{patch.mAudioPatch.sinks[i].ext.device.type,
+                                    patch.mAudioPatch.sinks[i].ext.device.address};
+            mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+        }
+    }
+
+    if (!newPatch.deviceHandles.empty()) {
+        std::lock_guard _afl(mAudioFlinger.mLock);
+        std::lock_guard _l(mLock);
+        ALOGV("%s add patch handle %d to active devices", __func__, handle);
+        startMelComputationForActivePatch_l(newPatch);
+        newPatch.csdActive = true;
+        mActiveMelPatches[handle] = newPatch;
+    }
+}
+
+void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
+{
+    auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+    if (outputThread == nullptr) {
+        ALOGE("%s cannot find thread for stream handle %d", __func__, patch.streamHandle);
+        return;
+    }
+
+    for (const auto& deviceHandle : patch.deviceHandles) {
+        ++mActiveDevices[deviceHandle];
+        ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
+              patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
+
+        if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+            outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
+                deviceHandle,
+                patch.streamHandle,
+                outputThread->mSampleRate,
+                outputThread->mChannelCount,
+                outputThread->mFormat));
+        }
+    }
+}
+
+void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
+    if (!mSoundDoseManager->isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return;
+    }
+
+    ActiveMelPatch melPatch;
+    {
+        std::lock_guard _l(mLock);
+
+        auto patchIt = mActiveMelPatches.find(handle);
+        if (patchIt == mActiveMelPatches.end()) {
+            ALOGV("%s patch handle %d does not contain any mix sources with active MEL calculation",
+                    __func__, handle);
+            return;
+        }
+
+        melPatch = patchIt->second;
+        mActiveMelPatches.erase(patchIt);
+    }
+
+    std::lock_guard _afl(mAudioFlinger.mLock);
+    std::lock_guard _l(mLock);
+    stopMelComputationForPatch_l(melPatch);
+}
+
+sp<media::ISoundDose> AudioFlinger::MelReporter::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback) {
+    // no need to lock since getSoundDoseInterface is synchronized
+    return mSoundDoseManager->getSoundDoseInterface(callback);
+}
+
+void AudioFlinger::MelReporter::stopInternalMelComputation() {
+    ALOGV("%s", __func__);
+    std::lock_guard _l(mLock);
+    mActiveMelPatches.clear();
+    mUseHalSoundDoseInterface = true;
+}
+
+void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
+{
+    if (!patch.csdActive) {
+        // no need to stop CSD inactive patches
+        return;
+    }
+
+    auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+
+    ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
+    for (const auto& deviceId : patch.deviceHandles) {
+        if (mActiveDevices[deviceId] > 0) {
+            --mActiveDevices[deviceId];
+            if (mActiveDevices[deviceId] == 0) {
+                // no stream is using deviceId anymore
+                ALOGI("%s removing device %d from active CSD devices", __func__, deviceId);
+                mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+            }
+        }
+    }
+
+    if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+        outputThread->stopMelComputation_l();
+    }
+}
+
+
+std::optional<audio_patch_handle_t> AudioFlinger::MelReporter::activePatchStreamHandle_l(
+        audio_io_handle_t streamHandle) {
+    for (const auto& patchIt : mActiveMelPatches) {
+        if (patchIt.second.streamHandle == streamHandle) {
+            return patchIt.first;
+        }
+    }
+    return std::nullopt;
+}
+
+bool AudioFlinger::MelReporter::useHalSoundDoseInterface_l() {
+    return !mSoundDoseManager->forceUseFrameworkMel() && mUseHalSoundDoseInterface;
+}
+
+std::string AudioFlinger::MelReporter::dump() {
+    std::lock_guard _l(mLock);
+    std::string output("\nSound Dose:\n");
+    output.append(mSoundDoseManager->dump());
+    return output;
+}
+
+}  // namespace android
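
A note on the bookkeeping above: mActiveDevices counts how many active streams are routed to each device, and a device is only dropped from CSD tracking when its count reaches zero. A minimal standalone sketch of that reference-counting pattern, using plain STL types instead of the AudioFlinger classes (all names here are illustrative):

    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    using DeviceId = int;

    class ActiveDeviceCounter {
    public:
        void addStream(const std::vector<DeviceId>& routedDevices) {
            for (DeviceId d : routedDevices) ++mActive[d];
        }
        void removeStream(const std::vector<DeviceId>& routedDevices) {
            for (DeviceId d : routedDevices) {
                auto it = mActive.find(d);
                if (it == mActive.end() || it->second == 0) continue;
                if (--it->second == 0) {
                    // Last stream on this device is gone; stop tracking it.
                    std::printf("device %d no longer active\n", d);
                    mActive.erase(it);
                }
            }
        }
    private:
        std::unordered_map<DeviceId, int> mActive;  // device id -> number of active streams
    };
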
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
new file mode 100644
index 0000000..2bc33f2
--- /dev/null
+++ b/services/audioflinger/MelReporter.h
@@ -0,0 +1,117 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+    #error This header file should only be included from AudioFlinger.h
+#endif
+
+#include <mutex>
+#include <sounddose/SoundDoseManager.h>
+#include <unordered_map>
+
+constexpr static int kMaxTimestampDeltaInSec = 120;
+
+/**
+ * Class for listening to new patches and starting the MEL computation. MelReporter is
+ * concealed within AudioFlinger; their lifetimes are the same.
+ */
+class MelReporter : public PatchCommandThread::PatchCommandListener {
+public:
+    explicit MelReporter(AudioFlinger& audioFlinger)
+        : mAudioFlinger(audioFlinger),
+          mSoundDoseManager(sp<SoundDoseManager>::make()) {}
+
+    void onFirstRef() override;
+
+    /**
+     * Activates the MEL reporting from the HAL sound dose interface. If the HAL
+     * does not support the sound dose interface for this module, the internal MEL
+     * calculation will be used.
+     *
+     * <p>If the device is using the audio AIDL HAL then this method will try to get the sound
+     * dose interface from IModule#getSoundDose(). Otherwise, if the legacy audio HIDL HAL is used,
+     * this method looks for the standalone sound dose implementation. It falls back to
+     * the internal MEL computation if no valid sound dose interface can be retrieved.
+     *
+     * @return true if the MEL reporting will be done from any sound dose HAL interface
+     * implementation, false otherwise.
+     */
+    bool activateHalSoundDoseComputation(const std::string& module,
+                                         const sp<DeviceHalInterface>& device);
+
+    /**
+     * Activates the MEL reporting from internal framework values. These are used
+     * as a fallback when there is no sound dose interface implementation from HAL.
+     * Note: the internal CSD computation does not guarantee certification to
+     * IEC62368-1 3rd edition or EN50332-3.
+     */
+    void activateInternalSoundDoseComputation();
+
+    sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+    std::string dump();
+
+    // PatchCommandListener methods
+    void onCreateAudioPatch(audio_patch_handle_t handle,
+                            const PatchPanel::Patch& patch) override;
+    void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
+    /**
+     * The new metadata can determine whether we should compute MEL for the given thread.
+     * This is the case only if one of the tracks in the thread mix is using MEDIA or GAME.
+     * Otherwise, this method will disable CSD.
+     **/
+    void updateMetadataForCsd(audio_io_handle_t streamHandle,
+                              const std::vector<playback_track_metadata_v7_t>& metadataVec);
+private:
+    struct ActiveMelPatch {
+        audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
+        std::vector<audio_port_handle_t> deviceHandles;
+        bool csdActive;
+    };
+
+    /** Returns true if we should compute MEL for the given device. */
+    bool shouldComputeMelForDeviceType(audio_devices_t device);
+
+    void stopInternalMelComputation();
+
+    /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
+    void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+
+    /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
+    void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+
+    std::optional<audio_patch_handle_t>
+    activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
+
+    bool useHalSoundDoseInterface_l() REQUIRES(mLock);
+
+    AudioFlinger& mAudioFlinger;  // does not own the object
+
+    sp<SoundDoseManager> mSoundDoseManager;
+
+    /**
+     * Lock for protecting the active mel patches. Do not mix with the AudioFlinger lock.
+     * Locking order AudioFlinger::mLock -> PatchCommandThread::mLock -> MelReporter::mLock.
+     */
+    std::mutex mLock;
+    std::unordered_map<audio_patch_handle_t, ActiveMelPatch>
+        mActiveMelPatches GUARDED_BY(AudioFlinger::MelReporter::mLock);
+    std::unordered_map<audio_port_handle_t, int>
+        mActiveDevices GUARDED_BY(AudioFlinger::MelReporter::mLock);
+    bool mUseHalSoundDoseInterface GUARDED_BY(AudioFlinger::MelReporter::mLock) = false;
+};
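
The header above documents a strict lock order: AudioFlinger::mLock is always taken before MelReporter::mLock. A minimal sketch of that discipline with standard mutexes (the two structs below are stand-ins, not real AudioFlinger types); acquiring the locks in the same order on every code path is what rules out the deadlock:

    #include <mutex>

    struct FlingerState  { std::mutex mLock; };
    struct ReporterState { std::mutex mLock; };

    void updateUnderBothLocks(FlingerState& af, ReporterState& rep) {
        std::lock_guard _afl(af.mLock);  // 1) always the service-wide lock first
        std::lock_guard _l(rep.mLock);   // 2) then the reporter lock
        // ... mutate state guarded by rep.mLock here ...
    }
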
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index eb640bb..cb46c52 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -54,6 +54,14 @@
             bool        getAndSetSilencedNotified_l() { bool silencedNotified = mSilencedNotified;
                                                         mSilencedNotified = true;
                                                         return silencedNotified; }
+
+    /**
+     * Updates the mute state and notifies the audio service. Call this only when holding player
+     * thread lock.
+     */
+    void processMuteEvent_l(const sp<IAudioManager>& audioManager,
+                            mute_state_t muteState)
+                            REQUIRES(AudioFlinger::MmapPlaybackThread::mLock);
 private:
     friend class MmapThread;
 
@@ -71,5 +79,12 @@
     pid_t mPid;
     bool  mSilenced;            // protected by MMapThread::mLock
     bool  mSilencedNotified;    // protected by MMapThread::mLock
+
+    // TODO: replace PersistableBundle with own struct
+    // access these two variables only when holding player thread lock.
+    std::unique_ptr<os::PersistableBundle> mMuteEventExtras
+            GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
+    mute_state_t mMuteState
+            GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
 };  // end of Track
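
The GUARDED_BY and REQUIRES annotations used in this header come from clang's thread-safety analysis. A self-contained sketch of the idiom, with the macros defined inline so it compiles on its own (in the platform build they come from the thread-annotation headers, and the analysis also relies on a mutex type annotated as a capability); build with -Wthread-safety to have misuse flagged at compile time:

    #include <mutex>

    #if defined(__clang__)
    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #define REQUIRES(x)   __attribute__((requires_capability(x)))
    #else
    #define GUARDED_BY(x)
    #define REQUIRES(x)
    #endif

    class MuteStateHolder {
    public:
        // Caller must already hold mLock; the analysis checks this at compile time.
        void setMuted_l(bool muted) REQUIRES(mLock) { mMuted = muted; }

        bool muted() {
            std::lock_guard<std::mutex> guard(mLock);
            return mMuted;  // OK: mLock is held on this path.
        }

    private:
        std::mutex mLock;
        bool mMuted GUARDED_BY(mLock) = false;
    };
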
 
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
new file mode 100644
index 0000000..f4aab1f
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -0,0 +1,158 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioFlinger::PatchCommandThread"
+//#define LOG_NDEBUG 0
+
+#include "AudioFlinger.h"
+
+namespace android {
+
+constexpr char kPatchCommandThreadName[] = "AudioFlinger_PatchCommandThread";
+
+AudioFlinger::PatchCommandThread::~PatchCommandThread() {
+    exit();
+
+    std::lock_guard _l(mLock);
+    mCommands.clear();
+}
+
+void AudioFlinger::PatchCommandThread::onFirstRef() {
+    run(kPatchCommandThreadName, ANDROID_PRIORITY_AUDIO);
+}
+
+void AudioFlinger::PatchCommandThread::addListener(const sp<PatchCommandListener>& listener) {
+    ALOGV("%s add listener %p", __func__, static_cast<void*>(listener.get()));
+    std::lock_guard _l(mListenerLock);
+    mListeners.emplace_back(listener);
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatch(audio_patch_handle_t handle,
+        const PatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+            __func__, handle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+    createAudioPatchCommand(handle, patch);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatch(audio_patch_handle_t handle) {
+    ALOGV("%s", __func__);
+    releaseAudioPatchCommand(handle);
+}
+
+bool AudioFlinger::PatchCommandThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS  // bug in clang compiler.
+{
+    std::unique_lock _l(mLock);
+
+    while (!exitPending()) {
+        while (!mCommands.empty() && !exitPending()) {
+            const sp<Command> command = mCommands.front();
+            mCommands.pop_front();
+            _l.unlock();
+
+            std::vector<wp<PatchCommandListener>> listenersCopy;
+            {
+                std::lock_guard _ll(mListenerLock);
+                listenersCopy = mListeners;
+            }
+
+            switch (command->mCommand) {
+                case CREATE_AUDIO_PATCH: {
+                    const auto data = (CreateAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing create audio patch handle %d",
+                          __func__,
+                          data->mHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onCreateAudioPatch(data->mHandle, data->mPatch);
+                        }
+                    }
+                }
+                    break;
+                case RELEASE_AUDIO_PATCH: {
+                    const auto data = (ReleaseAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing release audio patch handle %d",
+                          __func__,
+                          data->mHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onReleaseAudioPatch(data->mHandle);
+                        }
+                    }
+                }
+                    break;
+                default:
+                    ALOGW("%s unknown command %d", __func__, command->mCommand);
+                    break;
+            }
+            _l.lock();
+        }
+
+        // At this stage the command queue is empty, so unless we are exiting it is safe to wait
+        // until new work is posted.
+        if (!exitPending()) {
+            ALOGV("%s going to sleep", __func__);
+            mWaitWorkCV.wait(_l);
+        }
+    }
+    return false;
+}
+
+void AudioFlinger::PatchCommandThread::sendCommand(const sp<Command>& command) {
+    std::lock_guard _l(mLock);
+    mCommands.emplace_back(command);
+    mWaitWorkCV.notify_one();
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatchCommand(
+        audio_patch_handle_t handle, const PatchPanel::Patch& patch) {
+    auto command = sp<Command>::make(CREATE_AUDIO_PATCH,
+                                     new CreateAudioPatchData(handle, patch));
+    ALOGV("%s adding create patch handle %d mHalHandle %d.",
+          __func__,
+          handle,
+          patch.mHalHandle);
+    sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatchCommand(audio_patch_handle_t handle) {
+    sp<Command> command =
+        sp<Command>::make(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
+    ALOGV("%s adding release patch", __func__);
+    sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::exit() {
+    ALOGV("%s", __func__);
+    {
+        std::lock_guard _l(mLock);
+        requestExit();
+        mWaitWorkCV.notify_one();
+    }
+    // Note that this can be called from the thread loop itself if all other references have been
+    // released; in that case requestExitAndWait() safely returns WOULD_BLOCK.
+    requestExitAndWait();
+}
+
+}  // namespace android
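
The thread loop above is a standard mutex/condition-variable work queue: commands are popped and executed with the queue lock dropped, and the thread sleeps only when the queue is empty. A standalone sketch of the same pattern with std::thread in place of android::Thread (names are illustrative, not the actual classes):

    #include <condition_variable>
    #include <deque>
    #include <functional>
    #include <mutex>
    #include <thread>

    class CommandWorker {
    public:
        CommandWorker() : mThread([this] { loop(); }) {}
        ~CommandWorker() {
            {
                std::lock_guard _l(mLock);
                mExit = true;
                mWaitWorkCV.notify_one();
            }
            mThread.join();
        }
        void send(std::function<void()> command) {
            std::lock_guard _l(mLock);
            mCommands.push_back(std::move(command));
            mWaitWorkCV.notify_one();
        }
    private:
        void loop() {
            std::unique_lock _l(mLock);
            while (!mExit) {
                while (!mCommands.empty() && !mExit) {
                    auto command = std::move(mCommands.front());
                    mCommands.pop_front();
                    _l.unlock();   // run the command without holding the queue lock
                    command();
                    _l.lock();
                }
                if (!mExit) mWaitWorkCV.wait(_l);
            }
        }
        std::mutex mLock;
        std::condition_variable mWaitWorkCV;
        std::deque<std::function<void()>> mCommands;
        bool mExit = false;
        std::thread mThread;  // declared last so the loop starts after the other members exist
    };
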
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
new file mode 100644
index 0000000..b52e0a9
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.h
@@ -0,0 +1,102 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+    #error This header file should only be included from AudioFlinger.h
+#endif
+
+class Command;
+
+// Thread to execute create and release patch commands asynchronously. This is needed because
+// PatchPanel::createAudioPatch and releaseAudioPatch are executed from the audio policy service
+// with its mutex held, and effect management needs to call back into the audio policy service.
+class PatchCommandThread : public Thread {
+public:
+
+    enum {
+        CREATE_AUDIO_PATCH,
+        RELEASE_AUDIO_PATCH,
+    };
+
+    class PatchCommandListener : public virtual RefBase {
+    public:
+        virtual void onCreateAudioPatch(audio_patch_handle_t handle,
+                                        const PatchPanel::Patch& patch) = 0;
+        virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+    };
+
+    PatchCommandThread() : Thread(false /* canCallJava */) {}
+    ~PatchCommandThread() override;
+
+    void addListener(const sp<PatchCommandListener>& listener);
+
+    void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
+    void releaseAudioPatch(audio_patch_handle_t handle);
+
+    // Thread virtuals
+    void onFirstRef() override;
+    bool threadLoop() override;
+
+    void exit();
+
+    void createAudioPatchCommand(audio_patch_handle_t handle,
+            const PatchPanel::Patch& patch);
+    void releaseAudioPatchCommand(audio_patch_handle_t handle);
+
+private:
+    class CommandData;
+
+    // Command type received from the PatchPanel
+    class Command: public RefBase {
+    public:
+        Command() = default;
+        Command(int command, const sp<CommandData>& data)
+            : mCommand(command), mData(data) {}
+
+        const int mCommand = -1;
+        const sp<CommandData> mData;
+    };
+
+    class CommandData: public RefBase {};
+
+    class CreateAudioPatchData : public CommandData {
+    public:
+        CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
+            :   mHandle(handle), mPatch(patch) {}
+
+        const audio_patch_handle_t mHandle;
+        const PatchPanel::Patch mPatch;
+    };
+
+    class ReleaseAudioPatchData : public CommandData {
+    public:
+        explicit ReleaseAudioPatchData(audio_patch_handle_t handle)
+            :   mHandle(handle) {}
+
+        audio_patch_handle_t mHandle;
+    };
+
+    void sendCommand(const sp<Command>& command);
+
+    std::string mThreadName;
+    std::mutex mLock;
+    std::condition_variable mWaitWorkCV;
+    std::deque<sp<Command>> mCommands GUARDED_BY(mLock); // list of pending commands
+
+    std::mutex mListenerLock;
+    std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(mListenerLock);
+};
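
Listeners are stored as weak pointers and promoted to strong pointers only for the duration of a callback, so the command thread never keeps a destroyed listener alive and never calls into one. The same idea expressed with std:: smart pointers, as a rough sketch:

    #include <memory>
    #include <mutex>
    #include <vector>

    struct Listener {
        virtual ~Listener() = default;
        virtual void onEvent(int handle) = 0;
    };

    class ListenerRegistry {
    public:
        void addListener(const std::shared_ptr<Listener>& listener) {
            std::lock_guard _l(mLock);
            mListeners.push_back(listener);
        }
        void notify(int handle) {
            std::vector<std::weak_ptr<Listener>> copy;
            {
                std::lock_guard _l(mLock);
                copy = mListeners;               // copy under the lock, call without it
            }
            for (const auto& weak : copy) {
                if (auto strong = weak.lock()) { // promote; expired listeners are skipped
                    strong->onEvent(handle);
                }
            }
        }
    private:
        std::mutex mLock;
        std::vector<std::weak_ptr<Listener>> mListeners;
    };
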
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..d0feba5 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -135,6 +135,10 @@
 status_t AudioFlinger::PatchPanel::createAudioPatch(const struct audio_patch *patch,
                                    audio_patch_handle_t *handle,
                                    bool endpointPatch)
+ // unlocks AudioFlinger::mLock when calling ThreadBase::sendCreateAudioPatchConfigEvent
+ // to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ // before processing the create patch request.
+ NO_THREAD_SAFETY_ANALYSIS
 {
     if (handle == NULL || patch == NULL) {
         return BAD_VALUE;
@@ -245,7 +249,6 @@
                         status = INVALID_OPERATION;
                         goto exit;
                     }
-
                     sp<ThreadBase> thread =
                             mAudioFlinger.checkPlaybackThread_l(patch->sources[1].ext.mix.handle);
                     if (thread == 0) {
@@ -313,12 +316,19 @@
                         patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
                         patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
                 audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+                audio_source_t source = AUDIO_SOURCE_MIC;
+                // For telephony patches, propagate voice communication use case to record side
+                if (patch->num_sources == 2
+                        && patch->sources[1].ext.mix.usecase.stream
+                                == AUDIO_STREAM_VOICE_CALL) {
+                    source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+                }
                 sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
                                                                     &input,
                                                                     &config,
                                                                     device,
                                                                     address,
-                                                                    AUDIO_SOURCE_MIC,
+                                                                    source,
                                                                     flags,
                                                                     outputDevice,
                                                                     outputDeviceAddress);
@@ -349,11 +359,12 @@
                             goto exit;
                         }
                     }
+                    mAudioFlinger.unlock();
                     status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+                    mAudioFlinger.lock();
                     if (status == NO_ERROR) {
                         newPatch.setThread(thread);
                     }
-
                     // remove stale audio patch with same input as sink if any
                     for (auto& iter : mPatches) {
                         if (iter.second.mAudioPatch.sinks[0].ext.mix.handle == thread->id()) {
@@ -415,7 +426,9 @@
                 mAudioFlinger.updateOutDevicesForRecordThreads_l(devices);
             }
 
+            mAudioFlinger.unlock();
             status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+            mAudioFlinger.lock();
             if (status == NO_ERROR) {
                 newPatch.setThread(thread);
             }
@@ -442,7 +455,7 @@
     if (status == NO_ERROR) {
         *handle = (audio_patch_handle_t) mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH);
         newPatch.mHalHandle = halHandle;
-        mAudioFlinger.mDeviceEffectManager.createAudioPatch(*handle, newPatch);
+        mAudioFlinger.mPatchCommandThread->createAudioPatch(*handle, newPatch);
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
             addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
         }
@@ -516,9 +529,14 @@
     audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
             mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
     audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+    audio_source_t source = AUDIO_SOURCE_DEFAULT;
     if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
         // "reuse one existing output mix" case
         streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+        // For telephony patches, propagate voice communication use case to record side
+        if (streamType == AUDIO_STREAM_VOICE_CALL) {
+            source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+        }
     }
     if (mPlayback.thread()->hasFastMixer()) {
         // Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +564,8 @@
                                                  inChannelMask,
                                                  format,
                                                  frameCount,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 source);
     } else {
         // use a pseudo LCM between input and output framecount
         int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +585,9 @@
                                                  frameCount,
                                                  nullptr,
                                                  (size_t)0 /* bufferSize */,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 {} /* timeout */,
+                                                 source);
     }
     status = mRecord.checkTrack(tempRecordTrack.get());
     if (status != NO_ERROR) {
@@ -714,7 +735,11 @@
 
 /* Disconnect a patch */
 status_t AudioFlinger::PatchPanel::releaseAudioPatch(audio_patch_handle_t handle)
-{
+ // unlocks AudioFlinger::mLock when calling ThreadBase::sendReleaseAudioPatchConfigEvent
+ // to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ // before processing the release patch request.
+ NO_THREAD_SAFETY_ANALYSIS
+{
     ALOGV("%s handle %d", __func__, handle);
     status_t status = NO_ERROR;
 
@@ -751,7 +776,9 @@
                         break;
                     }
                 }
+                mAudioFlinger.unlock();
                 status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+                mAudioFlinger.lock();
             } else {
                 status = hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
             }
@@ -772,7 +799,9 @@
                     break;
                 }
             }
+            mAudioFlinger.unlock();
             status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+            mAudioFlinger.lock();
         } break;
         default:
             status = BAD_VALUE;
@@ -785,7 +814,7 @@
 void AudioFlinger::PatchPanel::erasePatch(audio_patch_handle_t handle) {
     mPatches.erase(handle);
     removeSoftwarePatchFromInsertedModules(handle);
-    mAudioFlinger.mDeviceEffectManager.releaseAudioPatch(handle);
+    mAudioFlinger.mPatchCommandThread->releaseAudioPatch(handle);
 }
 
 /* List connected audio ports and their attributes */
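
The unlock()/lock() pairs added around sendCreateAudioPatchConfigEvent and sendReleaseAudioPatchConfigEvent release the service-wide lock while waiting on a thread that may itself need that lock; the NO_THREAD_SAFETY_ANALYSIS annotations added alongside them keep clang's analysis from flagging the manual unlock/relock. A minimal sketch of the pattern (the mutex name and the stub call below are hypothetical):

    #include <mutex>

    std::mutex gServiceLock;

    // Stand-in for a config event that blocks on another thread, where that thread may
    // need gServiceLock to make progress.
    static int sendConfigEventAndWait() { return 0; }

    int createPatchLocked() {
        // gServiceLock is held on entry (taken by the caller).
        gServiceLock.unlock();                 // drop it so the target thread cannot deadlock
        const int status = sendConfigEventAndWait();
        gServiceLock.lock();                   // reacquire before returning to the caller
        return status;
    }
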
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 93593a3..5555766 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -31,7 +31,6 @@
                   mPlaybackThreadHandle(playbackThreadHandle),
                   mRecordThreadHandle(recordThreadHandle) {}
         SoftwarePatch(const SoftwarePatch&) = default;
-        SoftwarePatch& operator=(const SoftwarePatch&) = default;
 
         // Must be called under AudioFlinger::mLock
         status_t getLatencyMs_l(double *latencyMs) const;
@@ -200,7 +199,7 @@
             return mRecord.handle() != AUDIO_PATCH_HANDLE_NONE ||
                     mPlayback.handle() != AUDIO_PATCH_HANDLE_NONE; }
 
-        void setThread(sp<ThreadBase> thread) { mThread = thread; }
+        void setThread(const sp<ThreadBase>& thread) { mThread = thread; }
         wp<ThreadBase> thread() const { return mThread; }
 
         // returns the latency of the patch (from record to playback).
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 6a138bb..0e1a3c9 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -83,7 +83,8 @@
                                   * ready as possible (aka. Buffer is full). */
                                 size_t frameCountToBeReady = SIZE_MAX,
                                 float speed = 1.0f,
-                                bool isSpatialized = false);
+                                bool isSpatialized = false,
+                                bool isBitPerfect = false);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -147,8 +148,12 @@
     sp<media::VolumeHandler>   getVolumeHandler() { return mVolumeHandler; }
     /** Set the computed normalized final volume of the track.
      * !masterMute * masterVolume * streamVolume * averageLRVolume */
-    void                setFinalVolume(float volume);
+    void                setFinalVolume(float volumeLeft, float volumeRight);
     float               getFinalVolume() const { return mFinalVolume; }
+    void                getFinalVolume(float* left, float* right) const {
+                            *left = mFinalVolumeLeft;
+                            *right = mFinalVolumeRight;
+    }
 
     using SourceMetadatas = std::vector<playback_track_metadata_v7_t>;
     using MetadataInserter = std::back_insert_iterator<SourceMetadatas>;
@@ -181,7 +186,9 @@
             }
             sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
 
-            void    setTeePatches(TeePatches teePatches);
+            // This function should be called with holding thread lock.
+            void    updateTeePatches();
+            void    setTeePatchesToUpdate(TeePatches teePatchesToUpdate);
 
     void tallyUnderrunFrames(size_t frames) override {
        if (isOut()) { // we expect this from output tracks only
@@ -203,6 +210,13 @@
     audio_output_flags_t getOutputFlags() const { return mFlags; }
     float getSpeed() const { return mSpeed; }
     bool isSpatialized() const override { return mIsSpatialized; }
+    bool isBitPerfect() const override { return mIsBitPerfect; }
+
+    /**
+     * Updates the mute state and notifies the audio service. Call this only when holding player
+     * thread lock.
+     */
+    void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState);
 
 protected:
     // for numerous
@@ -310,6 +324,7 @@
         binder::Status unmute(/*out*/ bool *ret) override;
     private:
         Track* const mTrack;
+        bool setMute(bool muted);
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
@@ -346,14 +361,25 @@
                                         // 'volatile' means accessed without lock or
                                         // barrier, but is read/written atomically
     float               mFinalVolume; // combine master volume, stream type volume and track volume
+    float               mFinalVolumeLeft; // combine master volume, stream type volume and track
+                                          // volume
+    float               mFinalVolumeRight; // combine master volume, stream type volume and track
+                                           // volume
     sp<AudioTrackServerProxy>  mAudioTrackServerProxy;
     bool                mResumeToStopping; // track was paused in stopping state.
     bool                mFlushHwPending; // track requests for thread flush
     bool                mPauseHwPending = false; // direct/offload track request for thread pause
     audio_output_flags_t mFlags;
     TeePatches  mTeePatches;
+    std::optional<TeePatches> mTeePatchesToUpdate;
     const float         mSpeed;
     const bool          mIsSpatialized;
+    const bool          mIsBitPerfect;
+
+    // TODO: replace PersistableBundle with own struct
+    // access these two variables only when holding player thread lock.
+    std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
+    mute_state_t        mMuteState;
 };  // end of Track
 
 
@@ -403,6 +429,7 @@
 private:
     status_t            obtainBuffer(AudioBufferProvider::Buffer* buffer,
                                      uint32_t waitTimeMs);
+    void                queueBuffer(Buffer& inBuffer);
     void                clearBufferQueue();
 
     void                restartIfDisabled();
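
The setFinalVolume comment in this header spells out the formula !masterMute * masterVolume * streamVolume * averageLRVolume. A tiny worked example with assumed input values (the real inputs come from AudioFlinger state):

    #include <cstdio>

    int main() {
        const bool  masterMute   = false;
        const float masterVolume = 0.8f;
        const float streamVolume = 0.5f;
        const float leftVolume   = 1.0f;
        const float rightVolume  = 0.6f;
        const float averageLR    = (leftVolume + rightVolume) / 2.0f;        // 0.8
        const float finalVolume  =
                (masterMute ? 0.0f : 1.0f) * masterVolume * streamVolume * averageLR;
        std::printf("final volume = %.3f\n", finalVolume);                   // 0.320
        return 0;
    }
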
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index e8552c4..f0a5f76 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -73,7 +73,8 @@
             void        setSilenced(bool silenced) { if (!isPatchTrack()) mSilenced = silenced; }
             bool        isSilenced() const { return mSilenced; }
 
-            status_t    getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+            status_t    getActiveMicrophones(
+                    std::vector<media::MicrophoneInfoFw>* activeMicrophones);
 
             status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
             status_t    setPreferredMicrophoneFieldDimension(float zoom);
@@ -87,6 +88,10 @@
                                     && (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
                         }
 
+            using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+            using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+            virtual void    copyMetadataTo(MetadataInserter& backInserter) const;
+
 private:
     friend class AudioFlinger;  // for mState
 
@@ -134,7 +139,8 @@
                 void *buffer,
                 size_t bufferSize,
                 audio_input_flags_t flags,
-                const Timeout& timeout = {});
+                const Timeout& timeout = {},
+                audio_source_t source = AUDIO_SOURCE_DEFAULT);
     virtual             ~PatchRecord();
 
     virtual Source* getSource() { return nullptr; }
@@ -166,7 +172,8 @@
                         audio_channel_mask_t channelMask,
                         audio_format_t format,
                         size_t frameCount,
-                        audio_input_flags_t flags);
+                        audio_input_flags_t flags,
+                        audio_source_t source = AUDIO_SOURCE_DEFAULT);
 
     Source* getSource() override { return static_cast<Source*>(this); }
 
diff --git a/services/audioflinger/StateQueue.cpp b/services/audioflinger/StateQueue.cpp
index 9d4188f..38ce2c2 100644
--- a/services/audioflinger/StateQueue.cpp
+++ b/services/audioflinger/StateQueue.cpp
@@ -187,7 +187,9 @@
 
 }   // namespace android
 
-// hack for gcc
+// Hack to avoid explicit template instantiation of
+// template class StateQueue<FastCaptureState>;
+// template class StateQueue<FastMixerState>;
 #ifdef STATE_QUEUE_INSTANTIATIONS
-#include STATE_QUEUE_INSTANTIATIONS
+#include STATE_QUEUE_INSTANTIATIONS  // NOLINT(bugprone-suspicious-include)
 #endif
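
The reworded comment refers to explicit template instantiation: the chosen translation unit emits the template's members once, so other files can link against them while only seeing the declaration. A generic sketch of the mechanism (Queue is a stand-in; the real instantiations are listed in the file named by STATE_QUEUE_INSTANTIATIONS):

    template <typename T>
    class Queue {
    public:
        void push(const T& value) { mLast = value; }
    private:
        T mLast{};
    };

    // Forces the compiler to emit every member of these specializations in this
    // translation unit.
    template class Queue<int>;
    template class Queue<float>;
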
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/services/audioflinger/TEST_MAPPING
+++ b/services/audioflinger/TEST_MAPPING
@@ -4,7 +4,16 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
diff --git a/services/audioflinger/ThreadMetrics.h b/services/audioflinger/ThreadMetrics.h
index 6526655..5493b3c 100644
--- a/services/audioflinger/ThreadMetrics.h
+++ b/services/audioflinger/ThreadMetrics.h
@@ -148,7 +148,14 @@
             item.set(AMEDIAMETRICS_PROP_CUMULATIVETIMENS, mCumulativeTimeNs)
                 .set(AMEDIAMETRICS_PROP_DEVICETIMENS, mDeviceTimeNs)
                 .set(AMEDIAMETRICS_PROP_EVENT, eventName)
-                .set(AMEDIAMETRICS_PROP_INTERVALCOUNT, (int32_t)mIntervalCount);
+                .set(AMEDIAMETRICS_PROP_INTERVALCOUNT, (int32_t)mIntervalCount)
+                // we set "last" device to indicate the device the group was
+                // associated with (because a createPatch which is logged in ThreadMetrics
+                // could have changed the device).
+                .set(mIsOut
+                        ? AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_OUTPUTDEVICES
+                        : AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_INPUTDEVICES,
+                        mDevices.c_str());
             if (mDeviceLatencyMs.getN() > 0) {
                 item.set(AMEDIAMETRICS_PROP_DEVICELATENCYMS, mDeviceLatencyMs.getMean());
             }
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 56ebb6e..700bdd2 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -17,7 +17,7 @@
 
 
 #define LOG_TAG "AudioFlinger"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
 #include "Configuration.h"
@@ -31,6 +31,7 @@
 #include <sys/syscall.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
+#include <binder/PersistableBundle.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioParameter.h>
@@ -43,6 +44,7 @@
 #include <private/media/AudioTrackShared.h>
 #include <private/android_filesystem_config.h>
 #include <audio_utils/Balance.h>
+#include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
 #include <audio_utils/channels.h>
 #include <audio_utils/mono_blend.h>
@@ -178,6 +180,11 @@
 // Direct output thread minimum sleep time in idle or active(underrun) state
 static const nsecs_t kDirectMinSleepTimeUs = 10000;
 
+// Minimum amount of time between checking to see if the timestamp is advancing
+// for underrun detection. If we check too frequently, we may not detect a
+// timestamp update and will falsely detect underrun.
+static const nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1000;
+
 // The universal constant for ubiquitous 20ms value. The value of 20ms seems to provide a good
 // balance between power consumption and latency, and allows threads to be scheduled reliably
 // by the CFS scheduler.
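
The new constant above limits how often the playback thread re-checks whether the HAL timestamp is still advancing: polling faster than the HAL publishes updates would look like a stalled position and produce a false underrun. A hypothetical sketch of such a rate-limited check (not the helper the thread actually uses):

    #include <cstdint>

    class TimestampAdvancingCheck {
    public:
        explicit TimestampAdvancingCheck(int64_t minIntervalNs) : mMinIntervalNs(minIntervalNs) {}

        // Returns true while the position is considered to be advancing; only re-evaluates
        // once at least mMinIntervalNs has elapsed since the previous evaluation.
        bool check(int64_t nowNs, int64_t positionFrames) {
            if (nowNs - mLastCheckNs < mMinIntervalNs) {
                return mAdvancing;                 // too soon to tell, keep the last verdict
            }
            mAdvancing = positionFrames > mLastPosition;
            mLastPosition = positionFrames;
            mLastCheckNs = nowNs;
            return mAdvancing;
        }

    private:
        const int64_t mMinIntervalNs;
        int64_t mLastCheckNs = 0;
        int64_t mLastPosition = -1;
        bool mAdvancing = true;
    };
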
@@ -264,6 +271,23 @@
     return ss.str();
 }
 
+static std::string toString(audio_latency_mode_t mode) {
+    // We convert to the AIDL type to print (eventually the legacy type will be removed).
+    const auto result = legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode);
+    return result.has_value() ? media::audio::common::toString(*result) : "UNKNOWN";
+}
+
+// Could be made a template, but other toString overloads for std::vector are confused.
+static std::string toString(const std::vector<audio_latency_mode_t>& elements) {
+    std::string s("{ ");
+    for (const auto& e : elements) {
+        s.append(toString(e));
+        s.append(" ");
+    }
+    s.append("}");
+    return s;
+}
+
 static pthread_once_t sFastTrackMultiplierOnce = PTHREAD_ONCE_INIT;
 
 static void sFastTrackMultiplierInit()
@@ -352,7 +376,7 @@
         // try three times to get the clock offset, choose the one
         // with the minimum gap in measurements.
         const int tries = 3;
-        nsecs_t bestGap, measured;
+        nsecs_t bestGap = 0, measured = 0; // not required, initialized for clang-tidy
         for (int i = 0; i < tries; ++i) {
             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
             const nsecs_t tbase = systemTime(clockbase);
@@ -512,6 +536,8 @@
         return "MMAP_CAPTURE";
     case SPATIALIZER:
         return "SPATIALIZER";
+    case BIT_PERFECT:
+        return "BIT_PERFECT";
     default:
         return "unknown";
     }
@@ -601,6 +627,7 @@
 // sendConfigEvent_l() must be called with ThreadBase::mLock held
 // Can temporarily release the lock if waiting for a reply from processConfigEvents_l().
 status_t AudioFlinger::ThreadBase::sendConfigEvent_l(sp<ConfigEvent>& event)
+NO_THREAD_SAFETY_ANALYSIS  // condition variable
 {
     status_t status = NO_ERROR;
 
@@ -741,6 +768,12 @@
     sendConfigEvent_l(configEvent);
 }
 
+void AudioFlinger::ThreadBase::sendHalLatencyModesChangedEvent_l()
+{
+    sp<ConfigEvent> configEvent = sp<HalLatencyModesChangedEvent>::make();
+    sendConfigEvent_l(configEvent);
+}
+
 // post condition: mConfigEvents.isEmpty()
 void AudioFlinger::ThreadBase::processConfigEvents_l()
 {
@@ -779,6 +812,7 @@
                                             (CreateAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
             const DeviceTypeSet newDevices = getDeviceTypes();
+            configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -789,6 +823,7 @@
                                             (ReleaseAudioPatchConfigEventData *)event->mData.get();
             event->mStatus = releaseAudioPatch_l(data->mHandle);
             const DeviceTypeSet newDevices = getDeviceTypes();
+            configChanged = oldDevices != newDevices;
             mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
                     dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
                     dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -808,6 +843,10 @@
             setCheckOutputStageEffects();
         } break;
 
+        case CFG_EVENT_HAL_LATENCY_MODES_CHANGED: {
+            onHalLatencyModesChanged_l();
+        } break;
+
         default:
             ALOG_ASSERT(false, "processConfigEvents_l() unknown event type %d", event->mType);
             break;
@@ -904,6 +943,7 @@
 }
 
 void AudioFlinger::ThreadBase::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS  // conditional try lock
 {
     dprintf(fd, "\n%s thread %p, name %s, tid %d, type %d (%s):\n", isOutput() ? "Output" : "Input",
             this, mThreadName, getTid(), type(), threadTypeToString(type()));
@@ -1272,7 +1312,9 @@
 
 void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(bool enabled,
                                                            audio_session_t sessionId,
-                                                           bool threadLocked) {
+                                                           bool threadLocked)
+NO_THREAD_SAFETY_ANALYSIS  // manual locking
+{
     if (!threadLocked) {
         mLock.lock();
     }
@@ -1483,6 +1525,26 @@
             }
         }
         break;
+    case BIT_PERFECT:
+        if ((desc->flags & EFFECT_FLAG_HW_ACC_TUNNEL) != 0) {
+            // Allow HW accelerated effects of tunnel type
+            break;
+        }
+        // As bit-perfect tracks are not allowed to apply audio effects that touch the audio data,
+        // effects are rejected for 1) global sessions (AUDIO_SESSION_OUTPUT_MIX),
+        // 2) post-processing sessions (AUDIO_SESSION_OUTPUT_STAGE or AUDIO_SESSION_DEVICE), and
+        // 3) sessions that already have a bit-perfect track attached.
+        if (sessionId == AUDIO_SESSION_OUTPUT_MIX || sessionId == AUDIO_SESSION_OUTPUT_STAGE ||
+            sessionId == AUDIO_SESSION_DEVICE) {
+            ALOGW("%s: effect %s not supported on bit-perfect thread %s",
+                  __func__, desc->name, mThreadName);
+            return BAD_VALUE;
+        } else if ((hasAudioSession_l(sessionId) & ThreadBase::BIT_PERFECT_SESSION) != 0) {
+            ALOGW("%s: effect %s not supported as there is a bit-perfect track with session as %d",
+                  __func__, desc->name, sessionId);
+            return BAD_VALUE;
+        }
+        break;
     default:
         LOG_ALWAYS_FATAL("checkEffectCompatibility_l(): wrong thread type %d", mType);
     }
@@ -1736,6 +1798,7 @@
 
 void AudioFlinger::ThreadBase::lockEffectChains_l(
         Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
     for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -1745,6 +1808,7 @@
 
 void AudioFlinger::ThreadBase::unlockEffectChains(
         const Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
     for (size_t i = 0; i < effectChains.size(); i++) {
         effectChains[i]->unlock();
@@ -1852,7 +1916,7 @@
 
 template <typename T>
 void AudioFlinger::ThreadBase::ActiveTracks<T>::updatePowerState(
-        sp<ThreadBase> thread, bool force) {
+        const sp<ThreadBase>& thread, bool force) {
     // Updates ActiveTracks client uids to the thread wakelock.
     if (mActiveTracksGeneration != mLastActiveTracksGeneration || force) {
         thread->updateWakeLockUids_l(getWakeLockUids());
@@ -1987,6 +2051,21 @@
     return AudioSystem::getStrategyForStream(stream);
 }
 
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::startMelComputation_l(
+        const sp<audio_utils::MelProcessor>& /*processor*/)
+{
+    // Do nothing
+    ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::stopMelComputation_l()
+{
+    // Do nothing
+    ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
 // ----------------------------------------------------------------------------
 //      Playback
 // ----------------------------------------------------------------------------
@@ -2030,7 +2109,8 @@
         mFastTrackAvailMask(((1 << FastMixerState::sMaxFastTracks) - 1) & ~1),
         mHwSupportsPause(false), mHwPaused(false), mFlushPending(false),
         mLeftVolFloat(-1.0), mRightVolFloat(-1.0),
-        mDownStreamPatch{}
+        mDownStreamPatch{},
+        mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
     mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
@@ -2106,9 +2186,19 @@
     if (!isStreamInitialized()) {
         ALOGE("The stream is not open yet"); // This should not happen.
     } else {
-        // setEventCallback will need a strong pointer as a parameter. Calling it
-        // here instead of constructor of PlaybackThread so that the onFirstRef
-        // callback would not be made on an incompletely constructed object.
+        // Callbacks take strong or weak pointers as a parameter.
+        // Since PlaybackThread passes itself as a callback handler, it can only
+        // be done outside of the constructor. Creating weak and especially strong
+        // pointers to a refcounted object in its own constructor is strongly
+        // discouraged, see comments in system/core/libutils/include/utils/RefBase.h.
+        // Even if a function takes a weak pointer, it is possible that it will
+        // need to convert it to a strong pointer down the line.
+        if (mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING &&
+                mOutput->stream->setCallback(this) == OK) {
+            mUseAsyncWrite = true;
+            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
+        }
+
         if (mOutput->stream->setEventCallback(this) != OK) {
             ALOGD("Failed to add event callback");
         }
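
The rewritten comment above captures a general RefBase rule: an object must not hand out strong (or promotable weak) references to itself before the first strong reference exists, which is why self-registration happens after construction rather than in the constructor. The std::shared_ptr analogue of the same constraint, as a sketch (names are illustrative):

    #include <functional>
    #include <memory>
    #include <vector>

    std::vector<std::function<void()>> gCallbacks;  // hypothetical callback registry

    class Handler : public std::enable_shared_from_this<Handler> {
    public:
        static std::shared_ptr<Handler> create() {
            auto handler = std::make_shared<Handler>();
            handler->registerSelf();   // safe: a strong reference already exists
            return handler;
        }

        void registerSelf() {
            // Calling shared_from_this() from the constructor would throw std::bad_weak_ptr.
            auto self = shared_from_this();
            gCallbacks.push_back([self] { self->onEvent(); });
        }

        void onEvent() {}
    };
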
@@ -2246,7 +2336,8 @@
         status_t *status,
         audio_port_handle_t portId,
         const sp<media::IAudioTrackCallback>& callback,
-        bool isSpatialized)
+        bool isSpatialized,
+        bool isBitPerfect)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2278,6 +2369,25 @@
         *flags = (audio_output_flags_t)(*flags & outputFlags);
     }
 
+    if (isBitPerfect) {
+        sp<EffectChain> chain = getEffectChain_l(sessionId);
+        if (chain.get() != nullptr) {
+            // Bit-perfect is required according to the configuration and preferred mixer
+            // attributes, but it is not in the output flags from the client's request. Explicitly
+            // add the bit-perfect flag to check compatibility.
+            audio_output_flags_t flagsToCheck =
+                    (audio_output_flags_t)(*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+            chain->checkOutputFlagCompatibility(&flagsToCheck);
+            if ((flagsToCheck & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE) {
+                ALOGE("%s cannot create track as there is data-processing effect attached to "
+                      "given session id(%d)", __func__, sessionId);
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
+            *flags = flagsToCheck;
+        }
+    }
+
     // client expresses a preference for FAST, but we get the final say
     if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
       if (
@@ -2458,6 +2568,18 @@
     *pNotificationFrameCount = notificationFrameCount;
 
     switch (mType) {
+    case BIT_PERFECT:
+        if (isBitPerfect) {
+            if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
+                ALOGE("%s, bad parameter when request streaming bit-perfect, sampleRate=%u, "
+                      "format=%#x, channelMask=%#x, mSampleRate=%u, mFormat=%#x, mChannelMask=%#x",
+                      __func__, sampleRate, format, channelMask, mSampleRate, mFormat,
+                      mChannelMask);
+                lStatus = BAD_VALUE;
+                goto Exit;
+            }
+        }
+        break;
 
     case DIRECT:
         if (audio_is_linear_pcm(format)) { // TODO maybe use audio_has_proportional_frames()?
@@ -2539,7 +2661,7 @@
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
                           TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
-                          speed, isSpatialized);
+                          speed, isSpatialized, isBitPerfect);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2672,6 +2794,7 @@
 
 // addTrack_l() must be called with ThreadBase::mLock held
 status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     status_t status = ALREADY_EXISTS;
 
@@ -2695,7 +2818,11 @@
             }
             // abort if start is rejected by audio policy manager
             if (status != NO_ERROR) {
-                return PERMISSION_DENIED;
+                // Do not replace the error if it is DEAD_OBJECT. When this happens, it indicates
+                // that the current playback thread has been reopened, which may happen when a
+                // client sets a preferred mixer configuration. Returning DEAD_OBJECT makes the
+                // client restore the track immediately.
+                return status == DEAD_OBJECT ? status : PERMISSION_DENIED;
             }
 #ifdef ADD_BATTERY_DATA
             // to track the speaker usage
@@ -2725,7 +2852,7 @@
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mLock.unlock();
-            const int intensity = AudioFlinger::onExternalVibrationStart(
+            const os::HapticScale intensity = AudioFlinger::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
@@ -2735,7 +2862,7 @@
                 vibratorInfo = std::move(mAudioFlinger->getDefaultVibratorInfo_l());
             }
             mLock.lock();
-            track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
+            track->setHapticIntensity(intensity);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
             }
@@ -2781,6 +2908,9 @@
     if (!trackActive) {
         removeTrack_l(track);
     } else if (track->isFastTrack() || track->isOffloaded() || track->isDirect()) {
+        if (track->isPausePending()) {
+            track->pauseAck();
+        }
         track->mState = TrackBase::STOPPING_1;
     }
 
@@ -2821,7 +2951,7 @@
     if (initCheck() == NO_ERROR && mOutput->stream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
     }
-    return String8();
+    return {};
 }
 
 status_t AudioFlinger::DirectOutputThread::selectPresentation(int presentationId, int programId) {
@@ -2875,7 +3005,14 @@
 void AudioFlinger::PlaybackThread::onCodecFormatChanged(
         const std::basic_string<uint8_t>& metadataBs)
 {
-    std::thread([this, metadataBs]() {
+    wp<AudioFlinger::PlaybackThread> weakPointerThis = this;
+    std::thread([this, metadataBs, weakPointerThis]() {
+            sp<AudioFlinger::PlaybackThread> playbackThread = weakPointerThis.promote();
+            if (playbackThread == nullptr) {
+                ALOGW("PlaybackThread was destroyed, skip codec format change event");
+                return;
+            }
+
             audio_utils::metadata::Data metadata =
                     audio_utils::metadata::dataFromByteString(metadataBs);
             if (metadata.empty()) {
@@ -2966,13 +3103,6 @@
                 mFrameCount);
     }
 
-    if (mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) {
-        if (mOutput->stream->setCallback(this) == OK) {
-            mUseAsyncWrite = true;
-            mCallbackThread = new AudioFlinger::AsyncCallbackThread(this);
-        }
-    }
-
     mHwSupportsPause = false;
     if (mOutput->flags & AUDIO_OUTPUT_FLAG_DIRECT) {
         bool supportsPause = false, supportsResume = false;
@@ -3127,21 +3257,21 @@
     item.record();
 }
 
-void AudioFlinger::PlaybackThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::PlaybackThread::updateMetadata_l()
 {
     if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
-        return; // nothing to do
+        return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
     auto backInserter = std::back_inserter(metadata.tracks);
     for (const sp<Track> &track : mActiveTracks) {
         // No track is invalid as this is called after prepareTrack_l in the same critical section
-        // Do not forward metadata for PatchTrack with unspecified stream type
-        if (track->streamType() != AUDIO_STREAM_PATCH) {
-            track->copyMetadataTo(backInserter);
-        }
+        track->copyMetadataTo(backInserter);
     }
     sendMetadataToBackend_l(metadata);
+    MetadataUpdate change;
+    change.playbackMetadataUpdate = metadata.tracks;
+    return change;
 }
 
 void AudioFlinger::PlaybackThread::sendMetadataToBackend_l(
@@ -3252,7 +3382,7 @@
 }
 
 void AudioFlinger::PlaybackThread::threadLoop_removeTracks(
-        const Vector< sp<Track> >& tracksToRemove)
+        [[maybe_unused]] const Vector< sp<Track> >& tracksToRemove)
 {
     // Miscellaneous track cleanup when removed from the active list,
     // called without Thread lock but synchronized with threadLoop processing.
@@ -3263,8 +3393,6 @@
             addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
         }
     }
-#else
-    (void)tracksToRemove; // suppress unused warning
 #endif
 }
 
@@ -3319,8 +3447,10 @@
         }
         ssize_t framesWritten = mNormalSink->write((char *)mSinkBuffer + offset, count);
         ATRACE_END();
+
         if (framesWritten > 0) {
             bytesWritten = framesWritten * mFrameSize;
+
 #ifdef TEE_SINK
             mTee.write((char *)mSinkBuffer + offset, framesWritten);
 #endif
@@ -3363,6 +3493,25 @@
     return bytesWritten;
 }
 
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::startMelComputation_l(
+        const sp<audio_utils::MelProcessor>& processor)
+{
+    auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+    if (outputSink != nullptr) {
+        outputSink->startMelComputation(processor);
+    }
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::stopMelComputation_l()
+{
+    auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+    if (outputSink != nullptr) {
+        outputSink->stopMelComputation();
+    }
+}
+
 void AudioFlinger::PlaybackThread::threadLoop_drain()
 {
     bool supportsDrain = false;
@@ -3421,9 +3570,10 @@
     mActiveSleepTimeUs = activeSleepTimeUs();
     mIdleSleepTimeUs = idleSleepTimeUs();
 
+    mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
+
     // make sure standby delay is not too short when connected to an A2DP sink to avoid
     // truncating audio when going to standby.
-    mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
     if (!Intersection(outDeviceTypes(),  getAudioDeviceOutAllA2dpSet()).empty()) {
         if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
             mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
@@ -3453,6 +3603,28 @@
     invalidateTracks_l(streamType);
 }
 
+void AudioFlinger::PlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+    Mutex::Autolock _l(mLock);
+    invalidateTracks_l(portIds);
+}
+
+bool AudioFlinger::PlaybackThread::invalidateTracks_l(std::set<audio_port_handle_t>& portIds) {
+    bool trackMatch = false;
+    const size_t size = mTracks.size();
+    for (size_t i = 0; i < size; i++) {
+        sp<Track> t = mTracks[i];
+        if (t->isExternalTrack() && portIds.find(t->portId()) != portIds.end()) {
+            t->invalidate();
+            portIds.erase(t->portId());
+            trackMatch = true;
+        }
+        if (portIds.empty()) {
+            break;
+        }
+    }
+    return trackMatch;
+}
+
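A minimal usage sketch for the per-port invalidation added above (the thread pointer and port handles are hypothetical, not taken from this change): handles left in the set after the call matched no external track on this thread, so a caller can retry them elsewhere.

    // Hypothetical caller; assumes 'playbackThread' is a valid sp<PlaybackThread>.
    std::set<audio_port_handle_t> portIds = {42, 43};  // example port handles
    playbackThread->invalidateTracks(portIds);
    // Any handle still in portIds was not found here and can be tried on another playback thread.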
 // getTrackById_l must be called with holding thread lock
 AudioFlinger::PlaybackThread::Track* AudioFlinger::PlaybackThread::getTrackById_l(
         audio_port_handle_t trackPortId) {
@@ -3497,9 +3669,9 @@
             if (result != OK) return result;
 
 #ifdef FLOAT_EFFECT_CHAIN
-            buffer = halInBuffer->audioBuffer()->f32;
+            buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
 #else
-            buffer = halInBuffer->audioBuffer()->s16;
+            buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
 #endif
             ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                     buffer, session);
@@ -3528,21 +3700,22 @@
         halOutBuffer = halInBuffer;
         ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
         if (!audio_is_global_session(session)) {
-            buffer = reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData());
+            buffer = halInBuffer ? reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData())
+                                 : buffer;
             // Only one effect chain can be present in direct output thread and it uses
             // the sink buffer as input
             if (mType != DIRECT) {
                 size_t numSamples = mNormalFrameCount
                         * (audio_channel_count_from_out_mask(mMixerChannelMask)
                                                              + mHapticChannelCount);
-                status_t result = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
+                const status_t allocateStatus = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
                         numSamples * sizeof(effect_buffer_t),
                         &halInBuffer);
-                if (result != OK) return result;
+                if (allocateStatus != OK) return allocateStatus;
 #ifdef FLOAT_EFFECT_CHAIN
-                buffer = halInBuffer->audioBuffer()->f32;
+                buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
 #else
-                buffer = halInBuffer->audioBuffer()->s16;
+                buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
 #endif
                 ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                         buffer, session);
@@ -3624,8 +3797,8 @@
             }
 
             // detach all tracks with same session ID from this chain
-            for (size_t i = 0; i < mTracks.size(); ++i) {
-                sp<Track> track = mTracks[i];
+            for (size_t j = 0; j < mTracks.size(); ++j) {
+                sp<Track> track = mTracks[j];
                 if (session == track->sessionId()) {
                     track->setMainBuffer(reinterpret_cast<effect_buffer_t*>(mSinkBuffer));
                     chain->decTrackCnt();
@@ -3678,6 +3851,7 @@
 }
 
 bool AudioFlinger::PlaybackThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS  // manual locking of AudioFlinger
 {
     tlNBLogWriter = mNBLogWriter.get();
 
@@ -3746,7 +3920,7 @@
             // is more informational.
             if (mAudioFlinger->mLock.tryLock() == NO_ERROR) {
                 std::vector<PatchPanel::SoftwarePatch> swPatches;
-                double latencyMs;
+                double latencyMs = 0.; // not required; initialized for clang-tidy
                 status_t status = INVALID_OPERATION;
                 audio_patch_handle_t downstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
                 if (mAudioFlinger->mPatchPanel.getDownstreamSoftwarePatches(id(), &swPatches) == OK
@@ -3766,8 +3940,7 @@
                         ALOGVV("new downstream latency %lf ms", latencyMs);
                     } else {
                         ALOGD("out of range downstream latency %lf ms", latencyMs);
-                        if (latencyMs < minLatency) latencyMs = minLatency;
-                        else if (latencyMs > maxLatency) latencyMs = maxLatency;
+                        latencyMs = std::clamp(latencyMs, minLatency, maxLatency);
                     }
                     mDownstreamLatencyStatMs.add(latencyMs);
                 }
@@ -3785,6 +3958,7 @@
             checkOutputStageEffects();
         }
 
+        MetadataUpdate metadataUpdate;
         { // scope for mLock
 
             Mutex::Autolock _l(mLock);
@@ -3844,7 +4018,7 @@
                         LOG_AUDIO_STATE();
                         mThreadMetrics.logEndInterval();
                         mThreadSnapshot.onEnd();
-                        mStandby = true;
+                        setStandby_l();
                     }
                     sendStatistics(false /* force */);
                 }
@@ -3886,7 +4060,7 @@
 
             mActiveTracks.updatePowerState(this);
 
-            updateMetadata_l();
+            metadataUpdate = updateMetadata_l();
 
             // prevent any changes in effect chain list and in each effect chain
             // during mixing and effect process as the audio buffers could be deleted
@@ -3922,6 +4096,21 @@
             // stop(), pause(), etc.), but the threadLoop is entitled to call audio
             // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
             activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
+
+            setHalLatencyMode_l();
+
+            for (const auto &track : mActiveTracks ) {
+                track->updateTeePatches();
+            }
+
+            // signal actual start of output stream when the render position reported by the kernel
+            // starts moving.
+            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+                    && (mKernelPositionOnStandby
+                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+                mHalStarted = true;
+                mWaitHalStartCV.broadcast();
+            }
         } // mLock scope ends
 
         if (mBytesRemaining == 0) {
@@ -3954,7 +4143,8 @@
             // Either threadLoop_mix() or threadLoop_sleepTime() should have set
             // mMixerBuffer with data if mMixerBufferValid is true and mSleepTimeUs == 0.
             // Merge mMixerBuffer data into mEffectBuffer (if any effects are valid)
-            // or mSinkBuffer (if there are no effects).
+            // or mSinkBuffer (if there are no effects and there is no data already copied to
+            // mSinkBuffer).
             //
             // This is done pre-effects computation; if effects change to
             // support higher precision, this needs to move.
@@ -3963,23 +4153,28 @@
             // TODO use mSleepTimeUs == 0 as an additional condition.
             uint32_t mixerChannelCount = mEffectBufferValid ?
                         audio_channel_count_from_out_mask(mMixerChannelMask) : mChannelCount;
-            if (mMixerBufferValid) {
+            if (mMixerBufferValid && (mEffectBufferValid || !mHasDataCopiedToSinkBuffer)) {
                 void *buffer = mEffectBufferValid ? mEffectBuffer : mSinkBuffer;
                 audio_format_t format = mEffectBufferValid ? mEffectBufferFormat : mFormat;
 
-                // mono blend occurs for mixer threads only (not direct or offloaded)
-                // and is handled here if we're going directly to the sink.
-                if (requireMonoBlend() && !mEffectBufferValid) {
-                    mono_blend(mMixerBuffer, mMixerBufferFormat, mChannelCount, mNormalFrameCount,
-                               true /*limit*/);
-                }
+                // Apply mono blending and balancing if the effect buffer is not valid. Otherwise,
+                // do these processes after effects are applied.
+                if (!mEffectBufferValid) {
+                    // mono blend occurs for mixer threads only (not direct or offloaded)
+                    // and is handled here if we're going directly to the sink.
+                    if (requireMonoBlend()) {
+                        mono_blend(mMixerBuffer, mMixerBufferFormat, mChannelCount,
+                                mNormalFrameCount, true /*limit*/);
+                    }
 
-                if (!hasFastMixer()) {
-                    // Balance must take effect after mono conversion.
-                    // We do it here if there is no FastMixer.
-                    // mBalance detects zero balance within the class for speed (not needed here).
-                    mBalance.setBalance(mMasterBalance.load());
-                    mBalance.process((float *)mMixerBuffer, mNormalFrameCount);
+                    if (!hasFastMixer()) {
+                        // Balance must take effect after mono conversion.
+                        // We do it here if there is no FastMixer.
+                        // mBalance detects zero balance within the class for speed
+                        // (not needed here).
+                        mBalance.setBalance(mMasterBalance.load());
+                        mBalance.process((float *)mMixerBuffer, mNormalFrameCount);
+                    }
                 }
 
                 memcpy_by_audio_format(buffer, format, mMixerBuffer, mMixerBufferFormat,
@@ -4049,7 +4244,7 @@
         // Effects buffer (buffer valid), we need to
         // copy into the sink buffer.
         // TODO use mSleepTimeUs == 0 as an additional condition.
-        if (mEffectBufferValid) {
+        if (mEffectBufferValid && !mHasDataCopiedToSinkBuffer) {
             //ALOGV("writing effect buffer to sink buffer format %#x", mFormat);
             void *effectBuffer = (mType == SPATIALIZER) ? mPostSpatializerBuffer : mEffectBuffer;
             if (requireMonoBlend()) {
@@ -4082,10 +4277,19 @@
                                        mEffectBufferFormat,
                                        mNormalFrameCount * mHapticChannelCount);
             }
-
-            memcpy_by_audio_format(mSinkBuffer, mFormat, effectBuffer, mEffectBufferFormat,
-                    mNormalFrameCount * (mChannelCount + mHapticChannelCount));
-
+            const size_t framesToCopy = mNormalFrameCount * (mChannelCount + mHapticChannelCount);
+            if (mFormat == AUDIO_FORMAT_PCM_FLOAT &&
+                    mEffectBufferFormat == AUDIO_FORMAT_PCM_FLOAT) {
+                // Clamp PCM float values more than this distance from 0 to insulate
+                // a HAL which doesn't handle NaN correctly.
+                static constexpr float HAL_FLOAT_SAMPLE_LIMIT = 2.0f;
+                memcpy_to_float_from_float_with_clamping(static_cast<float*>(mSinkBuffer),
+                        static_cast<const float*>(effectBuffer),
+                        framesToCopy, HAL_FLOAT_SAMPLE_LIMIT /* absMax */);
+            } else {
+                memcpy_by_audio_format(mSinkBuffer, mFormat,
+                        effectBuffer, mEffectBufferFormat, framesToCopy);
+            }
             // The sample data is partially interleaved when haptic channels exist,
             // we need to adjust channels here.
             if (mHapticChannelCount > 0) {
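The branch above clamps float samples to ±2.0 (HAL_FLOAT_SAMPLE_LIMIT) before they reach the HAL. A minimal standalone sketch of that idea follows; mapping NaN to 0 is an assumption made for illustration and is not necessarily how memcpy_to_float_from_float_with_clamping behaves.

    #include <algorithm>
    #include <cmath>
    #include <cstddef>

    // Sketch: copy float samples while limiting them to [-absMax, +absMax] so the HAL
    // never sees out-of-range data. Treating NaN as 0 here is an assumption.
    static void copyWithClamp(float* dst, const float* src, size_t count, float absMax) {
        for (size_t i = 0; i < count; ++i) {
            const float v = src[i];
            dst[i] = std::isnan(v) ? 0.0f : std::clamp(v, -absMax, absMax);
        }
    }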
@@ -4099,6 +4303,11 @@
         // enable changes in effect chain
         unlockEffectChains(effectChains);
 
+        if (!metadataUpdate.playbackMetadataUpdate.empty()) {
+            mAudioFlinger->mMelReporter->updateMetadataForCsd(id(),
+                    metadataUpdate.playbackMetadataUpdate);
+        }
+
         if (!waitingAsyncCallback()) {
             // mSleepTimeUs == 0 means we must write to audio hardware
             if (mSleepTimeUs == 0) {
@@ -4287,7 +4496,7 @@
 
     if (!mStandby) {
         threadLoop_standby();
-        mStandby = true;
+        setStandby();
     }
 
     releaseWakeLock();
@@ -4433,6 +4642,7 @@
 
 // removeTracks_l() must be called with ThreadBase::mLock held
 void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     for (const auto& track : tracksToRemove) {
         mActiveTracks.remove(track);
@@ -4467,7 +4677,7 @@
             // When the track is stopped, set the haptic intensity to MUTE
             // for the HapticGenerator effect.
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+                chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
             }
         }
     }
@@ -4535,6 +4745,8 @@
     } else {
         status = PlaybackThread::createAudioPatch_l(patch, handle);
     }
+
+    updateHalSupportedLatencyModes_l();
     return status;
 }
 
@@ -4553,8 +4765,8 @@
                             "as it does not support audio patches",
                             patch->sinks[i].ext.device.type);
         type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
-        deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
-                patch->sinks[i].ext.device.address));
+        deviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+                patch->sinks[i].ext.device.address);
     }
 
     audio_port_handle_t sinkPortId = patch->sinks[0].id;
@@ -4616,6 +4828,9 @@
     if (configChanged) {
         sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -4646,6 +4861,9 @@
     } else {
         status = mOutput->stream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -4680,6 +4898,7 @@
     :   PlaybackThread(audioFlinger, output, id, type, systemReady, mixerConfig),
         // mAudioMixer below
         // mFastMixer below
+        mBluetoothLatencyModesEnabled(false),
         mFastMixerFutex(0),
         mMasterMono(false)
         // mOutputSink below
@@ -4715,7 +4934,7 @@
 
     // initialize fast mixer depending on configuration
     bool initFastMixer;
-    if (mType == SPATIALIZER) {
+    if (mType == SPATIALIZER || mType == BIT_PERFECT) {
         initFastMixer = false;
     } else {
         switch (kUseFastMixer) {
@@ -4762,14 +4981,15 @@
         // When it wakes up after a maximum latency, it runs a few cycles quickly before
         // finally blocking.  Note the pipe implementation rounds up the request to a power of 2.
         MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/);
-        const NBAIO_Format offers[1] = {format};
-        size_t numCounterOffers = 0;
+        const NBAIO_Format offersFast[1] = {format};
+        size_t numCounterOffersFast = 0;
 #if !LOG_NDEBUG
-        ssize_t index =
+        index =
 #else
         (void)
 #endif
-                monoPipe->negotiate(offers, 1, NULL, numCounterOffers);
+                monoPipe->negotiate(offersFast, std::size(offersFast),
+                        nullptr /* counterOffers */, numCounterOffersFast);
         ALOG_ASSERT(index == 0);
         monoPipe->setAvgFrames((mScreenState & 1) ?
                 (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2);
@@ -4895,6 +5115,21 @@
     delete mAudioMixer;
 }
 
+void AudioFlinger::MixerThread::onFirstRef() {
+    PlaybackThread::onFirstRef();
+
+    Mutex::Autolock _l(mLock);
+    if (mOutput != nullptr && mOutput->stream != nullptr) {
+        status_t status = mOutput->stream->setLatencyModeCallback(this);
+        if (status != INVALID_OPERATION) {
+            updateHalSupportedLatencyModes_l();
+        }
+        // Default to enabled if the HAL supports it. This can be changed by AudioFlinger after
+        // thread construction according to AudioFlinger::mBluetoothLatencyModesEnabled.
+        mBluetoothLatencyModesEnabled.store(
+                mOutput->audioHwDev->supportsBluetoothVariableLatency());
+    }
+}
 
 uint32_t AudioFlinger::MixerThread::correctLatency_l(uint32_t latency) const
 {
@@ -5016,6 +5251,7 @@
         mCallbackThread->setDraining(mDrainSequence);
     }
     mHwPaused = false;
+    setHalLatencyMode_l();
 }
 
 void AudioFlinger::PlaybackThread::onAddNewTrack_l()
@@ -5180,7 +5416,7 @@
         // tallyUnderrunFrames() is called to update the track counters
         // with the number of underrun frames for a particular mixer period.
         // We defer tallying until we know the final mixer status.
-        void tallyUnderrunFrames(sp<Track> track, size_t underrunFrames) {
+        void tallyUnderrunFrames(const sp<Track>& track, size_t underrunFrames) {
             mUnderrunFrames.emplace_back(track, underrunFrames);
         }
 
@@ -5357,10 +5593,21 @@
                 volume *= vh;
                 track->mCachedVolume = volume;
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
-                float vlf = volume * float_from_gain(gain_minifloat_unpack_left(vlr));
-                float vrf = volume * float_from_gain(gain_minifloat_unpack_right(vlr));
+                float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
+                float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
 
-                track->setFinalVolume((vlf + vrf) / 2.f);
+                track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                    /*muteState=*/{masterVolume == 0.f,
+                                   mStreamTypes[track->streamType()].volume == 0.f,
+                                   mStreamTypes[track->streamType()].mute,
+                                   track->isPlaybackRestricted(),
+                                   vlf == 0.f && vrf == 0.f,
+                                   vh == 0.f});
+
+                vlf *= volume;
+                vrf *= volume;
+
+                track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
                 // was it previously active?
@@ -5429,7 +5676,7 @@
         // during last round
         size_t desiredFrames;
         const uint32_t sampleRate = track->mAudioTrackServerProxy->getSampleRate();
-        AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
+        const AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
 
         desiredFrames = sourceFramesNeededWithTimestretch(
                 sampleRate, mNormalFrameCount, mSampleRate, playbackRate.mSpeed);
@@ -5531,6 +5778,15 @@
                     ALOGV("Track right volume out of range: %.3g", vrf);
                     vrf = GAIN_FLOAT_UNITY;
                 }
+
+                track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                    /*muteState=*/{masterVolume == 0.f,
+                                   mStreamTypes[track->streamType()].volume == 0.f,
+                                   mStreamTypes[track->streamType()].mute,
+                                   track->isPlaybackRestricted(),
+                                   vlf == 0.f && vrf == 0.f,
+                                   vh == 0.f});
+
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
                 vrf *= v * vh;
@@ -5550,7 +5806,7 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume((vrf + vlf) / 2.f);
+            track->setFinalVolume(vrf, vlf);
 
             // Delegate volume control to effect in track effect chain if needed
             if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -5613,12 +5869,12 @@
                 AudioMixer::SAMPLE_RATE,
                 (void *)(uintptr_t)reqSampleRate);
 
-            AudioPlaybackRate playbackRate = proxy->getPlaybackRate();
             mAudioMixer->setParameter(
                 trackId,
                 AudioMixer::TIMESTRETCH,
                 AudioMixer::PLAYBACK_RATE,
-                &playbackRate);
+                // cast away constness for this generic API.
+                const_cast<void *>(reinterpret_cast<const void *>(&playbackRate)));
 
             /*
              * Select the appropriate output buffer for the track.
@@ -5770,7 +6026,7 @@
     }
 
     // Push the new FastMixer state if necessary
-    bool pauseAudioWatchdog = false;
+    [[maybe_unused]] bool pauseAudioWatchdog = false;
     if (didModify) {
         state->mFastTracksGen++;
         // if the fast mixer was active, but now there are no fast tracks, then put it in cold idle
@@ -5889,18 +6145,35 @@
     return trackCount;
 }
 
-bool AudioFlinger::PlaybackThread::checkRunningTimestamp()
+bool AudioFlinger::PlaybackThread::IsTimestampAdvancing::check(AudioStreamOut * output)
 {
+    // Check at most once every 150ms whether the timestamp is advancing. If we check too
+    // frequently, we could falsely detect that the frame position has stalled due to underrun
+    // because we haven't given the Audio HAL enough time to update.
+    const nsecs_t nowNs = systemTime();
+    if (nowNs - mPreviousNs < mMinimumTimeBetweenChecksNs) {
+        return mLatchedValue;
+    }
+    mPreviousNs = nowNs;
+    mLatchedValue = false;
+    // Determine if the presentation position is still advancing.
     uint64_t position = 0;
     struct timespec unused;
-    const status_t ret = mOutput->getPresentationPosition(&position, &unused);
+    const status_t ret = output->getPresentationPosition(&position, &unused);
     if (ret == NO_ERROR) {
-        if (position != mLastCheckedTimestampPosition) {
-            mLastCheckedTimestampPosition = position;
-            return true;
+        if (position != mPreviousPosition) {
+            mPreviousPosition = position;
+            mLatchedValue = true;
         }
     }
-    return false;
+    return mLatchedValue;
+}
+
+void AudioFlinger::PlaybackThread::IsTimestampAdvancing::clear()
+{
+    mLatchedValue = true;
+    mPreviousPosition = 0;
+    mPreviousNs = 0;
 }
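IsTimestampAdvancing rate-limits an expensive presentation-position query and latches the last answer between checks. The sketch below restates that throttle-and-latch pattern in portable C++ with std::chrono instead of nsecs_t/systemTime(); it is an illustration of the pattern, not the AudioFlinger implementation.

    #include <chrono>

    // Re-evaluate a predicate at most once per interval; otherwise return the latched result.
    class ThrottledCheck {
    public:
        explicit ThrottledCheck(std::chrono::nanoseconds minInterval) : mMinInterval(minInterval) {}

        template <typename Predicate>
        bool check(Predicate&& isAdvancing) {
            const auto now = std::chrono::steady_clock::now();
            if (now - mPrevious < mMinInterval) {
                return mLatched;          // checked too recently, reuse the latched value
            }
            mPrevious = now;
            mLatched = isAdvancing();     // re-evaluate and latch
            return mLatched;
        }

        void clear() {                    // reset to "assume advancing", as in clear() above
            mLatched = true;
            mPrevious = {};
        }

    private:
        const std::chrono::nanoseconds mMinInterval;
        std::chrono::steady_clock::time_point mPrevious{};
        bool mLatched = true;
    };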
 
 // isTrackAllowed_l() must be called with ThreadBase::mLock held
@@ -5970,12 +6243,12 @@
     if (status == NO_ERROR) {
         status = mOutput->stream->setParameters(keyValuePair);
         if (!mStandby && status == INVALID_OPERATION) {
+            ALOGW("%s: setParameters failed with keyValuePair %s, entering standby",
+                    __func__, keyValuePair.c_str());
             mOutput->standby();
-            if (!mStandby) {
-                mThreadMetrics.logEndInterval();
-                mThreadSnapshot.onEnd();
-                mStandby = true;
-            }
+            mThreadMetrics.logEndInterval();
+            mThreadSnapshot.onEnd();
+            setStandby_l();
             mBytesWritten = 0;
             status = mOutput->stream->setParameters(keyValuePair);
         }
@@ -5985,12 +6258,12 @@
             mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate);
             for (const auto &track : mTracks) {
                 const int trackId = track->id();
-                status_t status = mAudioMixer->create(
+                const status_t createStatus = mAudioMixer->create(
                         trackId,
                         track->mChannelMask,
                         track->mFormat,
                         track->mSessionId);
-                ALOGW_IF(status != NO_ERROR,
+                ALOGW_IF(createStatus != NO_ERROR,
                         "%s(): AudioMixer cannot create track(%d)"
                         " mask %#x, format %#x, sessionId %d",
                         __func__,
@@ -6043,6 +6316,12 @@
     } else {
         dprintf(fd, "  No FastMixer\n");
     }
+
+     dprintf(fd, "Bluetooth latency modes are %senabled\n",
+            mBluetoothLatencyModesEnabled ? "" : "not ");
+     dprintf(fd, "HAL does %ssupport Bluetooth latency modes\n", mOutput != nullptr &&
+             mOutput->audioHwDev->supportsBluetoothVariableLatency() ? "" : "not ");
+     dprintf(fd, "Supported latency modes: %s\n", toString(mSupportedLatencyModes).c_str());
 }
 
 uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const
@@ -6066,11 +6345,107 @@
     maxPeriod = seconds(mNormalFrameCount) / mSampleRate * 15;
 }
 
+void AudioFlinger::MixerThread::onHalLatencyModesChanged_l() {
+    mAudioFlinger->onSupportedLatencyModesChanged(mId, mSupportedLatencyModes);
+}
+
+void AudioFlinger::MixerThread::setHalLatencyMode_l() {
+    // Only handle latency mode if:
+    // - mBluetoothLatencyModesEnabled is true
+    // - the HAL supports latency modes
+    // - the selected device is Bluetooth LE or A2DP
+    if (!mBluetoothLatencyModesEnabled.load() || mSupportedLatencyModes.empty()) {
+        return;
+    }
+    if (mOutDeviceTypeAddrs.size() != 1
+            || !(audio_is_a2dp_out_device(mOutDeviceTypeAddrs[0].mType)
+                 || audio_is_ble_out_device(mOutDeviceTypeAddrs[0].mType))) {
+        return;
+    }
+
+    audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
+    if (mSupportedLatencyModes.size() == 1) {
+        // If the HAL currently supports only one latency mode, confirm the choice
+        latencyMode = mSupportedLatencyModes[0];
+    } else if (mSupportedLatencyModes.size() > 1) {
+        // Request low latency if:
+        // - At least one active track is either:
+        //   - a fast track with gaming usage or
+        //   - a track with accessibility usage
+        for (const auto& track : mActiveTracks) {
+            if ((track->isFastTrack() && track->attributes().usage == AUDIO_USAGE_GAME)
+                    || track->attributes().usage == AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY) {
+                latencyMode = AUDIO_LATENCY_MODE_LOW;
+                break;
+            }
+        }
+    }
+
+    if (latencyMode != mSetLatencyMode) {
+        status_t status = mOutput->stream->setLatencyMode(latencyMode);
+        ALOGD("%s: thread(%d) setLatencyMode(%s) returned %d",
+                __func__, mId, toString(latencyMode).c_str(), status);
+        if (status == NO_ERROR) {
+            mSetLatencyMode = latencyMode;
+        }
+    }
+}
+
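The selection policy in setHalLatencyMode_l() can be summarized as a small pure function. The helper below is a hedged sketch with hypothetical names, not part of this change; 'wantsLowLatency' stands in for the scan over active tracks (fast gaming track or accessibility track) performed above.

    #include <vector>
    #include <system/audio.h>  // audio_latency_mode_t; include path assumed

    // Confirm the single offered mode; otherwise request LOW only when a use case justifies it.
    static audio_latency_mode_t pickLatencyMode(
            const std::vector<audio_latency_mode_t>& supported, bool wantsLowLatency) {
        if (supported.size() == 1) {
            return supported[0];
        }
        if (supported.size() > 1 && wantsLowLatency) {
            return AUDIO_LATENCY_MODE_LOW;
        }
        return AUDIO_LATENCY_MODE_FREE;
    }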
+void AudioFlinger::MixerThread::updateHalSupportedLatencyModes_l() {
+
+    if (mOutput == nullptr || mOutput->stream == nullptr) {
+        return;
+    }
+    std::vector<audio_latency_mode_t> latencyModes;
+    const status_t status = mOutput->stream->getRecommendedLatencyModes(&latencyModes);
+    if (status != NO_ERROR) {
+        latencyModes.clear();
+    }
+    if (latencyModes != mSupportedLatencyModes) {
+        ALOGD("%s: thread(%d) status %d supported latency modes: %s",
+            __func__, mId, status, toString(latencyModes).c_str());
+        mSupportedLatencyModes.swap(latencyModes);
+        sendHalLatencyModesChangedEvent_l();
+    }
+}
+
+status_t AudioFlinger::MixerThread::getSupportedLatencyModes(
+        std::vector<audio_latency_mode_t>* modes) {
+    if (modes == nullptr) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    *modes = mSupportedLatencyModes;
+    return NO_ERROR;
+}
+
+void AudioFlinger::MixerThread::onRecommendedLatencyModeChanged(
+        std::vector<audio_latency_mode_t> modes) {
+    Mutex::Autolock _l(mLock);
+    if (modes != mSupportedLatencyModes) {
+        ALOGD("%s: thread(%d) supported latency modes: %s",
+            __func__, mId, toString(modes).c_str());
+        mSupportedLatencyModes.swap(modes);
+        sendHalLatencyModesChangedEvent_l();
+    }
+}
+
+status_t AudioFlinger::MixerThread::setBluetoothVariableLatencyEnabled(bool enabled) {
+    if (mOutput == nullptr || mOutput->audioHwDev == nullptr
+            || !mOutput->audioHwDev->supportsBluetoothVariableLatency()) {
+        return INVALID_OPERATION;
+    }
+    mBluetoothLatencyModesEnabled.store(enabled);
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 AudioFlinger::DirectOutputThread::DirectOutputThread(const sp<AudioFlinger>& audioFlinger,
-        AudioStreamOut* output, audio_io_handle_t id, ThreadBase::type_t type, bool systemReady)
+        AudioStreamOut* output, audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
+        const audio_offload_info_t& offloadInfo)
     :   PlaybackThread(audioFlinger, output, id, type, systemReady)
+    , mOffloadInfo(offloadInfo)
 {
     setMasterBalance(audioFlinger->getMasterBalance_l());
 }
@@ -6102,31 +6477,57 @@
 
     // Ensure volumeshaper state always advances even when muted.
     const sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
-    const auto [shaperVolume, shaperActive] = track->getVolumeHandler()->getVolume(
-            proxy->framesReleased());
+
+    const size_t framesReleased = proxy->framesReleased();
+    const int64_t frames = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+    const int64_t time = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+    ALOGV("%s: Direct/Offload bufferConsumed:%zu  timestamp frames:%lld  time:%lld",
+            __func__, framesReleased, (long long)frames, (long long)time);
+
+    const int64_t volumeShaperFrames =
+            mMonotonicFrameCounter.updateAndGetMonotonicFrameCount(frames, time);
+    const auto [shaperVolume, shaperActive] =
+            track->getVolumeHandler()->getVolume(volumeShaperFrames);
     mVolumeShaperActive = shaperActive;
 
+    gain_minifloat_packed_t vlr = proxy->getVolumeLR();
+    left = float_from_gain(gain_minifloat_unpack_left(vlr));
+    right = float_from_gain(gain_minifloat_unpack_right(vlr));
+
+    const bool clientVolumeMute = (left == 0.f && right == 0.f);
+
     if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
         left = right = 0;
     } else {
         float typeVolume = mStreamTypes[track->streamType()].volume;
         const float v = mMasterVolume * typeVolume * shaperVolume;
 
-        gain_minifloat_packed_t vlr = proxy->getVolumeLR();
-        left = float_from_gain(gain_minifloat_unpack_left(vlr));
         if (left > GAIN_FLOAT_UNITY) {
             left = GAIN_FLOAT_UNITY;
         }
-        left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-        right = float_from_gain(gain_minifloat_unpack_right(vlr));
         if (right > GAIN_FLOAT_UNITY) {
             right = GAIN_FLOAT_UNITY;
         }
-        right *= v * mMasterBalanceRight;
+        left *= v;
+        right *= v;
+        if (mAudioFlinger->getMode() != AUDIO_MODE_IN_COMMUNICATION
+                || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+            left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+            right *= mMasterBalanceRight;
+        }
     }
 
+    track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+        /*muteState=*/{mMasterMute,
+                       mStreamTypes[track->streamType()].volume == 0.f,
+                       mStreamTypes[track->streamType()].mute,
+                       track->isPlaybackRestricted(),
+                       clientVolumeMute,
+                       shaperVolume == 0.f});
+
     if (lastTrack) {
-        track->setFinalVolume((left + right) / 2.f);
+        track->setFinalVolume(left, right);
         if (left != mLeftVolFloat || right != mRightVolFloat) {
             mLeftVolFloat = left;
             mRightVolFloat = right;
@@ -6160,7 +6561,8 @@
                 mFlushPending = true;
             }
         } else /* mType == OFFLOAD */ {
-            if (previousTrack->sessionId() != latestTrack->sessionId()) {
+            if (previousTrack->sessionId() != latestTrack->sessionId() ||
+                previousTrack->isFlushPending()) {
                 mFlushPending = true;
             }
         }
@@ -6335,9 +6737,10 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 // Only consider last track started for mixer state control
-                if (--(track->mRetryCount) <= 0) {
-                    const bool running = checkRunningTimestamp();
-                    if (running) { // still running, give us more time.
+                bool isTimestampAdvancing = mIsTimestampAdvancing.check(mOutput);
+                if (!isTunerStream()  // tuner streams remain active in underrun
+                        && --(track->mRetryCount) <= 0) {
+                    if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->mRetryCount = kMaxTrackRetriesOffload;
                     } else {
                         ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
@@ -6380,6 +6783,7 @@
             (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
         status_t result = mOutput->stream->pause();
         ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
+        doHwResume = !doHwPause;  // resume if pause is due to flush.
     }
     if (mFlushPending) {
         flushHw_l();
@@ -6500,7 +6904,7 @@
             if (!mStandby) {
                 mThreadMetrics.logEndInterval();
                 mThreadSnapshot.onEnd();
-                mStandby = true;
+                setStandby_l();
             }
             mBytesWritten = 0;
             status = mOutput->stream->setParameters(keyValuePair);
@@ -6571,6 +6975,7 @@
     mFlushPending = false;
     mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
+    mMonotonicFrameCounter.onFlush();
 }
 
 int64_t AudioFlinger::DirectOutputThread::computeWaitTimeNs_l() const {
@@ -6699,8 +7104,9 @@
 
 // ----------------------------------------------------------------------------
 AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
-        AudioStreamOut* output, audio_io_handle_t id, bool systemReady)
-    :   DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady),
+        AudioStreamOut* output, audio_io_handle_t id, bool systemReady,
+        const audio_offload_info_t& offloadInfo)
+    :   DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady, offloadInfo),
         mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true)
 {
     //FIXME: mStandby should be set to true by ThreadBase constructor
@@ -6918,9 +7324,10 @@
             } else {
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
-                if (--(track->mRetryCount) <= 0) {
-                    const bool running = checkRunningTimestamp();
-                    if (running) { // still running, give us more time.
+                bool isTimestampAdvancing = mIsTimestampAdvancing.check(mOutput);
+                if (!isTunerStream()  // tuner streams remain active in underrun
+                        && --(track->mRetryCount) <= 0) {
+                    if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->mRetryCount = kMaxTrackRetriesOffload;
                     } else {
                         ALOGV("OffloadThread: BUFFER TIMEOUT: remove track(%d) from active list",
@@ -6948,6 +7355,7 @@
     if (!mStandby && (doHwPause || (mFlushPending && !mHwPaused && (count != 0)))) {
         status_t result = mOutput->stream->pause();
         ALOGE_IF(result != OK, "Error when pausing output stream: %d", result);
+        doHwResume = !doHwPause;  // resume if pause is due to flush.
     }
     if (mFlushPending) {
         flushHw_l();
@@ -7009,6 +7417,13 @@
     }
 }
 
+void AudioFlinger::OffloadThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+    Mutex::Autolock _l(mLock);
+    if (PlaybackThread::invalidateTracks_l(portIds)) {
+        mFlushPending = true;
+    }
+}
+
 // ----------------------------------------------------------------------------
 
 AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
@@ -7030,7 +7445,7 @@
 void AudioFlinger::DuplicatingThread::threadLoop_mix()
 {
     // mix buffers...
-    if (outputsReady(outputTracks)) {
+    if (outputsReady()) {
         mAudioMixer->process();
     } else {
         if (mMixerBufferValid) {
@@ -7101,7 +7516,7 @@
     }
 }
 
-void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MixerThread::dumpInternals_l(fd, args);
 
@@ -7205,9 +7620,7 @@
     }
 }
 
-
-bool AudioFlinger::DuplicatingThread::outputsReady(
-        const SortedVector< sp<OutputTrack> > &outputTracks)
+bool AudioFlinger::DuplicatingThread::outputsReady()
 {
     for (size_t i = 0; i < outputTracks.size(); i++) {
         sp<ThreadBase> thread = outputTracks[i]->thread().promote();
@@ -7259,6 +7672,68 @@
 {
 }
 
+void AudioFlinger::SpatializerThread::onFirstRef() {
+    MixerThread::onFirstRef();
+
+    const pid_t tid = getTid();
+    if (tid == -1) {
+        // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+        ALOGW("%s: Cannot update Spatializer mixer thread priority, not running", __func__);
+    } else {
+        const int priorityBoost = requestSpatializerPriority(getpid(), tid);
+        if (priorityBoost > 0) {
+            stream()->setHalThreadPriority(priorityBoost);
+        }
+    }
+}
+
+void AudioFlinger::SpatializerThread::setHalLatencyMode_l() {
+    // if mSupportedLatencyModes is empty, the HAL stream does not support
+    // latency mode control and we can exit.
+    if (mSupportedLatencyModes.empty()) {
+        return;
+    }
+    audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
+    if (mSupportedLatencyModes.size() == 1) {
+        // If the HAL currently supports only one latency mode, confirm the choice
+        latencyMode = mSupportedLatencyModes[0];
+    } else if (mSupportedLatencyModes.size() > 1) {
+        // Request low latency if:
+        // - The low latency mode is requested by the spatializer controller
+        //   (mRequestedLatencyMode = AUDIO_LATENCY_MODE_LOW)
+        //      AND
+        // - At least one active track is spatialized
+        bool hasSpatializedActiveTrack = false;
+        for (const auto& track : mActiveTracks) {
+            if (track->isSpatialized()) {
+                hasSpatializedActiveTrack = true;
+                break;
+            }
+        }
+        if (hasSpatializedActiveTrack && mRequestedLatencyMode == AUDIO_LATENCY_MODE_LOW) {
+            latencyMode = AUDIO_LATENCY_MODE_LOW;
+        }
+    }
+
+    if (latencyMode != mSetLatencyMode) {
+        status_t status = mOutput->stream->setLatencyMode(latencyMode);
+        ALOGD("%s: thread(%d) setLatencyMode(%s) returned %d",
+                __func__, mId, toString(latencyMode).c_str(), status);
+        if (status == NO_ERROR) {
+            mSetLatencyMode = latencyMode;
+        }
+    }
+}
+
+status_t AudioFlinger::SpatializerThread::setRequestedLatencyMode(audio_latency_mode_t mode) {
+    if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    mRequestedLatencyMode = mode;
+    return NO_ERROR;
+}
+
 void AudioFlinger::SpatializerThread::checkOutputStageEffects()
 {
     bool hasVirtualizer = false;
@@ -7311,7 +7786,6 @@
     }
 }
 
-
 // ----------------------------------------------------------------------------
 //      Record
 // ----------------------------------------------------------------------------
@@ -7364,7 +7838,7 @@
     size_t numCounterOffers = 0;
     const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount, mFormat)};
 #if !LOG_NDEBUG
-    ssize_t index =
+    [[maybe_unused]] ssize_t index =
 #else
     (void)
 #endif
@@ -7383,10 +7857,11 @@
         ALOGV("%p kUseFastCapture = Always, initFastCapture = true", this);
         break;
     case FastCapture_Static:
-        initFastCapture = (mFrameCount * 1000) / mSampleRate < kMinNormalCaptureBufferSizeMs;
-        ALOGV("%p kUseFastCapture = Static, (%lld * 1000) / %u vs %u, initFastCapture = %d",
-                this, (long long)mFrameCount, mSampleRate, kMinNormalCaptureBufferSizeMs,
-                initFastCapture);
+        initFastCapture = !mIsMsdDevice // Disable fast capture for MSD BUS devices.
+                && (mFrameCount * 1000) / mSampleRate < kMinNormalCaptureBufferSizeMs;
+        ALOGV("%p kUseFastCapture = Static, (%lld * 1000) / %u vs %u, initFastCapture = %d "
+                "mIsMsdDevice = %d", this, (long long)mFrameCount, mSampleRate,
+                kMinNormalCaptureBufferSizeMs, initFastCapture, mIsMsdDevice);
         break;
     // case FastCapture_Dynamic:
     }
@@ -7412,15 +7887,17 @@
         // pipe will be shared directly with fast clients, so clear to avoid leaking old information
         memset(pipeBuffer, 0, pipeSize);
         Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
-        const NBAIO_Format offers[1] = {format};
-        size_t numCounterOffers = 0;
-        ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
-        ALOG_ASSERT(index == 0);
+        const NBAIO_Format offersFast[1] = {format};
+        size_t numCounterOffersFast = 0;
+        [[maybe_unused]] ssize_t index2 = pipe->negotiate(offersFast, std::size(offersFast),
+                nullptr /* counterOffers */, numCounterOffersFast);
+        ALOG_ASSERT(index2 == 0);
         mPipeSink = pipe;
         PipeReader *pipeReader = new PipeReader(*pipe);
-        numCounterOffers = 0;
-        index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers);
-        ALOG_ASSERT(index == 0);
+        numCounterOffersFast = 0;
+        index2 = pipeReader->negotiate(offersFast, std::size(offersFast),
+                nullptr /* counterOffers */, numCounterOffersFast);
+        ALOG_ASSERT(index2 == 0);
         mPipeSource = pipeReader;
         mPipeFramesP2 = pipeFramesP2;
         mPipeMemory = pipeMemory;
@@ -7767,7 +8244,7 @@
         // copy to the right place.  Permitted because mRsmpInBuffer was over-allocated.
 
         int32_t rear = mRsmpInRear & (mRsmpInFramesP2 - 1);
-        ssize_t framesRead;
+        ssize_t framesRead = 0; // initialized to suppress a clang-tidy warning.
         const int64_t lastIoBeginNs = systemTime(); // start IO timing
 
         // If an NBAIO source is present, use it to read the normal capture's data
@@ -7960,8 +8437,9 @@
                     // straight from RecordThread buffer to RecordTrack buffer.
                     AudioBufferProvider::Buffer buffer;
                     buffer.frameCount = framesOut;
-                    status_t status = activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
-                    if (status == OK && buffer.frameCount != 0) {
+                    const status_t getNextBufferStatus =
+                            activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
+                    if (getNextBufferStatus == OK && buffer.frameCount != 0) {
                         ALOGV_IF(buffer.frameCount != framesOut,
                                 "%s() read less than expected (%zu vs %zu)",
                                 __func__, buffer.frameCount, framesOut);
@@ -7971,7 +8449,7 @@
                     } else {
                         framesOut = 0;
                         ALOGE("%s() cannot fill request, status: %d, frameCount: %zu",
-                            __func__, status, buffer.frameCount);
+                            __func__, getNextBufferStatus, buffer.frameCount);
                     }
                 } else {
                     // process frames from the RecordThread buffer provider to the RecordTrack
@@ -8165,8 +8643,6 @@
     audio_input_flags_t inputFlags = mInput->flags;
     audio_input_flags_t requestedFlags = *flags;
     uint32_t sampleRate;
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
@@ -8181,7 +8657,7 @@
     }
 
     if (maxSharedAudioHistoryMs != 0) {
-        if (!captureHotwordAllowed(checkedAttributionSource)) {
+        if (!captureHotwordAllowed(attributionSource)) {
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
@@ -8302,16 +8778,16 @@
         Mutex::Autolock _l(mLock);
         int32_t startFrames = -1;
         if (!mSharedAudioPackageName.empty()
-                && mSharedAudioPackageName == checkedAttributionSource.packageName
+                && mSharedAudioPackageName == attributionSource.packageName
                 && mSharedAudioSessionId == sessionId
-                && captureHotwordAllowed(checkedAttributionSource)) {
+                && captureHotwordAllowed(attributionSource)) {
             startFrames = mSharedAudioStartFrames;
         }
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
                       nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
-                      checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+                      attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
                       startFrames);
 
         lStatus = track->initCheck();
@@ -8393,7 +8869,6 @@
         //      or using a separate command thread
         recordTrack->mState = TrackBase::STARTING_1;
         mActiveTracks.add(recordTrack);
-        status_t status = NO_ERROR;
         if (recordTrack->isExternalTrack()) {
             mLock.unlock();
             status = AudioSystem::startInput(recordTrack->portId());
@@ -8520,7 +8995,7 @@
 }
 
 status_t AudioFlinger::RecordThread::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones)
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones)
 {
     ALOGV("RecordThread::getActiveMicrophones");
     AutoMutex _l(mLock);
@@ -8584,7 +9059,7 @@
     // "best effort" behavior of the API.
     if (sharedOffset < 0) {
         sharedAudioStartFrames = mRsmpInRear;
-    } else if (sharedOffset > mRsmpInFrames) {
+    } else if (sharedOffset > static_cast<signed>(mRsmpInFrames)) {
         sharedAudioStartFrames =
                 audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
     }
@@ -8605,29 +9080,20 @@
     mSharedAudioPackageName = "";
 }
 
-void AudioFlinger::RecordThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::RecordThread::updateMetadata_l()
 {
     if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
-        return; // nothing to do
+        return {}; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
+    auto backInserter = std::back_inserter(metadata.tracks);
     for (const sp<RecordTrack> &track : mActiveTracks) {
-        // Do not forward PatchRecord metadata to audio HAL
-        if (track->isPatchTrack()) {
-            continue;
-        }
-        // No track is invalid as this is called after prepareTrack_l in the same critical section
-        record_track_metadata_v7_t trackMetadata;
-        trackMetadata.base = {
-                .source = track->attributes().source,
-                .gain = 1, // capture tracks do not have volumes
-        };
-        trackMetadata.channel_mask = track->channelMask(),
-        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
-        metadata.tracks.push_back(trackMetadata);
+        track->copyMetadataTo(backInserter);
     }
     mInput->stream->updateSinkMetadata(metadata);
+    MetadataUpdate change;
+    change.recordMetadataUpdate = metadata.tracks;
+    return change;
 }
 
 // destroyTrack_l() must be called with ThreadBase::mLock held
@@ -8846,7 +9312,7 @@
     if (stepCount == 0) {
         return;
     }
-    ALOG_ASSERT(stepCount <= mRsmpInUnrel);
+    ALOG_ASSERT(stepCount <= (int32_t)mRsmpInUnrel);
     mRsmpInUnrel -= stepCount;
     mRsmpInFront = audio_utils::safe_add_overflow(mRsmpInFront, stepCount);
     buffer->raw = NULL;
@@ -8884,7 +9350,8 @@
     audio_format_t reqFormat = mFormat;
     uint32_t samplingRate = mSampleRate;
     // TODO this may change if we want to support capture from HDMI PCM multi channel (e.g on TVs).
-    audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(mChannelCount);
+    [[maybe_unused]] audio_channel_mask_t channelMask =
+                                audio_channel_in_mask_from_count(mChannelCount);
 
     AudioParameter param = AudioParameter(keyValuePair);
     int value;
@@ -8970,7 +9437,7 @@
             return out_s8;
         }
     }
-    return String8();
+    return {};
 }
 
 void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -9160,6 +9627,9 @@
         track->logEndInterval();
         track->logBeginInterval(pathSourcesAsString);
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -9176,6 +9646,9 @@
     } else {
         status = mInput->stream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -9205,7 +9678,7 @@
             maxFilled = filled;
         }
     }
-    if (maxFilled > mRsmpInFrames) {
+    if (maxFilled > static_cast<signed>(mRsmpInFrames)) {
         (void)__builtin_sub_overflow(mRsmpInRear, mRsmpInFrames, &oldestFront);
     }
     return oldestFront;
@@ -9394,10 +9867,14 @@
     return mThread->standby();
 }
 
+status_t AudioFlinger::MmapThreadHandle::reportData(const void* buffer, size_t frameCount) {
+    return mThread->reportData(buffer, frameCount);
+}
+
 
 AudioFlinger::MmapThread::MmapThread(
         const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
-        AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady, bool isOut)
+        AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady, bool isOut)
     : ThreadBase(audioFlinger, id, (isOut ? MMAP_PLAYBACK : MMAP_CAPTURE), systemReady, isOut),
       mSessionId(AUDIO_SESSION_NONE),
       mPortId(AUDIO_PORT_HANDLE_NONE),
@@ -9472,8 +9949,10 @@
     return mHalStream->getMmapPosition(position);
 }
 
-status_t AudioFlinger::MmapThread::exitStandby()
+status_t AudioFlinger::MmapThread::exitStandby_l()
 {
+    // The HAL must receive track metadata before starting the stream
+    updateMetadata_l();
     status_t ret = mHalStream->start();
     if (ret != NO_ERROR) {
         ALOGE("%s: error mHalStream->start() = %d for first track", __FUNCTION__, ret);
@@ -9499,18 +9978,18 @@
 
     status_t ret;
 
+    // For the first track, reuse portId and session allocated when the stream was opened.
     if (*handle == mPortId) {
-        // For the first track, reuse portId and session allocated when the stream was opened.
-        ret = exitStandby();
-        if (ret == NO_ERROR) {
-            acquireWakeLock();
-        }
-        return ret;
+        acquireWakeLock();
+        return NO_ERROR;
     }
 
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
+    AttributionSourceState adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            client.attributionSource);
+
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
@@ -9522,16 +10001,18 @@
         audio_port_handle_t deviceId = mDeviceId;
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
+        bool isBitPerfect;
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
-                                            client.attributionSource,
+                                            adjAttributionSource,
                                             &config,
                                             flags,
                                             &deviceId,
                                             &portId,
                                             &secondaryOutputs,
-                                            &isSpatialized);
+                                            &isSpatialized,
+                                            &isBitPerfect);
         ALOGD_IF(!secondaryOutputs.empty(),
                  "MmapThread::start does not support secondary outputs, ignoring them");
     } else {
@@ -9543,7 +10024,7 @@
         ret = AudioSystem::getInputForAttr(&mAttr, &io,
                                               RECORD_RIID_INVALID,
                                               mSessionId,
-                                              client.attributionSource,
+                                              adjAttributionSource,
                                               &config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ,
                                               &deviceId,
@@ -9560,6 +10041,12 @@
     if (isOutput()) {
         ret = AudioSystem::startOutput(portId);
     } else {
+        {
+            // Add the track record before starting input so that the silent status for the
+            // client can be cached.
+            Mutex::Autolock _l(mLock);
+            setClientSilencedState_l(portId, false /*silenced*/);
+        }
         ret = AudioSystem::startInput(portId);
     }
 
@@ -9578,6 +10065,7 @@
         } else {
             mHalStream->stop();
         }
+        eraseClientSilencedState_l(portId);
         return PERMISSION_DENIED;
     }
 
@@ -9586,18 +10074,22 @@
                                         mChannelMask, mSessionId, isOutput(),
                                         client.attributionSource,
                                         IPCThreadState::self()->getCallingPid(), portId);
+    if (!isOutput()) {
+        track->setSilenced_l(isClientSilenced_l(portId));
+    }
 
     if (isOutput()) {
         // force volume update when a new track is added
         mHalVolFloat = -1.0f;
     } else if (!track->isSilenced_l()) {
         for (const sp<MmapTrack> &t : mActiveTracks) {
-            if (t->isSilenced_l() && t->uid() != client.attributionSource.uid)
+            if (t->isSilenced_l()
+                    && t->uid() != static_cast<uid_t>(client.attributionSource.uid)) {
                 t->invalidate();
+            }
         }
     }
 
-
     mActiveTracks.add(track);
     sp<EffectChain> chain = getEffectChain_l(mSessionId);
     if (chain != 0) {
@@ -9608,11 +10100,16 @@
 
     track->logBeginInterval(patchSinksToString(&mPatch)); // log to MediaMetrics
     *handle = portId;
+
+    if (mActiveTracks.size() == 1) {
+        ret = exitStandby_l();
+    }
+
     broadcast_l();
 
-    ALOGV("%s DONE handle %d stream %p", __FUNCTION__, *handle, mHalStream.get());
+    ALOGV("%s DONE status %d handle %d stream %p", __FUNCTION__, ret, *handle, mHalStream.get());
 
-    return NO_ERROR;
+    return ret;
 }
 
 status_t AudioFlinger::MmapThread::stop(audio_port_handle_t handle)
@@ -9624,7 +10121,6 @@
     }
 
     if (handle == mPortId) {
-        mHalStream->stop();
         releaseWakeLock();
         return NO_ERROR;
     }
@@ -9643,6 +10139,7 @@
     }
 
     mActiveTracks.remove(track);
+    eraseClientSilencedState_l(track->portId());
 
     mLock.unlock();
     if (isOutput()) {
@@ -9660,6 +10157,10 @@
         chain->decTrackCnt();
     }
 
+    if (mActiveTracks.isEmpty()) {
+        mHalStream->stop();
+    }
+
     broadcast_l();
 
     return NO_ERROR;
@@ -9685,6 +10186,10 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::MmapThread::reportData(const void* /*buffer*/, size_t /*frameCount*/) {
+    // This is a stub implementation. The MmapPlaybackThread overrides this function.
+    return INVALID_OPERATION;
+}
 
 void AudioFlinger::MmapThread::readHalParameters_l()
 {
@@ -9818,7 +10323,7 @@
     if (initCheck() == NO_ERROR && mHalStream->getParameters(keys, &out_s8) == OK) {
         return out_s8;
     }
-    return String8();
+    return {};
 }
 
 void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -9848,6 +10353,7 @@
 
 status_t AudioFlinger::MmapThread::createAudioPatch_l(const struct audio_patch *patch,
                                                           audio_patch_handle_t *handle)
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
     status_t status = NO_ERROR;
 
@@ -9865,8 +10371,8 @@
                                 "as it does not support audio patches",
                                 patch->sinks[i].ext.device.type);
             type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
-            sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
-                    patch->sinks[i].ext.device.address));
+            sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+                    patch->sinks[i].ext.device.address);
         }
         deviceId = patch->sinks[0].id;
         numDevices = mPatch.num_sinks;
@@ -9930,6 +10436,9 @@
         mPatch = *patch;
         mDeviceId = deviceId;
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -9949,6 +10458,9 @@
     } else {
         status = mHalStream->legacyReleaseAudioPatch();
     }
+    // Force metadata update after a route change
+    mActiveTracks.setHasChanged();
+
     return status;
 }
 
@@ -10069,20 +10581,24 @@
 }
 
 void AudioFlinger::MmapThread::checkInvalidTracks_l()
+NO_THREAD_SAFETY_ANALYSIS  // release and re-acquire mLock
 {
+    sp<MmapStreamCallback> callback;
     for (const sp<MmapTrack> &track : mActiveTracks) {
         if (track->isInvalid()) {
-            sp<MmapStreamCallback> callback = mCallback.promote();
-            if (callback != 0) {
-                mLock.unlock();
-                callback->onTearDown(track->portId());
-                mLock.lock();
-            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
-                ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+            callback = mCallback.promote();
+            if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+                ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
+            break;
         }
     }
+    if (callback != 0) {
+        mLock.unlock();
+        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+        mLock.lock();
+    }
 }
 
 void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
@@ -10219,7 +10735,27 @@
     }
 }
 
+void AudioFlinger::MmapPlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds)
+{
+    Mutex::Autolock _l(mLock);
+    bool trackMatch = false;
+    for (const sp<MmapTrack> &track : mActiveTracks) {
+        if (portIds.find(track->portId()) != portIds.end()) {
+            track->invalidate();
+            trackMatch = true;
+            portIds.erase(track->portId());
+        }
+        if (portIds.empty()) {
+            break;
+        }
+    }
+    if (trackMatch) {
+        broadcast_l();
+    }
+}
+
 void AudioFlinger::MmapPlaybackThread::processVolume_l()
+NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
 {
     float volume;
 
@@ -10248,20 +10784,10 @@
         } else {
             sp<MmapStreamCallback> callback = mCallback.promote();
             if (callback != 0) {
-                int channelCount;
-                if (isOutput()) {
-                    channelCount = audio_channel_count_from_out_mask(mChannelMask);
-                } else {
-                    channelCount = audio_channel_count_from_in_mask(mChannelMask);
-                }
-                Vector<float> values;
-                for (int i = 0; i < channelCount; i++) {
-                    values.add(volume);
-                }
                 mHalVolFloat = volume; // SW volume control worked, so update value.
                 mNoCallbackWarningCount = 0;
                 mLock.unlock();
-                callback->onVolumeChanged(mChannelMask, values);
+                callback->onVolumeChanged(volume);
                 mLock.lock();
             } else {
                 if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
@@ -10272,14 +10798,22 @@
         }
         for (const sp<MmapTrack> &track : mActiveTracks) {
             track->setMetadataHasChanged();
+            track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               mStreamVolume == 0.f,
+                               mStreamMute,
+                               // TODO(b/241533526): adjust logic to include mute from AppOps
+                               false /*muteFromPlaybackRestricted*/,
+                               false /*muteFromClientVolume*/,
+                               false /*muteFromVolumeShaper*/});
         }
     }
 }
 
-void AudioFlinger::MmapPlaybackThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::MmapPlaybackThread::updateMetadata_l()
 {
     if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
-        return; // nothing to do
+        return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
     for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10295,7 +10829,11 @@
         metadata.tracks.push_back(trackMetadata);
     }
     mOutput->stream->updateSourceMetadata(metadata);
-}
+
+    MetadataUpdate change;
+    change.playbackMetadataUpdate = metadata.tracks;
+    return change;
+}
 
 void AudioFlinger::MmapPlaybackThread::checkSilentMode_l()
 {
@@ -10337,6 +10875,40 @@
     return status;
 }
 
+status_t AudioFlinger::MmapPlaybackThread::reportData(const void* buffer, size_t frameCount) {
+    // Send to MelProcessor for sound dose measurement.
+    auto processor = mMelProcessor.load();
+    if (processor) {
+        processor->process(buffer, frameCount * mFrameSize);
+    }
+
+    return NO_ERROR;
+}
+
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::startMelComputation_l(
+        const sp<audio_utils::MelProcessor>& processor)
+{
+    ALOGV("%s: starting mel processor for thread %d", __func__, id());
+    mMelProcessor.store(processor);
+    if (processor) {
+        processor->resume();
+    }
+
+    // no need to update the output format for MmapPlaybackThread since it is
+    // assigned a constant value for each thread
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::stopMelComputation_l()
+{
+    ALOGV("%s: pausing mel processor for thread %d", __func__, id());
+    auto melProcessor = mMelProcessor.load();
+    if (melProcessor != nullptr) {
+        melProcessor->pause();
+    }
+}
+
 void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MmapThread::dumpInternals_l(fd, args);
@@ -10356,16 +10928,15 @@
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
 }
 
-status_t AudioFlinger::MmapCaptureThread::exitStandby()
+status_t AudioFlinger::MmapCaptureThread::exitStandby_l()
 {
     {
         // mInput might have been cleared by clearInput()
-        Mutex::Autolock _l(mLock);
         if (mInput != nullptr && mInput->stream != nullptr) {
             mInput->stream->setGain(1.0f);
         }
     }
-    return MmapThread::exitStandby();
+    return MmapThread::exitStandby_l();
 }
 
 AudioFlinger::AudioStreamIn* AudioFlinger::MmapCaptureThread::clearInput()
@@ -10404,10 +10975,10 @@
     }
 }
 
-void AudioFlinger::MmapCaptureThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::MmapCaptureThread::updateMetadata_l()
 {
     if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
-        return; // nothing to do
+        return {}; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
     for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10422,6 +10993,9 @@
         metadata.tracks.push_back(trackMetadata);
     }
     mInput->stream->updateSinkMetadata(metadata);
+    MetadataUpdate change;
+    change.recordMetadataUpdate = metadata.tracks;
+    return change;
 }
 
 void AudioFlinger::MmapCaptureThread::setRecordSilenced(audio_port_handle_t portId, bool silenced)
@@ -10433,6 +11007,7 @@
             broadcast_l();
         }
     }
+    setClientSilencedIfExists_l(portId, silenced);
 }
 
 void AudioFlinger::MmapCaptureThread::toAudioPortConfig(struct audio_port_config *config)
@@ -10453,4 +11028,48 @@
     return mInput->getCapturePosition((int64_t*)position, timeNanos);
 }
 
+// ----------------------------------------------------------------------------
+
+AudioFlinger::BitPerfectThread::BitPerfectThread(const sp<AudioFlinger> &audioflinger,
+        AudioStreamOut *output, audio_io_handle_t id, bool systemReady)
+        : MixerThread(audioflinger, output, id, systemReady, BIT_PERFECT) {}
+
+AudioFlinger::PlaybackThread::mixer_state AudioFlinger::BitPerfectThread::prepareTracks_l(
+        Vector<sp<Track>> *tracksToRemove) {
+    mixer_state result = MixerThread::prepareTracks_l(tracksToRemove);
+    // If there is only one active track and it is bit-perfect, enable tee buffer.
+    float volumeLeft = 1.0f;
+    float volumeRight = 1.0f;
+    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+        const int trackId = mActiveTracks[0]->id();
+        mAudioMixer->setParameter(
+                    trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
+        mAudioMixer->setParameter(
+                    trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
+                    (void *)(uintptr_t)mNormalFrameCount);
+        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        mIsBitPerfect = true;
+    } else {
+        mIsBitPerfect = false;
+        // No need to copy bit-perfect data directly to the sink buffer when there are
+        // multiple active tracks.
+        for (const auto& track : mActiveTracks) {
+            const int trackId = track->id();
+            mAudioMixer->setParameter(
+                        trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, nullptr);
+        }
+    }
+    if (mVolumeLeft != volumeLeft || mVolumeRight != volumeRight) {
+        mVolumeLeft = volumeLeft;
+        mVolumeRight = volumeRight;
+        setVolumeForOutput_l(volumeLeft, volumeRight);
+    }
+    return result;
+}
+
+void AudioFlinger::BitPerfectThread::threadLoop_mix() {
+    MixerThread::threadLoop_mix();
+    mHasDataCopiedToSinkBuffer = mIsBitPerfect;
+}
+
 } // namespace android
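The MmapThread hunks above move HAL stream start and stop to the track lifecycle: exitStandby_l() now runs only when the first active track is added, and mHalStream->stop() only when the last one is removed. The standalone sketch below models that first-in/last-out pattern; the names MmapLikeThread and HalStream are illustrative only and are not AudioFlinger classes.

    #include <cstdio>
    #include <set>

    struct HalStream {
        void start() { std::puts("HAL stream started"); }   // stands in for exitStandby_l()
        void stop()  { std::puts("HAL stream stopped"); }   // stands in for mHalStream->stop()
    };

    class MmapLikeThread {
    public:
        void addTrack(int portId) {
            mActive.insert(portId);
            if (mActive.size() == 1) mHal.start();   // only the first track starts the stream
        }
        void removeTrack(int portId) {
            mActive.erase(portId);
            if (mActive.empty()) mHal.stop();        // only the last track stops it
        }
    private:
        std::set<int> mActive;
        HalStream mHal;
    };

    int main() {
        MmapLikeThread thread;
        thread.addTrack(1);      // "HAL stream started"
        thread.addTrack(2);      // no HAL call
        thread.removeTrack(1);   // no HAL call
        thread.removeTrack(2);   // "HAL stream stopped"
        return 0;
    }
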
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index b2962ed8..e88134b 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -33,6 +33,7 @@
         MMAP_PLAYBACK,      // Thread class for MMAP playback stream
         MMAP_CAPTURE,       // Thread class for MMAP capture stream
         SPATIALIZER,  //
+        BIT_PERFECT,        // Thread class for BitPerfectThread
         // If you add any values here, also update ThreadBase::threadTypeToString()
     };
 
@@ -55,7 +56,8 @@
         CFG_EVENT_RELEASE_AUDIO_PATCH,
         CFG_EVENT_UPDATE_OUT_DEVICE,
         CFG_EVENT_RESIZE_BUFFER,
-        CFG_EVENT_CHECK_OUTPUT_STAGE_EFFECTS
+        CFG_EVENT_CHECK_OUTPUT_STAGE_EFFECTS,
+        CFG_EVENT_HAL_LATENCY_MODES_CHANGED,
     };
 
     class ConfigEventData: public RefBase {
@@ -162,7 +164,7 @@
 
     class SetParameterConfigEventData : public ConfigEventData {
     public:
-        explicit SetParameterConfigEventData(String8 keyValuePairs) :
+        explicit SetParameterConfigEventData(const String8& keyValuePairs) :
             mKeyValuePairs(keyValuePairs) {}
 
         virtual  void dump(char *buffer, size_t size) {
@@ -174,7 +176,7 @@
 
     class SetParameterConfigEvent : public ConfigEvent {
     public:
-        explicit SetParameterConfigEvent(String8 keyValuePairs) :
+        explicit SetParameterConfigEvent(const String8& keyValuePairs) :
             ConfigEvent(CFG_EVENT_SET_PARAMETER) {
             mData = new SetParameterConfigEventData(keyValuePairs);
             mWaitStatus = true;
@@ -282,6 +284,15 @@
         virtual ~CheckOutputStageEffectsEvent() {}
     };
 
+    class HalLatencyModesChangedEvent : public ConfigEvent {
+    public:
+        HalLatencyModesChangedEvent() :
+            ConfigEvent(CFG_EVENT_HAL_LATENCY_MODES_CHANGED) {
+        }
+
+        virtual ~HalLatencyModesChangedEvent() {}
+    };
+
 
     class PMDeathRecipient : public IBinder::DeathRecipient {
     public:
@@ -353,6 +364,7 @@
                 void        sendResizeBufferConfigEvent_l(int32_t maxSharedAudioHistoryMs);
                 void        sendCheckOutputStageEffectsEvent();
                 void        sendCheckOutputStageEffectsEvent_l();
+                void        sendHalLatencyModesChangedEvent_l();
 
                 void        processConfigEvents_l();
     virtual     void        setCheckOutputStageEffects() {}
@@ -365,8 +377,6 @@
 
     virtual     void        resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs);
 
-
-
                 // see note at declaration of mStandby, mOutDevice and mInDevice
                 bool        standby() const { return mStandby; }
                 const DeviceTypeSet outDeviceTypes() const {
@@ -419,8 +429,10 @@
                                             // track
                     FAST_SESSION = 0x4,     // the audio session corresponds to at least one
                                             // fast track
-                    SPATIALIZED_SESSION = 0x8 // the audio session corresponds to at least one
-                                              // spatialized track
+                    SPATIALIZED_SESSION = 0x8, // the audio session corresponds to at least one
+                                               // spatialized track
+                    BIT_PERFECT_SESSION = 0x10 // the audio session corresponds to at least one
+                                               // bit-perfect track
                 };
 
                 // get effect chain corresponding to session Id.
@@ -486,6 +498,9 @@
                             if (track->isSpatialized()) {
                                 result |= SPATIALIZED_SESSION;  // caution, only first track.
                             }
+                            if (track->isBitPerfect()) {
+                                result |= BIT_PERFECT_SESSION;
+                            }
                             break;
                         }
                     }
@@ -561,6 +576,9 @@
 
     virtual     bool isStreamInitialized() = 0;
 
+    virtual     void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor);
+    virtual     void stopMelComputation_l();
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -592,7 +610,11 @@
                 void checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain);
 
                 // sends the metadata of the active tracks to the HAL
-    virtual     void        updateMetadata_l() = 0;
+                struct MetadataUpdate {
+                    std::vector<playback_track_metadata_v7_t> playbackMetadataUpdate;
+                    std::vector<record_track_metadata_v7_t>   recordMetadataUpdate;
+                };
+    virtual     MetadataUpdate           updateMetadata_l() = 0;
 
                 String16 getWakeLockTag();
 
@@ -608,12 +630,14 @@
 
                 product_strategy_t getStrategyForStream(audio_stream_type_t stream) const;
 
+    virtual     void        onHalLatencyModesChanged_l() {}
+
     virtual     void        dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
                             { }
     virtual     void        dumpTracks_l(int fd __unused, const Vector<String16>& args __unused) { }
 
 
-    friend class AudioFlinger;      // for mEffectChains
+    friend class AudioFlinger;      // for mEffectChains and mAudioManager
 
                 const type_t            mType;
 
@@ -778,7 +802,7 @@
                     // ThreadBase thread.
                     void            clear();
                     // periodically called in the threadLoop() to update power state uids.
-                    void            updatePowerState(sp<ThreadBase> thread, bool force = false);
+                    void updatePowerState(const sp<ThreadBase>& thread, bool force = false);
 
                     /** @return true if one or move active tracks was added or removed since the
                      *          last time this function was called or the vector was created.
@@ -786,6 +810,11 @@
                      */
                     bool            readAndClearHasChanged();
 
+                    /** Force updating track metadata to the audio HAL stream the next time
+                     * readAndClearHasChanged() is called.
+                     */
+                    void            setHasChanged() { mHasChanged = true; }
+
                 private:
                     void            logTrack(const char *funcName, const sp<T> &track) const;
 
@@ -921,6 +950,8 @@
                             }
 
     virtual     void        checkOutputStageEffects() {}
+    virtual     void        setHalLatencyMode_l() {}
+
 
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
                 void        dumpTracks_l(int fd, const Vector<String16>& args) override;
@@ -964,7 +995,8 @@
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
                                 const sp<media::IAudioTrackCallback>& callback,
-                                bool isSpatialized);
+                                bool isSpatialized,
+                                bool isBitPerfect);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -1011,7 +1043,11 @@
 
                 // called with AudioFlinger lock held
                         bool     invalidateTracks_l(audio_stream_type_t streamType);
+                        bool     invalidateTracks_l(std::set<audio_port_handle_t>& portIds);
                 virtual void     invalidateTracks(audio_stream_type_t streamType);
+                // Invalidate tracks by a set of port ids. The port id will be removed from
+                // the given set if the corresponding track is found and invalidated.
+                virtual void     invalidateTracks(std::set<audio_port_handle_t>& portIds);
 
     virtual     size_t      frameCount() const { return mNormalFrameCount; }
 
@@ -1064,6 +1100,48 @@
                 bool hasMixer() const {
                     return mType == MIXER || mType == DUPLICATING || mType == SPATIALIZER;
                 }
+
+    virtual     status_t setRequestedLatencyMode(
+            audio_latency_mode_t mode __unused) { return INVALID_OPERATION; }
+
+    virtual     status_t getSupportedLatencyModes(
+                        std::vector<audio_latency_mode_t>* modes __unused) {
+                    return INVALID_OPERATION;
+                }
+
+    virtual     status_t setBluetoothVariableLatencyEnabled(bool enabled __unused) {
+                    return INVALID_OPERATION;
+                }
+
+                void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+                void stopMelComputation_l() override;
+
+                void setStandby() {
+                    Mutex::Autolock _l(mLock);
+                    setStandby_l();
+                }
+
+                void setStandby_l() {
+                    mStandby = true;
+                    mHalStarted = false;
+                    mKernelPositionOnStandby =
+                        mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+                }
+
+                bool waitForHalStart() {
+                    Mutex::Autolock _l(mLock);
+                    static const nsecs_t kWaitHalTimeoutNs = seconds(2);
+                    nsecs_t endWaitTimeNs = systemTime() + kWaitHalTimeoutNs;
+                    while (!mHalStarted) {
+                        nsecs_t timeNs = systemTime();
+                        if (timeNs >= endWaitTimeNs) {
+                            break;
+                        }
+                        nsecs_t waitTimeLeftNs = endWaitTimeNs - timeNs;
+                        mWaitHalStartCV.waitRelative(mLock, waitTimeLeftNs);
+                    }
+                    return mHalStarted;
+                }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1133,6 +1211,9 @@
     // for any processing (including output processing).
     bool                            mEffectBufferValid;
 
+    // Set to "true" to enable when data has already copied to sink
+    bool                            mHasDataCopiedToSinkBuffer = false;
+
     // Frame size aligned buffer used as input and output to all post processing effects
     // except the Spatializer in a SPATIALIZER thread. Non spatialized tracks are mixed into
     // this buffer so that post processing effects can be applied.
@@ -1150,7 +1231,7 @@
     volatile int32_t                mSuspended;
 
     int64_t                         mBytesWritten;
-    int64_t                         mFramesWritten; // not reset on standby
+    std::atomic<int64_t>            mFramesWritten; // not reset on standby
     int64_t                         mLastFramesWritten = -1; // track changes in timestamp
                                                              // server frames written.
     int64_t                         mSuspendedFrames; // not reset on standby
@@ -1227,7 +1308,7 @@
     void        removeTrack_l(const sp<Track>& track);
 
     void        readOutputParameters_l();
-    void        updateMetadata_l() final;
+    MetadataUpdate          updateMetadata_l() final;
     virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
 
     void        collectTimestamps_l();
@@ -1236,7 +1317,7 @@
     template <typename T>
     class Tracks {
     public:
-        Tracks(bool saveDeletedTrackIds) :
+        explicit Tracks(bool saveDeletedTrackIds) :
             mSaveDeletedTrackIds(saveDeletedTrackIds) { }
 
         // SortedVector methods
@@ -1265,7 +1346,7 @@
             return mTracks.end();
         }
 
-        size_t          processDeletedTrackIds(std::function<void(int)> f) {
+        size_t          processDeletedTrackIds(const std::function<void(int)>& f) {
             for (const int trackId : mDeletedTrackIds) {
                 f(trackId);
             }
@@ -1360,10 +1441,19 @@
     // Downstream patch latency, available if mDownstreamLatencyStatMs.getN() > 0.
     audio_utils::Statistics<double> mDownstreamLatencyStatMs{0.999};
 
+    // output stream start detection based on render position returned by the kernel
+    // condition signalled when the output stream has started
+    Condition                mWaitHalStartCV;
+    // true when the output stream render position has moved, reset to false in standby
+    bool                     mHalStarted = false;
+    // last kernel render position saved when entering standby
+    int64_t                  mKernelPositionOnStandby = 0;
+
 public:
     virtual     bool        hasFastMixer() const = 0;
     virtual     FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex __unused) const
                                 { FastTrackUnderruns dummy; return dummy; }
+                const std::atomic<int64_t>& framesWritten() const { return mFramesWritten; }
 
 protected:
                 // accessed by both binder threads and within threadLoop(), lock on mutex needed
@@ -1381,16 +1471,42 @@
 
                 std::atomic_bool mCheckOutputStageEffects{};
 
-                // A differential check on the timestamps to see if there is a change in the
-                // timestamp frame position between the last call to checkRunningTimestamp.
-                uint64_t mLastCheckedTimestampPosition = ~0LL;
 
-                bool checkRunningTimestamp();
+                // Provides periodic checking for timestamp advancement for underrun detection.
+                class IsTimestampAdvancing {
+                public:
+                    // The timestamp will not be checked any faster than the specified time.
+                    explicit IsTimestampAdvancing(nsecs_t minimumTimeBetweenChecksNs)
+                        :   mMinimumTimeBetweenChecksNs(minimumTimeBetweenChecksNs)
+                    {
+                        clear();
+                    }
+                    // Check if the presentation position has advanced in the last periodic time.
+                    bool check(AudioStreamOut * output);
+                    // Clear the internal state when the playback state changes for the output
+                    // stream.
+                    void clear();
+                private:
+                    // The minimum time between timestamp checks.
+                    const nsecs_t mMinimumTimeBetweenChecksNs;
+                    // A differential check on the timestamps to see if there is a change in the
+                    // timestamp frame position between the last call to check.
+                    uint64_t mPreviousPosition;
+                    // The time at which the last check occurred, to ensure we don't check too
+                    // frequently, giving the Audio HAL enough time to update its timestamps.
+                    nsecs_t mPreviousNs;
+                    // The value is latched so we don't check timestamps too frequently.
+                    bool mLatchedValue;
+                };
+                IsTimestampAdvancing mIsTimestampAdvancing;
 
-    virtual     void flushHw_l() { mLastCheckedTimestampPosition = ~0LL; }
+    virtual     void flushHw_l() {
+                    mIsTimestampAdvancing.clear();
+                }
 };
 
-class MixerThread : public PlaybackThread {
+class MixerThread : public PlaybackThread,
+                    public StreamOutHalInterfaceLatencyModeCallback  {
 public:
     MixerThread(const sp<AudioFlinger>& audioFlinger,
                 AudioStreamOut* output,
@@ -1400,6 +1516,13 @@
                 audio_config_base_t *mixerConfig = nullptr);
     virtual             ~MixerThread();
 
+    // RefBase
+    virtual     void        onFirstRef();
+
+                // StreamOutHalInterfaceLatencyModeCallback
+                void        onRecommendedLatencyModeChanged(
+                                    std::vector<audio_latency_mode_t> modes) override;
+
     // Thread virtuals
 
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
@@ -1436,6 +1559,17 @@
     virtual     status_t    releaseAudioPatch_l(const audio_patch_handle_t handle);
 
                 AudioMixer* mAudioMixer;    // normal mixer
+
+            // Support low latency mode by default: unless explicitly indicated by the audio HAL,
+            // we assume the audio path is compatible with the head tracking latency requirements.
+            std::vector<audio_latency_mode_t> mSupportedLatencyModes = {AUDIO_LATENCY_MODE_LOW};
+            // default to invalid value to force first update to the audio HAL
+            audio_latency_mode_t mSetLatencyMode =
+                    (audio_latency_mode_t)AUDIO_LATENCY_MODE_INVALID;
+
+            // Bluetooth Variable latency control logic is enabled or disabled for this thread
+            std::atomic_bool mBluetoothLatencyModesEnabled;
+
 private:
                 // one-time initialization, no locks required
                 sp<FastMixer>     mFastMixer;     // non-0 if there is also a fast mixer
@@ -1469,6 +1603,11 @@
                                 return INVALID_OPERATION;
                             }
 
+                status_t    getSupportedLatencyModes(
+                                    std::vector<audio_latency_mode_t>* modes) override;
+
+                status_t    setBluetoothVariableLatencyEnabled(bool enabled) override;
+
 protected:
     virtual     void       setMasterMono_l(bool mono) {
                                mMasterMono.store(mono);
@@ -1487,14 +1626,19 @@
                                    mFastMixer->setMasterBalance(balance);
                                }
                            }
+
+                void       updateHalSupportedLatencyModes_l();
+                void       onHalLatencyModesChanged_l() override;
+                void       setHalLatencyMode_l() override;
 };
 
 class DirectOutputThread : public PlaybackThread {
 public:
 
     DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
-                       audio_io_handle_t id, bool systemReady)
-        : DirectOutputThread(audioFlinger, output, id, DIRECT, systemReady) { }
+                       audio_io_handle_t id, bool systemReady,
+                       const audio_offload_info_t& offloadInfo)
+        : DirectOutputThread(audioFlinger, output, id, DIRECT, systemReady, offloadInfo) { }
 
     virtual                 ~DirectOutputThread();
 
@@ -1526,11 +1670,16 @@
 
     virtual     void        onAddNewTrack_l();
 
+    const       audio_offload_info_t mOffloadInfo;
+
+    audioflinger::MonotonicFrameCounter mMonotonicFrameCounter;  // for VolumeShaper
     bool mVolumeShaperActive = false;
 
     DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
-                       audio_io_handle_t id, ThreadBase::type_t type, bool systemReady);
+                       audio_io_handle_t id, ThreadBase::type_t type, bool systemReady,
+                       const audio_offload_info_t& offloadInfo);
     void processVolume_l(Track *track, bool lastTrack);
+    bool isTunerStream() const { return (mOffloadInfo.content_id > 0); }
 
     // prepareTracks_l() tells threadLoop_mix() the name of the single active track
     sp<Track>               mActiveTrack;
@@ -1568,7 +1717,8 @@
 public:
 
     OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
-                  audio_io_handle_t id, bool systemReady);
+                  audio_io_handle_t id, bool systemReady,
+                  const audio_offload_info_t& offloadInfo);
     virtual                 ~OffloadThread() {};
                 void        flushHw_l() override;
 
@@ -1580,6 +1730,7 @@
     virtual     bool        waitingAsyncCallback();
     virtual     bool        waitingAsyncCallback_l();
     virtual     void        invalidateTracks(audio_stream_type_t streamType);
+                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
 
     virtual     bool        keepWakeLock() const { return (mKeepWakeLock || (mDrainSequence & 1)); }
 
@@ -1642,7 +1793,7 @@
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
 
 private:
-                bool        outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks);
+                bool        outputsReady();
 protected:
     // threadLoop snippets
     virtual     void        threadLoop_mix();
@@ -1693,10 +1844,19 @@
 
             bool hasFastMixer() const override { return false; }
 
+            // RefBase
+            virtual void        onFirstRef();
+
+            status_t setRequestedLatencyMode(audio_latency_mode_t mode) override;
+
 protected:
             void checkOutputStageEffects() override;
+            void setHalLatencyMode_l() override;
 
 private:
+            // Do not request a specific mode by default
+            audio_latency_mode_t mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+
             sp<EffectHandle> mFinalDownMixer;
 };
 
@@ -1857,12 +2017,13 @@
             // Sets the UID records silence
             void        setRecordSilenced(audio_port_handle_t portId, bool silenced);
 
-            status_t    getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+            status_t    getActiveMicrophones(
+                    std::vector<media::MicrophoneInfoFw>* activeMicrophones);
 
             status_t    setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
             status_t    setPreferredMicrophoneFieldDimension(float zoom);
 
-            void        updateMetadata_l() override;
+            MetadataUpdate        updateMetadata_l() override;
 
             bool        fastTrackAvailable() const { return mFastTrackAvail; }
 
@@ -1981,7 +2142,7 @@
 #include "MmapTracks.h"
 
     MmapThread(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
-               AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady,
+               AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady,
                bool isOut);
     virtual     ~MmapThread();
 
@@ -2004,6 +2165,7 @@
     status_t stop(audio_port_handle_t handle);
     status_t standby();
     virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNaos) = 0;
+    virtual status_t reportData(const void* buffer, size_t frameCount);
 
     // RefBase
     virtual     void        onFirstRef();
@@ -2014,7 +2176,7 @@
     virtual     void        threadLoop_exit();
     virtual     void        threadLoop_standby();
     virtual     bool        shouldStandby_l() { return false; }
-    virtual     status_t    exitStandby();
+    virtual     status_t    exitStandby_l() REQUIRES(mLock);
 
     virtual     status_t    initCheck() const { return (mHalStream == 0) ? NO_INIT : NO_ERROR; }
     virtual     size_t      frameCount() const { return mFrameCount; }
@@ -2050,6 +2212,7 @@
     virtual     audio_stream_type_t streamType() { return AUDIO_STREAM_DEFAULT; }
 
     virtual     void        invalidateTracks(audio_stream_type_t streamType __unused) {}
+    virtual     void        invalidateTracks(std::set<audio_port_handle_t>& portIds __unused) {}
 
                 // Sets the UID records silence
     virtual     void        setRecordSilenced(audio_port_handle_t portId __unused,
@@ -2057,6 +2220,26 @@
 
     virtual     bool        isStreamInitialized() { return false; }
 
+                void        setClientSilencedState_l(audio_port_handle_t portId, bool silenced) {
+                                mClientSilencedStates[portId] = silenced;
+                            }
+
+                size_t      eraseClientSilencedState_l(audio_port_handle_t portId) {
+                                return mClientSilencedStates.erase(portId);
+                            }
+
+                bool        isClientSilenced_l(audio_port_handle_t portId) const {
+                                const auto it = mClientSilencedStates.find(portId);
+                                return it != mClientSilencedStates.end() ? it->second : false;
+                            }
+
+                void        setClientSilencedIfExists_l(audio_port_handle_t portId, bool silenced) {
+                                const auto it = mClientSilencedStates.find(portId);
+                                if (it != mClientSilencedStates.end()) {
+                                    it->second = silenced;
+                                }
+                            }
+
  protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
                 void        dumpTracks_l(int fd, const Vector<String16>& args) override;
@@ -2076,6 +2259,7 @@
                 AudioHwDevice* const    mAudioHwDev;
                 ActiveTracks<MmapTrack> mActiveTracks;
                 float                   mHalVolFloat;
+                std::map<audio_port_handle_t, bool> mClientSilencedStates;
 
                 int32_t                 mNoCallbackWarningCount;
      static     constexpr int32_t       kMaxNoCallbackWarnings = 5;
@@ -2108,12 +2292,13 @@
                 void        setMasterMute_l(bool muted) { mMasterMute = muted; }
 
     virtual     void        invalidateTracks(audio_stream_type_t streamType);
+                void        invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
 
     virtual     audio_stream_type_t streamType() { return mStreamType; }
     virtual     void        checkSilentMode_l();
                 void        processVolume_l() override;
 
-                void        updateMetadata_l() override;
+                MetadataUpdate        updateMetadata_l() override;
 
     virtual     void        toAudioPortConfig(struct audio_port_config *config);
 
@@ -2123,6 +2308,11 @@
                                 return !(mOutput == nullptr || mOutput->stream == nullptr);
                             }
 
+                status_t    reportData(const void* buffer, size_t frameCount) override;
+
+                void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+                void stopMelComputation_l() override;
+
 protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
 
@@ -2132,6 +2322,8 @@
                 bool                        mMasterMute;
                 bool                        mStreamMute;
                 AudioStreamOut*             mOutput;
+
+                mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
 };
 
 class MmapCaptureThread : public MmapThread
@@ -2144,9 +2336,9 @@
 
                 AudioStreamIn* clearInput();
 
-                status_t       exitStandby() override;
+                status_t       exitStandby_l() REQUIRES(mLock) override;
 
-                void           updateMetadata_l() override;
+                MetadataUpdate           updateMetadata_l() override;
                 void           processVolume_l() override;
                 void           setRecordSilenced(audio_port_handle_t portId,
                                                  bool silenced) override;
@@ -2163,3 +2355,18 @@
 
                 AudioStreamIn*  mInput;
 };
+
+class BitPerfectThread : public MixerThread {
+public:
+    BitPerfectThread(const sp<AudioFlinger>& audioflinger, AudioStreamOut *output,
+                     audio_io_handle_t id, bool systemReady);
+
+protected:
+    mixer_state prepareTracks_l(Vector<sp<Track>> *tracksToRemove) override;
+    void threadLoop_mix() override;
+
+private:
+    bool mIsBitPerfect = false;
+    float mVolumeLeft = 0.f;
+    float mVolumeRight = 0.f;
+};
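Several hunks above force a metadata resend after a route change by calling mActiveTracks.setHasChanged(), which makes the next readAndClearHasChanged() inside updateMetadata_l() return true. A minimal standalone sketch of that set / read-and-clear flag follows; ChangeFlag is an illustrative name and the atomic is an assumption, so the real ActiveTracks bookkeeping may differ.

    #include <atomic>
    #include <cstdio>

    class ChangeFlag {
    public:
        void setHasChanged() { mHasChanged = true; }
        // Returns true at most once per change, mirroring readAndClearHasChanged().
        bool readAndClearHasChanged() { return mHasChanged.exchange(false); }
    private:
        std::atomic<bool> mHasChanged{false};
    };

    int main() {
        ChangeFlag tracksChanged;
        tracksChanged.setHasChanged();                                // e.g. after createAudioPatch_l()
        std::printf("%d\n", tracksChanged.readAndClearHasChanged());  // 1 -> metadata is resent
        std::printf("%d\n", tracksChanged.readAndClearHasChanged());  // 0 -> nothing to do
        return 0;
    }
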
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 2677ab3..254fb91 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -109,6 +109,8 @@
 
     virtual bool        isSpatialized() const { return false; }
 
+    virtual bool        isBitPerfect() const { return false; }
+
 #ifdef TEE_SINK
            void         dumpTee(int fd, const std::string &reason) const {
                                 mTee.dump(fd, reason);
@@ -122,7 +124,7 @@
              * This may be called without the thread lock.
              */
     virtual double      bufferLatencyMs() const {
-                            return mServerProxy->framesReadySafe() * 1000 / sampleRate();
+                            return mServerProxy->framesReadySafe() * 1000. / sampleRate();
                         }
 
             /** returns whether the track supports server latency computation.
@@ -397,6 +399,8 @@
     int64_t             mLogStartFrames = 0;    // Timestamp frames at start()
     double              mLogLatencyMs = 0.;     // Track the last log latency
 
+    bool                mLogForceVolumeUpdate = true; // force volume update to TrackMetrics.
+
     TrackMetrics        mTrackMetrics;
 
     bool                mServerLatencySupported = false;
@@ -428,7 +432,7 @@
 {
 public:
     using Timeout = std::optional<std::chrono::nanoseconds>;
-                        PatchTrackBase(sp<ClientProxy> proxy, const ThreadBase& thread,
+                        PatchTrackBase(const sp<ClientProxy>& proxy, const ThreadBase& thread,
                                        const Timeout& timeout);
             void        setPeerTimeout(std::chrono::nanoseconds timeout);
             template <typename T>
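The bufferLatencyMs() hunk above changes 1000 to 1000. so the latency is computed in floating point rather than with truncating integer division. A small standalone illustration with made-up sample values:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const size_t framesReady = 250;      // ~5.2 ms of audio at 48 kHz
        const uint32_t sampleRate = 48000;

        const double truncated = framesReady * 1000 / sampleRate;   // integer division -> 5.000
        const double exact     = framesReady * 1000. / sampleRate;  // double division  -> 5.208
        std::printf("%.3f ms vs %.3f ms\n", truncated, exact);
        return 0;
    }
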
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 30d69ab..f3425df 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -17,6 +17,9 @@
 #ifndef ANDROID_AUDIO_TRACKMETRICS_H
 #define ANDROID_AUDIO_TRACKMETRICS_H
 
+#include <binder/IActivityManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
 #include <mutex>
 
 namespace android {
@@ -38,10 +41,13 @@
  * We currently deliver metrics based on an AudioIntervalGroup.
  */
 class TrackMetrics final {
+
+
 public:
-    TrackMetrics(std::string metricsId, bool isOut)
+    TrackMetrics(std::string metricsId, bool isOut, int clientUid)
         : mMetricsId(std::move(metricsId))
         , mIsOut(isOut)
+        , mUid(clientUid)
         {}  // we don't log a constructor item, we wait for more info in logConstructor().
 
     ~TrackMetrics() {
@@ -64,7 +70,18 @@
                     AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP, devices.c_str());
         }
         ++mIntervalCount;
-        mIntervalStartTimeNs = systemTime();
+        const auto& mActivityManager = getActivityManager();
+        if (mActivityManager) {
+            if (mIsOut) {
+                mActivityManager->logFgsApiBegin(AUDIO_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            } else {
+                mActivityManager->logFgsApiBegin(MICROPHONE_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            }
+        }
     }
 
     void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
@@ -90,11 +107,21 @@
     // Called when we are removed from the Thread.
     void logEndInterval() {
         std::lock_guard l(mLock);
-        if (mIntervalStartTimeNs != 0) {
-            const int64_t elapsedTimeNs = systemTime() - mIntervalStartTimeNs;
-            mIntervalStartTimeNs = 0;
-            mCumulativeTimeNs += elapsedTimeNs;
-            mDeviceTimeNs += elapsedTimeNs;
+        if (mLastVolumeChangeTimeNs != 0) {
+            logVolume_l(mVolume); // flush out the last volume.
+            mLastVolumeChangeTimeNs = 0;
+        }
+        const auto& mActivityManager = getActivityManager();
+        if (mActivityManager) {
+            if (mIsOut) {
+                mActivityManager->logFgsApiEnd(AUDIO_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            } else {
+                mActivityManager->logFgsApiEnd(MICROPHONE_API,
+                    mUid,
+                    IPCThreadState::self()->getCallingPid());
+            }
         }
     }
 
@@ -116,7 +143,8 @@
         mDeviceStartupMs.add(startupMs);
     }
 
-    void updateMinMaxVolume(int64_t durationNs, double deviceVolume) {
+    void updateMinMaxVolume_l(int64_t durationNs, double deviceVolume)
+            REQUIRES(mLock) {
         if (deviceVolume > mMaxVolume) {
             mMaxVolume = deviceVolume;
             mMaxVolumeDurationNs = durationNs;
@@ -133,20 +161,8 @@
 
     // may be called multiple times during an interval
     void logVolume(float volume) {
-        const int64_t timeNs = systemTime();
         std::lock_guard l(mLock);
-        if (mStartVolumeTimeNs == 0) {
-            mDeviceVolume = mVolume = volume;
-            mLastVolumeChangeTimeNs = mStartVolumeTimeNs = timeNs;
-            updateMinMaxVolume(0, mVolume);
-            return;
-        }
-        const int64_t durationNs = timeNs - mLastVolumeChangeTimeNs;
-        updateMinMaxVolume(durationNs, mVolume);
-        mDeviceVolume = (mDeviceVolume * (mLastVolumeChangeTimeNs - mStartVolumeTimeNs) +
-            mVolume * durationNs) / (timeNs - mStartVolumeTimeNs);
-        mVolume = volume;
-        mLastVolumeChangeTimeNs = timeNs;
+        logVolume_l(volume);
     }
 
     // Use absolute numbers returned by AudioTrackShared.
@@ -158,6 +174,7 @@
     }
 
 private:
+
     // no lock required - all arguments and constants.
     void deliverDeviceMetrics(const char *eventName, const char *devices) const {
         mediametrics::LogItem(mMetricsId)
@@ -167,6 +184,23 @@
            .record();
     }
 
+    void logVolume_l(float volume) REQUIRES(mLock) {
+        const int64_t timeNs = systemTime();
+        const int64_t durationNs = mLastVolumeChangeTimeNs == 0
+                ? 0 : timeNs - mLastVolumeChangeTimeNs;
+        if (durationNs > 0) {
+            // See West's algorithm for weighted averages
+            // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
+            mDeviceVolume += (mVolume - mDeviceVolume) * durationNs
+                      / (durationNs + mDeviceTimeNs);
+            mDeviceTimeNs += durationNs;
+            mCumulativeTimeNs += durationNs;
+        }
+        updateMinMaxVolume_l(durationNs, mVolume); // always update.
+        mVolume = volume;
+        mLastVolumeChangeTimeNs = timeNs;
+    }
+
     void deliverCumulativeMetrics(const char *eventName) const REQUIRES(mLock) {
         if (mIntervalCount > 0) {
             mediametrics::LogItem item(mMetricsId);
@@ -199,14 +233,12 @@
         // mDevices is not reset by resetIntervalGroupMetrics.
 
         mIntervalCount = 0;
-        mIntervalStartTimeNs = 0;
         // mCumulativeTimeNs is not reset by resetIntervalGroupMetrics.
         mDeviceTimeNs = 0;
 
         mVolume = 0.f;
         mDeviceVolume = 0.f;
-        mStartVolumeTimeNs = 0;
-        mLastVolumeChangeTimeNs = 0;
+        mLastVolumeChangeTimeNs = 0;  // last time volume logged, cleared on endInterval
         mMinVolume = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
         mMaxVolume = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
         mMinVolumeDurationNs = 0;
@@ -220,9 +252,25 @@
         // do not reset mUnderrunCount - it keeps continuously running for tracks.
     }
 
+    // Meyers' singleton is thread-safe.
+    static const sp<IActivityManager>& getActivityManager() {
+        static const auto activityManager = []() -> sp<IActivityManager> {
+            const sp<IServiceManager> sm(defaultServiceManager());
+            if (sm != nullptr) {
+                 return interface_cast<IActivityManager>(sm->checkService(String16("activity")));
+            }
+            return nullptr;
+        }();
+        return activityManager;
+    }
+
     const std::string mMetricsId;
     const bool        mIsOut;  // if true, than a playback track, otherwise used for record.
 
+    static constexpr int AUDIO_API = 5;
+    static constexpr int MICROPHONE_API = 6;
+    const int         mUid;
+
     mutable           std::mutex mLock;
 
     // Devices in the interval group.
@@ -230,14 +278,12 @@
 
     // Number of intervals and playing time
     int32_t           mIntervalCount GUARDED_BY(mLock) = 0;
-    int64_t           mIntervalStartTimeNs GUARDED_BY(mLock) = 0;
-    int64_t           mCumulativeTimeNs GUARDED_BY(mLock) = 0;
-    int64_t           mDeviceTimeNs GUARDED_BY(mLock) = 0;
+    int64_t           mCumulativeTimeNs GUARDED_BY(mLock) = 0; // total time.
+    int64_t           mDeviceTimeNs GUARDED_BY(mLock) = 0;     // time on device.
 
     // Average volume
-    double            mVolume GUARDED_BY(mLock) = 0.f;
-    double            mDeviceVolume GUARDED_BY(mLock) = 0.f;
-    int64_t           mStartVolumeTimeNs GUARDED_BY(mLock) = 0;
+    double            mVolume GUARDED_BY(mLock) = 0.f;       // last set volume.
+    double            mDeviceVolume GUARDED_BY(mLock) = 0.f; // running average volume.
     int64_t           mLastVolumeChangeTimeNs GUARDED_BY(mLock) = 0;
 
     // Min/Max volume
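// Illustrative sketch (not part of this change): how the GUARDED_BY / REQUIRES
// annotations above are typically used with clang's -Wthread-safety analysis.
// The class below is hypothetical; the macro header is assumed to be libbase's.
#include <mutex>
#include <android-base/thread_annotations.h>

class Counter {
public:
    void increment() {
        std::lock_guard _l(mLock);
        incrementLocked();
    }
private:
    void incrementLocked() REQUIRES(mLock) { ++mValue; }  // caller must hold mLock
    mutable std::mutex mLock;
    int mValue GUARDED_BY(mLock) = 0;                     // only accessed with mLock held
};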
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 6135020..7d2c4db 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -118,7 +118,7 @@
         mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
         mPortId(portId),
         mIsInvalid(false),
-        mTrackMetrics(std::move(metricsId), isOut),
+        mTrackMetrics(std::move(metricsId), isOut, clientUid),
         mCreatorPid(creatorPid)
 {
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -162,11 +162,12 @@
     }
 
     if (client != 0) {
-        mCblkMemory = client->heap()->allocate(size);
+        mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{{size},
+                std::string("Track ID: ").append(std::to_string(mId))});
         if (mCblkMemory == 0 ||
                 (mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
             ALOGE("%s(%d): not enough memory for AudioTrack size=%zu", __func__, mId, size);
-            client->heap()->dump("AudioTrack");
+            ALOGE("%s", client->allocator().dump().c_str());
             mCblkMemory.clear();
             return;
         }
@@ -303,7 +304,7 @@
     return NO_ERROR;
 }
 
-AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(sp<ClientProxy> proxy,
+AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
                                                          const ThreadBase& thread,
                                                          const Timeout& timeout)
     : mProxy(proxy)
@@ -334,6 +335,7 @@
     : BnAudioTrack(),
       mTrack(track)
 {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 AudioFlinger::TrackHandle::~TrackHandle() {
@@ -438,7 +440,8 @@
     return Status::ok();
 }
 
-Status AudioFlinger::TrackHandle::getDualMonoMode(media::AudioDualMonoMode* _aidl_return)
+Status AudioFlinger::TrackHandle::getDualMonoMode(
+        media::audio::common::AudioDualMonoMode* _aidl_return)
 {
     audio_dual_mono_mode_t mode = AUDIO_DUAL_MONO_MODE_OFF;
     const status_t status = mTrack->getDualMonoMode(&mode)
@@ -451,7 +454,7 @@
 }
 
 Status AudioFlinger::TrackHandle::setDualMonoMode(
-        media::AudioDualMonoMode mode)
+        media::audio::common::AudioDualMonoMode mode)
 {
     const auto localMonoMode = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(mode));
@@ -475,7 +478,7 @@
 }
 
 Status AudioFlinger::TrackHandle::getPlaybackRateParameters(
-        media::AudioPlaybackRate* _aidl_return)
+        media::audio::common::AudioPlaybackRate* _aidl_return)
 {
     audio_playback_rate_t localPlaybackRate{};
     status_t status = mTrack->getPlaybackRateParameters(&localPlaybackRate)
@@ -488,7 +491,7 @@
 }
 
 Status AudioFlinger::TrackHandle::setPlaybackRateParameters(
-        const media::AudioPlaybackRate& playbackRate)
+        const media::audio::common::AudioPlaybackRate& playbackRate)
 {
     const audio_playback_rate_t localPlaybackRate = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(playbackRate));
@@ -529,10 +532,7 @@
             id, attr.flags);
         return nullptr;
     }
-
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
-    return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
+    return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
@@ -634,7 +634,8 @@
             audio_port_handle_t portId,
             size_t frameCountToBeReady,
             float speed,
-            bool isSpatialized)
+            bool isSpatialized,
+            bool isBitPerfect)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -669,7 +670,8 @@
     mFlushHwPending(false),
     mFlags(flags),
     mSpeed(speed),
-    mIsSpatialized(isSpatialized)
+    mIsSpatialized(isSpatialized),
+    mIsBitPerfect(isBitPerfect)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -791,7 +793,7 @@
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
                         " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -877,7 +879,7 @@
                         "%08X %08X %6u "
                         "%2u %3x %2x "
                         "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u",
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -906,7 +908,8 @@
             fillingStatus,
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
-            (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000
+            (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
+            isBitPerfect() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
@@ -1094,12 +1097,22 @@
                     __func__, mId, (int)mThreadIoHandle);
         }
 
-        // states to reset position info for non-offloaded/direct tracks
-        if (!isOffloaded() && !isDirect()
+        PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+
+        // states to reset position info for pcm tracks
+        if (audio_is_linear_pcm(mFormat)
                 && (state == IDLE || state == STOPPED || state == FLUSHED)) {
             mFrameMap.reset();
+
+            if (!isFastTrack() && (isDirect() || isOffloaded())) {
+                // Start point of track -> sink frame map. If the HAL returns a
+                // frame position smaller than the first written frame in
+                // updateTrackFrameInfo, the timestamp can be interpolated
+                // instead of using a larger value.
+                mFrameMap.push(mAudioTrackServerProxy->framesReleased(),
+                               playbackThread->framesWritten());
+            }
         }
-        PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
         if (isFastTrack()) {
             // refresh fast track underruns on start because that field is never cleared
             // by the fast mixer; furthermore, the same track can be recycled, i.e. start
@@ -1107,10 +1120,10 @@
             mObservedUnderruns = playbackThread->getFastTrackUnderruns(mFastIndex);
         }
         status = playbackThread->addTrack_l(this);
-        if (status == INVALID_OPERATION || status == PERMISSION_DENIED) {
+        if (status == INVALID_OPERATION || status == PERMISSION_DENIED || status == DEAD_OBJECT) {
             triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
             //  restore previous state if start was rejected by policy manager
-            if (status == PERMISSION_DENIED) {
+            if (status == PERMISSION_DENIED || status == DEAD_OBJECT) {
                 mState = state;
             }
         }
@@ -1123,6 +1136,7 @@
                     .mPosition[ExtendedTimestamp::LOCATION_KERNEL];
             mLogLatencyMs = 0.;
         }
+        mLogForceVolumeUpdate = true;  // at least one volume logged for metrics when starting.
 
         if (status == NO_ERROR || status == ALREADY_EXISTS) {
             // for streaming tracks, remove the buffer read stop limit.
@@ -1148,6 +1162,23 @@
     }
     if (status == NO_ERROR) {
         forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+
+        // send format to AudioManager for playback activity monitoring
+        sp<IAudioManager> audioManager = thread->mAudioFlinger->getOrCreateAudioManager();
+        if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+            std::unique_ptr<os::PersistableBundle> bundle =
+                    std::make_unique<os::PersistableBundle>();
+            bundle->putBoolean(String16(kExtraPlayerEventSpatializedKey),
+                               isSpatialized());
+            bundle->putInt(String16(kExtraPlayerEventSampleRateKey), mSampleRate);
+            bundle->putInt(String16(kExtraPlayerEventChannelMaskKey), mChannelMask);
+            status_t result = audioManager->portEvent(mPortId,
+                                                      PLAYER_UPDATE_FORMAT, bundle);
+            if (result != OK) {
+                ALOGE("%s: unable to send playback format for port ID %d, status error %d",
+                      __func__, mPortId, result);
+            }
+        }
     }
     return status;
 }
@@ -1288,8 +1319,9 @@
 // must be called with thread lock held
 void AudioFlinger::PlaybackThread::Track::flushAck()
 {
-    if (!isOffloaded() && !isDirect())
+    if (!isOffloaded() && !isDirect()) {
         return;
+    }
 
     // Clear the client ring buffer so that the app can prime the buffer while paused.
     // Otherwise it might not get cleared until playback is resumed and obtainBuffer() is called.
@@ -1350,25 +1382,7 @@
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
 {
-    sp<VolumeShaper::Configuration> newConfiguration;
-
-    if (isOffloadedOrDirect()) {
-        const VolumeShaper::Configuration::OptionFlag optionFlag
-            = configuration->getOptionFlags();
-        if ((optionFlag & VolumeShaper::Configuration::OPTION_FLAG_CLOCK_TIME) == 0) {
-            ALOGW("%s(%d): %s tracks do not support frame counted VolumeShaper,"
-                    " using clock time instead",
-                    __func__, mId,
-                    isOffloaded() ? "Offload" : "Direct");
-            newConfiguration = new VolumeShaper::Configuration(*configuration);
-            newConfiguration->setOptionFlags(
-                VolumeShaper::Configuration::OptionFlag(optionFlag
-                        | VolumeShaper::Configuration::OPTION_FLAG_CLOCK_TIME));
-        }
-    }
-
-    VolumeShaper::Status status = mVolumeHandler->applyVolumeShaper(
-            (newConfiguration.get() != nullptr ? newConfiguration : configuration), operation);
+    VolumeShaper::Status status = mVolumeHandler->applyVolumeShaper(configuration, operation);
 
     if (isOffloadedOrDirect()) {
         // Signal thread to fetch new volume.
@@ -1389,17 +1403,29 @@
     return mVolumeHandler->getVolumeShaperState(id);
 }
 
-void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volume)
+void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volumeLeft, float volumeRight)
 {
+    mFinalVolumeLeft = volumeLeft;
+    mFinalVolumeRight = volumeRight;
+    const float volume = (volumeLeft + volumeRight) * 0.5f;
     if (mFinalVolume != volume) { // Compare to an epsilon if too many meaningless updates
         mFinalVolume = volume;
         setMetadataHasChanged();
-        mTrackMetrics.logVolume(volume);
+        mLogForceVolumeUpdate = true;
+    }
+    if (mLogForceVolumeUpdate) {
+        mLogForceVolumeUpdate = false;
+        mTrackMetrics.logVolume(mFinalVolume);
     }
 }
 
 void AudioFlinger::PlaybackThread::Track::copyMetadataTo(MetadataInserter& backInserter) const
 {
+    // Do not forward metadata for PatchTrack with unspecified stream type
+    if (mStreamType == AUDIO_STREAM_PATCH) {
+        return;
+    }
+
     playback_track_metadata_v7_t metadata;
     metadata.base = {
             .usage = mAttr.usage,
@@ -1460,17 +1486,59 @@
         }
     }
 
-    metadata.channel_mask = mChannelMask,
+    metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
 }
 
-void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
-    forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
-    mTeePatches = std::move(teePatches);
-    if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
-            mState == TrackBase::STOPPING_1) {
-        forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+void AudioFlinger::PlaybackThread::Track::updateTeePatches() {
+    if (mTeePatchesToUpdate.has_value()) {
+        forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+        mTeePatches = mTeePatchesToUpdate.value();
+        if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
+                mState == TrackBase::STOPPING_1) {
+            forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+        }
+        mTeePatchesToUpdate.reset();
+    }
+}
+
+void AudioFlinger::PlaybackThread::Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+    ALOGW_IF(mTeePatchesToUpdate.has_value(),
+             "%s, existing tee patches to update will be ignored", __func__);
+    mTeePatchesToUpdate = std::move(teePatchesToUpdate);
+}
+
+// must be called with player thread lock held
+void AudioFlinger::PlaybackThread::Track::processMuteEvent_l(
+        const sp<IAudioManager>& audioManager, mute_state_t muteState)
+{
+    if (mMuteState == muteState) {
+        // mute state did not change, do nothing
+        return;
+    }
+
+    status_t result = UNKNOWN_ERROR;
+    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+        if (mMuteEventExtras == nullptr) {
+            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+        }
+        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+                                 static_cast<int>(muteState));
+
+        result = audioManager->portEvent(mPortId,
+                                         PLAYER_UPDATE_MUTED,
+                                         mMuteEventExtras);
+    }
+
+    if (result == OK) {
+        mMuteState = muteState;
+    } else {
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+              __func__,
+              id(),
+              mPortId,
+              result);
     }
 }
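// Illustrative sketch (not part of this change): the deferred-update pattern
// behind setTeePatchesToUpdate() / updateTeePatches() above -- stash the new
// value in a std::optional off-thread, then apply it on the owning thread.
// Types are hypothetical.
#include <optional>
#include <vector>

class Connections {
public:
    void setPending(std::vector<int> newConnections) {   // e.g. from a binder thread
        mPending = std::move(newConnections);
    }
    void applyPending() {                                 // on the owning thread
        if (mPending.has_value()) {
            mActive = std::move(*mPending);
            mPending.reset();
        }
    }
private:
    std::vector<int> mActive;
    std::optional<std::vector<int>> mPending;
};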
 
@@ -1793,23 +1861,23 @@
 
 //To be called with thread lock held
 bool AudioFlinger::PlaybackThread::Track::isResumePending() {
-
-    if (mState == RESUMING)
+    if (mState == RESUMING) {
         return true;
+    }
     /* Resume is pending if track was stopping before pause was called */
     if (mState == STOPPING_1 &&
-        mResumeToStopping)
+        mResumeToStopping) {
         return true;
+    }
 
     return false;
 }
 
 //To be called with thread lock held
 void AudioFlinger::PlaybackThread::Track::resumeAck() {
-
-
-    if (mState == RESUMING)
+    if (mState == RESUMING) {
         mState = ACTIVE;
+    }
 
     // Other possibility of pending resume is stopping_1 state
     // Do not update the state from stopping as this prevents
@@ -1898,9 +1966,7 @@
     }
 }
 
-binder::Status AudioFlinger::PlaybackThread::Track::AudioVibrationController::mute(
-        /*out*/ bool *ret) {
-    *ret = false;
+bool AudioFlinger::PlaybackThread::Track::AudioVibrationController::setMute(bool muted) {
     sp<ThreadBase> thread = mTrack->mThread.promote();
     if (thread != 0) {
         // Lock for updating mHapticPlaybackEnabled.
@@ -1908,27 +1974,24 @@
         PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
         if ((mTrack->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
                 && playbackThread->mHapticChannelCount > 0) {
-            mTrack->setHapticPlaybackEnabled(false);
-            *ret = true;
+            ALOGD("%s, haptic playback was %s for track %d",
+                    __func__, muted ? "muted" : "unmuted", mTrack->id());
+            mTrack->setHapticPlaybackEnabled(!muted);
+            return true;
         }
     }
+    return false;
+}
+
+binder::Status AudioFlinger::PlaybackThread::Track::AudioVibrationController::mute(
+        /*out*/ bool *ret) {
+    *ret = setMute(true);
     return binder::Status::ok();
 }
 
 binder::Status AudioFlinger::PlaybackThread::Track::AudioVibrationController::unmute(
         /*out*/ bool *ret) {
-    *ret = false;
-    sp<ThreadBase> thread = mTrack->mThread.promote();
-    if (thread != 0) {
-        // Lock for updating mHapticPlaybackEnabled.
-        Mutex::Autolock _l(thread->mLock);
-        PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
-        if ((mTrack->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                && playbackThread->mHapticChannelCount > 0) {
-            mTrack->setHapticPlaybackEnabled(true);
-            *ret = true;
-        }
-    }
+    *ret = setMute(false);
     return binder::Status::ok();
 }
 
@@ -2002,18 +2065,50 @@
 
 ssize_t AudioFlinger::PlaybackThread::OutputTrack::write(void* data, uint32_t frames)
 {
-    Buffer *pInBuffer;
-    Buffer inBuffer;
-    bool outputBufferFull = false;
-    inBuffer.frameCount = frames;
-    inBuffer.raw = data;
-
-    uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
-
     if (!mActive && frames != 0) {
-        (void) start();
+        sp<ThreadBase> thread = mThread.promote();
+        if (thread != nullptr && thread->standby()) {
+            // preload one silent buffer to trigger mixer on start()
+            ClientProxy::Buffer buf { .mFrameCount = mClientProxy->getStartThresholdInFrames() };
+            status_t status = mClientProxy->obtainBuffer(&buf);
+            if (status != NO_ERROR && status != NOT_ENOUGH_DATA && status != WOULD_BLOCK) {
+                ALOGE("%s(%d): could not obtain buffer on start", __func__, mId);
+                return 0;
+            }
+            memset(buf.mRaw, 0, buf.mFrameCount * mFrameSize);
+            mClientProxy->releaseBuffer(&buf);
+
+            (void) start();
+
+            // Wait for the HAL stream to start before sending actual audio. Doing this on
+            // each OutputTrack ensures that playback starts on all output streams in a
+            // synchronized manner. If another OutputTrack has already started, it may
+            // underrun, but this is acceptable because only silence has been played so far
+            // and the retry count on OutputTrack is very high.
+            auto pt = static_cast<PlaybackThread *>(thread.get());
+            if (!pt->waitForHalStart()) {
+                ALOGW("%s(%d): timeout waiting for thread to exit standby", __func__, mId);
+                stop();
+                return 0;
+            }
+
+            // Enqueue the first buffer and exit so that the other OutputTracks also start
+            // before write() is called again and this buffer is actually consumed.
+            Buffer firstBuffer;
+            firstBuffer.frameCount = frames;
+            firstBuffer.raw = data;
+            queueBuffer(firstBuffer);
+            return frames;
+        } else {
+            (void) start();
+        }
     }
 
+    Buffer *pInBuffer;
+    Buffer inBuffer;
+    inBuffer.frameCount = frames;
+    inBuffer.raw = data;
+    uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
     while (waitTimeLeftMs) {
         // First write pending buffers, then new data
         if (mBufferQueue.size()) {
@@ -2034,7 +2129,6 @@
                 ALOGV("%s(%d): thread %d no more output buffers; status %d",
                         __func__, mId,
                         (int)mThreadIoHandle, status);
-                outputBufferFull = true;
                 break;
             }
             uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2082,22 +2176,7 @@
     if (inBuffer.frameCount) {
         sp<ThreadBase> thread = mThread.promote();
         if (thread != 0 && !thread->standby()) {
-            if (mBufferQueue.size() < kMaxOverFlowBuffers) {
-                pInBuffer = new Buffer;
-                pInBuffer->mBuffer = malloc(inBuffer.frameCount * mFrameSize);
-                pInBuffer->frameCount = inBuffer.frameCount;
-                pInBuffer->raw = pInBuffer->mBuffer;
-                memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
-                mBufferQueue.add(pInBuffer);
-                ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
-                        (int)mThreadIoHandle, mBufferQueue.size());
-                // audio data is consumed (stored locally); set frameCount to 0.
-                inBuffer.frameCount = 0;
-            } else {
-                ALOGW("%s(%d): thread %d no more overflow buffers",
-                        __func__, mId, (int)mThreadIoHandle);
-                // TODO: return error for this.
-            }
+            queueBuffer(inBuffer);
         }
     }
 
@@ -2110,6 +2189,29 @@
     return frames - inBuffer.frameCount;  // number of frames consumed.
 }
 
+void AudioFlinger::PlaybackThread::OutputTrack::queueBuffer(Buffer& inBuffer) {
+
+    if (mBufferQueue.size() < kMaxOverFlowBuffers) {
+        Buffer *pInBuffer = new Buffer;
+        const size_t bufferSize = inBuffer.frameCount * mFrameSize;
+        pInBuffer->mBuffer = malloc(bufferSize);
+        LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
+                "%s: Unable to malloc size %zu", __func__, bufferSize);
+        pInBuffer->frameCount = inBuffer.frameCount;
+        pInBuffer->raw = pInBuffer->mBuffer;
+        memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
+        mBufferQueue.add(pInBuffer);
+        ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
+                (int)mThreadIoHandle, mBufferQueue.size());
+        // audio data is consumed (stored locally); set frameCount to 0.
+        inBuffer.frameCount = 0;
+    } else {
+        ALOGW("%s(%d): thread %d no more overflow buffers",
+                __func__, mId, (int)mThreadIoHandle);
+        // TODO: return error for this.
+    }
+}
+
 void AudioFlinger::PlaybackThread::OutputTrack::copyMetadataTo(MetadataInserter& backInserter) const
 {
     std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
@@ -2248,7 +2350,7 @@
     buf.mFrameCount = buffer->frameCount;
     buf.mRaw = buffer->raw;
     mPeerProxy->releaseBuffer(&buf);
-    TrackBase::releaseBuffer(buffer);
+    TrackBase::releaseBuffer(buffer); // Note: this is the base class.
 }
 
 status_t AudioFlinger::PlaybackThread::PatchTrack::obtainBuffer(Proxy::Buffer* buffer,
@@ -2314,6 +2416,7 @@
     : BnAudioRecord(),
     mRecordTrack(recordTrack)
 {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 AudioFlinger::RecordHandle::~RecordHandle() {
@@ -2339,15 +2442,9 @@
 }
 
 binder::Status AudioFlinger::RecordHandle::getActiveMicrophones(
-        std::vector<media::MicrophoneInfoData>* activeMicrophones) {
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones) {
     ALOGV("%s()", __func__);
-    std::vector<media::MicrophoneInfo> mics;
-    status_t status = mRecordTrack->getActiveMicrophones(&mics);
-    activeMicrophones->resize(mics.size());
-    for (size_t i = 0; status == OK && i < mics.size(); ++i) {
-       status = mics[i].writeToParcelable(&activeMicrophones->at(i));
-    }
-    return binderStatusFromStatusT(status);
+    return binderStatusFromStatusT(mRecordTrack->getActiveMicrophones(activeMicrophones));
 }
 
 binder::Status AudioFlinger::RecordHandle::setPreferredMicrophoneDirection(
@@ -2667,7 +2764,7 @@
 }
 
 status_t AudioFlinger::RecordThread::RecordTrack::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo>* activeMicrophones)
+        std::vector<media::MicrophoneInfoFw>* activeMicrophones)
 {
     sp<ThreadBase> thread = mThread.promote();
     if (thread != 0) {
@@ -2730,6 +2827,25 @@
     }
 }
 
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+
+    // Do not forward PatchRecord metadata with unspecified audio source
+    if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+        return;
+    }
+
+    // No track is invalid as this is called after prepareTrack_l in the same critical section
+    record_track_metadata_v7_t metadata;
+    metadata.base = {
+            .source = mAttr.source,
+            .gain = 1, // capture tracks do not have volumes
+    };
+    metadata.channel_mask = mChannelMask;
+    strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+    *backInserter++ = metadata;
+}
 
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
@@ -2743,9 +2859,10 @@
                                                      void *buffer,
                                                      size_t bufferSize,
                                                      audio_input_flags_t flags,
-                                                     const Timeout& timeout)
+                                                     const Timeout& timeout,
+                                                     audio_source_t source)
     :   RecordTrack(recordThread, NULL,
-                audio_attributes_t{} /* currently unused for patch track */,
+                audio_attributes_t{ .source = source },
                 sampleRate, format, channelMask, frameCount,
                 buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
                 audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2847,7 +2964,7 @@
 {
     void *ptr = nullptr;
     (void)posix_memalign(&ptr, alignment, size);
-    return std::unique_ptr<void, decltype(free)*>(ptr, free);
+    return {ptr, free};
 }
 
 AudioFlinger::RecordThread::PassthruPatchRecord::PassthruPatchRecord(
@@ -2856,9 +2973,10 @@
         audio_channel_mask_t channelMask,
         audio_format_t format,
         size_t frameCount,
-        audio_input_flags_t flags)
+        audio_input_flags_t flags,
+        audio_source_t source)
         : PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
-                nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+                nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
           mPatchRecordAudioBufferProvider(*this),
           mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
           mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
@@ -3085,6 +3203,38 @@
 {
 }
 
+void AudioFlinger::MmapThread::MmapTrack::processMuteEvent_l(
+        const sp<IAudioManager>& audioManager, mute_state_t muteState)
+{
+    if (mMuteState == muteState) {
+        // mute state did not change, do nothing
+        return;
+    }
+
+    status_t result = UNKNOWN_ERROR;
+    if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+        if (mMuteEventExtras == nullptr) {
+            mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+        }
+        mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+                                 static_cast<int>(muteState));
+
+        result = audioManager->portEvent(mPortId,
+                                         PLAYER_UPDATE_MUTED,
+                                         mMuteEventExtras);
+    }
+
+    if (result == OK) {
+        mMuteState = muteState;
+    } else {
+        ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+              __func__,
+              id(),
+              mPortId,
+              result);
+    }
+}
+
 void AudioFlinger::MmapThread::MmapTrack::appendDumpHeader(String8& result)
 {
     result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s\n",
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
new file mode 100644
index 0000000..0a8c8be
--- /dev/null
+++ b/services/audioflinger/sounddose/Android.bp
@@ -0,0 +1,53 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_base_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_library {
+    name: "libsounddose",
+
+    double_loadable: true,
+
+    defaults: [
+        "latest_android_media_audio_common_types_ndk_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+        "latest_android_hardware_audio_sounddose_ndk_shared",
+    ],
+
+    srcs: [
+        "SoundDoseManager.cpp",
+    ],
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "libaudio_aidl_conversion_common_ndk",
+        "libaudiofoundation",
+        "libaudioutils",
+        "libbase",
+        "libbinder",
+        "libbinder_ndk",
+        "liblog",
+        "libutils",
+    ],
+
+    header_libs: [
+        "libaudioutils_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-DBACKEND_NDK",
+    ],
+}
+
+cc_library_headers {
+    name: "libsounddose_headers",
+    host_supported: true,
+    device_supported: true,
+    export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
new file mode 100644
index 0000000..a114a38
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -0,0 +1,568 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoundDoseManager"
+
+#include "SoundDoseManager.h"
+
+#include "android/media/SoundDoseRecord.h"
+#include <android-base/stringprintf.h>
+#include <media/AidlConversionCppNdk.h>
+#include <cinttypes>
+#include <ctime>
+#include <utils/Log.h>
+
+namespace android {
+
+using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
+
+namespace {
+
+int64_t getMonotonicSecond() {
+    struct timespec now_ts;
+    if (clock_gettime(CLOCK_MONOTONIC, &now_ts) != 0) {
+        ALOGE("%s: cannot get timestamp", __func__);
+        return -1;
+    }
+    return now_ts.tv_sec;
+}
+
+}  // namespace
+
+sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
+        audio_port_handle_t deviceId, audio_io_handle_t streamHandle, uint32_t sampleRate,
+        size_t channelCount, audio_format_t format) {
+    std::lock_guard _l(mLock);
+
+    if (mHalSoundDose != nullptr && mEnabledCsd) {
+        ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
+        return nullptr;
+    }
+
+    auto streamProcessor = mActiveProcessors.find(streamHandle);
+    if (streamProcessor != mActiveProcessors.end()) {
+        auto processor = streamProcessor->second.promote();
+        // if processor is nullptr it means it was removed by the playback
+        // thread and can be replaced in the mActiveProcessors map
+        if (processor != nullptr) {
+            ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
+            const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+            if (activeTypeIt != mActiveDeviceTypes.end()) {
+                processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+            }
+            processor->setDeviceId(deviceId);
+            processor->setOutputRs2UpperBound(mRs2UpperBound);
+            return processor;
+        }
+    }
+
+    ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
+    sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
+            sampleRate, channelCount, format, this, deviceId, mRs2UpperBound);
+    const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+    if (activeTypeIt != mActiveDeviceTypes.end()) {
+        melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+    }
+    mActiveProcessors[streamHandle] = melProcessor;
+    return melProcessor;
+}
+
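// Illustrative sketch (not part of this change): the cache lookup in
// getOrCreateProcessorForDevice() above -- promote the stored wp<> and only
// create a new object when the cached one has already been destroyed.
// Worker is a hypothetical type.
#include <map>
#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

struct Worker : public android::RefBase {};

android::sp<Worker> getOrCreate(std::map<int, android::wp<Worker>>& cache, int key) {
    auto it = cache.find(key);
    if (it != cache.end()) {
        if (android::sp<Worker> existing = it->second.promote()) {
            return existing;                 // still alive, reuse it
        }
    }
    auto fresh = android::sp<Worker>::make();
    cache[key] = fresh;                      // keep only a weak reference here
    return fresh;
}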
+bool SoundDoseManager::setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose) {
+    ALOGV("%s", __func__);
+
+    {
+        std::lock_guard _l(mLock);
+
+        mHalSoundDose = halSoundDose;
+        if (halSoundDose == nullptr) {
+            ALOGI("%s: passed ISoundDose object is null, switching to internal CSD", __func__);
+            return false;
+        }
+
+        if (!mHalSoundDose->setOutputRs2UpperBound(mRs2UpperBound).isOk()) {
+            ALOGW("%s: Cannot set RS2 value for momentary exposure %f",
+                  __func__,
+                  mRs2UpperBound);
+        }
+
+        // initialize the HAL sound dose callback lazily
+        if (mHalSoundDoseCallback == nullptr) {
+            mHalSoundDoseCallback =
+                ndk::SharedRefBase::make<HalSoundDoseCallback>(this);
+        }
+    }
+
+    auto status = halSoundDose->registerSoundDoseCallback(mHalSoundDoseCallback);
+    if (!status.isOk()) {
+        // Not a warning since this can happen if the callback was registered before
+        ALOGI("%s: Cannot register HAL sound dose callback with status message: %s",
+              __func__,
+              status.getMessage());
+    }
+
+    return true;
+}
+
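// Illustrative sketch (not part of this change): setHalSoundDoseInterface()
// above copies what it needs under mLock and performs the outbound
// registerSoundDoseCallback() call with the lock released. A generic form of
// that pattern, with hypothetical types:
#include <memory>
#include <mutex>

struct Listener { void notify(int value) { (void)value; } };

class Publisher {
public:
    void publish(int value) {
        std::shared_ptr<Listener> listener;
        {
            std::lock_guard _l(mLock);   // update state under the lock...
            mLast = value;
            listener = mListener;
        }
        if (listener != nullptr) {       // ...call out with the lock released
            listener->notify(value);
        }
    }
private:
    std::mutex mLock;
    int mLast = 0;
    std::shared_ptr<Listener> mListener;
};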
+void SoundDoseManager::setOutputRs2UpperBound(float rs2Value) {
+    ALOGV("%s", __func__);
+    std::lock_guard _l(mLock);
+
+    if (mHalSoundDose != nullptr) {
+        // using the HAL sound dose interface
+        if (!mHalSoundDose->setOutputRs2UpperBound(rs2Value).isOk()) {
+            ALOGE("%s: Cannot set RS2 value for momentary exposure %f", __func__, rs2Value);
+            return;
+        }
+        mRs2UpperBound = rs2Value;
+        return;
+    }
+
+    for (auto& streamProcessor : mActiveProcessors) {
+        sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
+        if (processor != nullptr) {
+            status_t result = processor->setOutputRs2UpperBound(rs2Value);
+            if (result != NO_ERROR) {
+                ALOGW("%s: could not set RS2 upper bound %f for stream %d", __func__, rs2Value,
+                      streamProcessor.first);
+                return;
+            }
+            mRs2UpperBound = rs2Value;
+        }
+    }
+}
+
+void SoundDoseManager::removeStreamProcessor(audio_io_handle_t streamHandle) {
+    std::lock_guard _l(mLock);
+    auto callbackToRemove = mActiveProcessors.find(streamHandle);
+    if (callbackToRemove != mActiveProcessors.end()) {
+        mActiveProcessors.erase(callbackToRemove);
+    }
+}
+
+audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
+    std::lock_guard _l(mLock);
+
+    audio_devices_t type;
+    std::string address;
+    auto result = aidl::android::aidl2legacy_AudioDevice_audio_device(
+            audioDevice, &type, &address);
+    if (result != NO_ERROR) {
+        ALOGE("%s: could not convert from AudioDevice to AudioDeviceTypeAddr", __func__);
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+
+    auto adt = AudioDeviceTypeAddr(type, address);
+    auto deviceIt = mActiveDevices.find(adt);
+    if (deviceIt == mActiveDevices.end()) {
+        ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
+        return AUDIO_PORT_HANDLE_NONE;
+    }
+    return deviceIt->second;
+}
+
+void SoundDoseManager::mapAddressToDeviceId(const AudioDeviceTypeAddr& adt,
+                                            const audio_port_handle_t deviceId) {
+    std::lock_guard _l(mLock);
+    ALOGI("%s: map address: %d to device id: %d", __func__, adt.mType, deviceId);
+    mActiveDevices[adt] = deviceId;
+    mActiveDeviceTypes[deviceId] = adt.mType;
+}
+
+void SoundDoseManager::clearMapDeviceIdEntries(audio_port_handle_t deviceId) {
+    std::lock_guard _l(mLock);
+    for (auto activeDevice = mActiveDevices.begin(); activeDevice != mActiveDevices.end();) {
+        if (activeDevice->second == deviceId) {
+            ALOGI("%s: clear mapping type: %d to deviceId: %d",
+                  __func__, activeDevice->first.mType, deviceId);
+            activeDevice = mActiveDevices.erase(activeDevice);
+            continue;
+        }
+        ++activeDevice;
+    }
+    mActiveDeviceTypes.erase(deviceId);
+}
+
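// Illustrative sketch (not part of this change): the erase-while-iterating
// idiom used by clearMapDeviceIdEntries() above. Since C++11, std::map::erase
// returns the iterator following the removed element.
#include <map>

void eraseByValue(std::map<int, int>& m, int value) {
    for (auto it = m.begin(); it != m.end();) {
        if (it->second == value) {
            it = m.erase(it);   // advance via the returned iterator
        } else {
            ++it;
        }
    }
}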
+ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onMomentaryExposureWarning(
+        float in_currentDbA, const AudioDevice& in_audioDevice) {
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager == nullptr) {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    }
+
+    std::shared_ptr<ISoundDose> halSoundDose;
+    soundDoseManager->getHalSoundDose(&halSoundDose);
+    if (halSoundDose == nullptr) {
+        ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    }
+
+    auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
+    if (id == AUDIO_PORT_HANDLE_NONE) {
+        ALOGI("%s: no mapped id for audio device with type %d and address %s",
+                __func__, in_audioDevice.type.type,
+                in_audioDevice.address.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+    }
+    soundDoseManager->onMomentaryExposure(in_currentDbA, id);
+
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onNewMelValues(
+        const ISoundDose::IHalSoundDoseCallback::MelRecord& in_melRecord,
+        const AudioDevice& in_audioDevice) {
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager == nullptr) {
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    }
+
+    std::shared_ptr<ISoundDose> halSoundDose;
+    soundDoseManager->getHalSoundDose(&halSoundDose);
+    if (halSoundDose == nullptr) {
+        ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+    }
+
+    auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
+    if (id == AUDIO_PORT_HANDLE_NONE) {
+        ALOGI("%s: no mapped id for audio device with type %d and address %s",
+                __func__, in_audioDevice.type.type,
+                in_audioDevice.address.toString().c_str());
+        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+    }
+    // TODO: introduce timestamp in onNewMelValues callback
+    soundDoseManager->onNewMelValues(in_melRecord.melValues, 0,
+                                     in_melRecord.melValues.size(), id);
+
+    return ndk::ScopedAStatus::ok();
+}
+
+void SoundDoseManager::SoundDose::binderDied(__unused const wp<IBinder>& who) {
+    ALOGV("%s", __func__);
+
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->resetSoundDose();
+    }
+}
+
+binder::Status SoundDoseManager::SoundDose::setOutputRs2UpperBound(float value) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setOutputRs2UpperBound(value);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::resetCsd(
+        float currentCsd, const std::vector<media::SoundDoseRecord>& records) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->resetCsd(currentCsd, records);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::updateAttenuation(float attenuationDB, int device) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->updateAttenuation(attenuationDB, static_cast<audio_devices_t>(device));
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::setCsdEnabled(bool enabled) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setCsdEnabled(enabled);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        std::lock_guard _l(soundDoseManager->mLock);
+        *value = soundDoseManager->mRs2UpperBound;
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::getCsd(float* value) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        *value = soundDoseManager->mMelAggregator->getCsd();
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::forceUseFrameworkMel(bool useFrameworkMel) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setUseFrameworkMel(useFrameworkMel);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::forceComputeCsdOnAllDevices(
+        bool computeCsdOnAllDevices) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setComputeCsdOnAllDevices(computeCsdOnAllDevices);
+    }
+    return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::isSoundDoseHalSupported(bool* value) {
+    ALOGV("%s", __func__);
+    *value = false;
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        *value = soundDoseManager->isSoundDoseHalSupported();
+    }
+    return binder::Status::ok();
+}
+
+void SoundDoseManager::updateAttenuation(float attenuationDB, audio_devices_t deviceType) {
+    std::lock_guard _l(mLock);
+    ALOGV("%s: updating MEL processor attenuation for device type %d to %f",
+            __func__, deviceType, attenuationDB);
+    mMelAttenuationDB[deviceType] = attenuationDB;
+    for (const auto& mp : mActiveProcessors) {
+        auto melProcessor = mp.second.promote();
+        if (melProcessor != nullptr) {
+            auto deviceId = melProcessor->getDeviceId();
+            if (mActiveDeviceTypes[deviceId] == deviceType) {
+                ALOGV("%s: set attenuation for deviceId %d to %f",
+                        __func__, deviceId, attenuationDB);
+                melProcessor->setAttenuation(attenuationDB);
+            }
+        }
+    }
+}
+
+void SoundDoseManager::setCsdEnabled(bool enabled) {
+    ALOGV("%s", __func__);
+
+    std::lock_guard _l(mLock);
+    mEnabledCsd = enabled;
+
+    for (auto& activeEntry : mActiveProcessors) {
+        auto melProcessor = activeEntry.second.promote();
+        if (melProcessor != nullptr) {
+            if (enabled) {
+                melProcessor->resume();
+            } else {
+                melProcessor->pause();
+            }
+        }
+    }
+}
+
+bool SoundDoseManager::isCsdEnabled() {
+    std::lock_guard _l(mLock);
+    return mEnabledCsd;
+}
+
+void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
+    // invalidate any HAL sound dose interface used
+    setHalSoundDoseInterface(nullptr);
+
+    std::lock_guard _l(mLock);
+    mUseFrameworkMel = useFrameworkMel;
+}
+
+bool SoundDoseManager::forceUseFrameworkMel() const {
+    std::lock_guard _l(mLock);
+    return mUseFrameworkMel;
+}
+
+void SoundDoseManager::setComputeCsdOnAllDevices(bool computeCsdOnAllDevices) {
+    std::lock_guard _l(mLock);
+    mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+}
+
+bool SoundDoseManager::forceComputeCsdOnAllDevices() const {
+    std::lock_guard _l(mLock);
+    return mComputeCsdOnAllDevices;
+}
+
+bool SoundDoseManager::isSoundDoseHalSupported() const {
+    if (!mEnabledCsd) {
+        return false;
+    }
+
+    std::shared_ptr<ISoundDose> halSoundDose;
+    getHalSoundDose(&halSoundDose);
+    if (mHalSoundDose == nullptr) {
+        return false;
+    }
+    return true;
+}
+
+void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
+    std::lock_guard _l(mLock);
+    *halSoundDose = mHalSoundDose;
+}
+
+void SoundDoseManager::resetSoundDose() {
+    std::lock_guard lock(mLock);
+    mSoundDose = nullptr;
+}
+
+void SoundDoseManager::resetCsd(float currentCsd,
+                                const std::vector<media::SoundDoseRecord>& records) {
+    std::lock_guard lock(mLock);
+    std::vector<audio_utils::CsdRecord> resetRecords;
+    for (const auto& record : records) {
+        resetRecords.emplace_back(record.timestamp, record.duration, record.value,
+                                  record.averageMel);
+    }
+
+    mMelAggregator->reset(currentCsd, resetRecords);
+}
+
+void SoundDoseManager::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+                                      audio_port_handle_t deviceId) const {
+    ALOGV("%s", __func__);
+
+    sp<media::ISoundDoseCallback> soundDoseCallback;
+    std::vector<audio_utils::CsdRecord> records;
+    float currentCsd;
+    {
+        std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
+            return;
+        }
+
+        int64_t timestampSec = getMonotonicSecond();
+
+        // only for internal callbacks
+        records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
+                deviceId, std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
+                timestampSec - length));
+
+        currentCsd = mMelAggregator->getCsd();
+    }
+
+    soundDoseCallback = getSoundDoseCallback();
+
+    if (records.size() > 0 && soundDoseCallback != nullptr) {
+        std::vector<media::SoundDoseRecord> newRecordsToReport;
+        for (const auto& record : records) {
+            newRecordsToReport.emplace_back(csdRecordToSoundDoseRecord(record));
+        }
+
+        soundDoseCallback->onNewCsdValue(currentCsd, newRecordsToReport);
+    }
+}
+
+sp<media::ISoundDoseCallback> SoundDoseManager::getSoundDoseCallback() const {
+    std::lock_guard _l(mLock);
+    if (mSoundDose == nullptr) {
+        return nullptr;
+    }
+
+    return mSoundDose->mSoundDoseCallback;
+}
+
+void SoundDoseManager::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const {
+    ALOGV("%s: Momentary exposure for device %d triggered: %f MEL", __func__, deviceId, currentMel);
+
+    {
+        std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
+            return;
+        }
+    }
+
+    auto soundDoseCallback = getSoundDoseCallback();
+    if (soundDoseCallback != nullptr) {
+        soundDoseCallback->onMomentaryExposure(currentMel, deviceId);
+    }
+}
+
+sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
+        const sp<media::ISoundDoseCallback>& callback) {
+    ALOGV("%s: Register ISoundDoseCallback", __func__);
+
+    std::lock_guard _l(mLock);
+    if (mSoundDose == nullptr) {
+        mSoundDose = sp<SoundDose>::make(this, callback);
+    }
+    return mSoundDose;
+}
+
+std::string SoundDoseManager::dump() const {
+    std::string output;
+    {
+        std::lock_guard _l(mLock);
+        if (!mEnabledCsd) {
+            base::StringAppendF(&output, "CSD is disabled");
+            return output;
+        }
+    }
+
+    mMelAggregator->foreachCsd([&output](audio_utils::CsdRecord csdRecord) {
+        base::StringAppendF(&output,
+                            "CSD %f with average MEL %f in interval [%" PRId64 ", %" PRId64 "]",
+                            csdRecord.value, csdRecord.averageMel, csdRecord.timestamp,
+                            csdRecord.timestamp + csdRecord.duration);
+        base::StringAppendF(&output, "\n");
+    });
+
+    base::StringAppendF(&output, "\nCached Mel Records:\n");
+    mMelAggregator->foreachCachedMel([&output](const audio_utils::MelRecord& melRecord) {
+        base::StringAppendF(&output, "Continuous MELs for portId=%d, ", melRecord.portId);
+        base::StringAppendF(&output, "starting at timestamp %" PRId64 ": ", melRecord.timestamp);
+
+        for (const auto& mel : melRecord.mels) {
+            base::StringAppendF(&output, "%.2f ", mel);
+        }
+        base::StringAppendF(&output, "\n");
+    });
+
+    return output;
+}
+
+size_t SoundDoseManager::getCachedMelRecordsSize() const {
+    return mMelAggregator->getCachedMelRecordsSize();
+}
+
+media::SoundDoseRecord SoundDoseManager::csdRecordToSoundDoseRecord(
+        const audio_utils::CsdRecord& legacy) {
+    media::SoundDoseRecord soundDoseRecord{};
+    soundDoseRecord.timestamp = legacy.timestamp;
+    soundDoseRecord.duration = legacy.duration;
+    soundDoseRecord.value = legacy.value;
+    soundDoseRecord.averageMel = legacy.averageMel;
+    return soundDoseRecord;
+}
+
+}  // namespace android
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
new file mode 100644
index 0000000..6c02afb
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -0,0 +1,208 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <aidl/android/hardware/audio/core/sounddose/ISoundDose.h>
+#include <aidl/android/media/audio/common/AudioDevice.h>
+#include <android/media/BnSoundDose.h>
+#include <android/media/ISoundDoseCallback.h>
+#include <media/AudioDeviceTypeAddr.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
+#include <binder/Status.h>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
+
+class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
+public:
+    /** CSD is computed with a rolling window of 7 days. */
+    static constexpr int64_t kCsdWindowSeconds = 604800;  // 60s * 60m * 24h * 7d
+    /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
+    static constexpr float kDefaultRs2UpperBound = 100.f;
+
+    SoundDoseManager()
+        : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+          mRs2UpperBound(kDefaultRs2UpperBound) {};
+
+    /**
+     * \brief Creates or gets the MelProcessor assigned to the streamHandle
+     *
+     * \param deviceId          id for the devices where the stream is active.
+     * \param streamHandle      handle to the stream
+     * \param sampleRate        sample rate for the processor
+     * \param channelCount      number of channels to be processed.
+     * \param format            format of the input samples.
+     *
+     * \return MelProcessor assigned to the stream and device id.
+     */
+    sp<audio_utils::MelProcessor> getOrCreateProcessorForDevice(audio_port_handle_t deviceId,
+                                                                audio_io_handle_t streamHandle,
+                                                                uint32_t sampleRate,
+                                                                size_t channelCount,
+                                                                audio_format_t format);
+
+    /**
+     * \brief Removes stream processor when MEL computation is not needed anymore
+     *
+     * \param streamHandle      handle to the stream
+     */
+    void removeStreamProcessor(audio_io_handle_t streamHandle);
+
+    /**
+     * Sets the output RS2 upper bound for momentary exposure warnings. Must not be
+     * higher than 100dBA and not lower than 80dBA.
+     *
+     * \param rs2Value value to use for momentary exposure
+     */
+    void setOutputRs2UpperBound(float rs2Value);
+
+    /**
+     * \brief Registers the interface for passing callbacks to the AudioService and gets
+     * the ISoundDose interface.
+     *
+     * \returns the sound dose binder to send commands to the SoundDoseManager
+     **/
+    sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+    /**
+     * Sets the HAL sound dose interface to use for the MEL computation. Use nullptr
+     * for using the internal MEL computation.
+     *
+     * @return true if setting the HAL sound dose value was successful, false otherwise.
+     */
+    bool setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose);
+
+    /** Returns the cached audio port id from the active devices. */
+    audio_port_handle_t getIdForAudioDevice(
+            const aidl::android::media::audio::common::AudioDevice& audioDevice) const;
+
+    /** Caches mapping between address, device port id and device type. */
+    void mapAddressToDeviceId(const AudioDeviceTypeAddr& adt, const audio_port_handle_t deviceId);
+
+    /** Clear all map entries with passed audio_port_handle_t. */
+    void clearMapDeviceIdEntries(audio_port_handle_t deviceId);
+
+    /** Returns true if CSD is enabled. */
+    bool isCsdEnabled();
+
+    std::string dump() const;
+
+    // used for testing only
+    size_t getCachedMelRecordsSize() const;
+    bool forceUseFrameworkMel() const;
+    bool forceComputeCsdOnAllDevices() const;
+
+    /** Method for converting from audio_utils::CsdRecord to media::SoundDoseRecord. */
+    static media::SoundDoseRecord csdRecordToSoundDoseRecord(const audio_utils::CsdRecord& legacy);
+
+    // ------ Override audio_utils::MelProcessor::MelCallback ------
+    void onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+                        audio_port_handle_t deviceId) const override;
+
+    void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
+
+private:
+    class SoundDose : public media::BnSoundDose,
+                      public IBinder::DeathRecipient {
+    public:
+        SoundDose(SoundDoseManager* manager, const sp<media::ISoundDoseCallback>& callback)
+            : mSoundDoseManager(manager),
+              mSoundDoseCallback(callback) {}
+
+        /** IBinder::DeathRecipient. Listen to the death of ISoundDoseCallback. */
+        virtual void binderDied(const wp<IBinder>& who);
+
+        /** BnSoundDose override */
+        binder::Status setOutputRs2UpperBound(float value) override;
+        binder::Status resetCsd(float currentCsd,
+                                const std::vector<media::SoundDoseRecord>& records) override;
+        binder::Status updateAttenuation(float attenuationDB, int device) override;
+        binder::Status getOutputRs2UpperBound(float* value) override;
+        binder::Status setCsdEnabled(bool enabled) override;
+
+        binder::Status getCsd(float* value) override;
+        binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
+        binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
+        binder::Status isSoundDoseHalSupported(bool* value) override;
+
+        wp<SoundDoseManager> mSoundDoseManager;
+        const sp<media::ISoundDoseCallback> mSoundDoseCallback;
+    };
+
+    class HalSoundDoseCallback : public ISoundDose::BnHalSoundDoseCallback {
+    public:
+        explicit HalSoundDoseCallback(SoundDoseManager* manager)
+            : mSoundDoseManager(manager) {}
+
+        ndk::ScopedAStatus onMomentaryExposureWarning(
+                float in_currentDbA,
+                const aidl::android::media::audio::common::AudioDevice& in_audioDevice) override;
+        ndk::ScopedAStatus onNewMelValues(
+                const ISoundDose::IHalSoundDoseCallback::MelRecord& in_melRecord,
+                const aidl::android::media::audio::common::AudioDevice& in_audioDevice) override;
+
+        wp<SoundDoseManager> mSoundDoseManager;
+    };
+
+    void resetSoundDose();
+
+    void resetCsd(float currentCsd, const std::vector<media::SoundDoseRecord>& records);
+
+    sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
+
+    void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
+    void setCsdEnabled(bool enabled);
+    void setUseFrameworkMel(bool useFrameworkMel);
+    void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
+    bool isSoundDoseHalSupported() const;
+    /** Outputs the HAL sound dose interface, or nullptr if the internal MEL computation is used. */
+    void getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const;
+
+    mutable std::mutex mLock;
+
+    // no need for lock since MelAggregator is thread-safe
+    const sp<audio_utils::MelAggregator> mMelAggregator;
+
+    std::unordered_map<audio_io_handle_t, wp<audio_utils::MelProcessor>> mActiveProcessors
+            GUARDED_BY(mLock);
+
+    // Maps the active device address and type to the device id; also used for managing the
+    // pause/resume logic for deviceIds that should not report MEL values (e.g. those without an
+    // active MUSIC or GAME stream).
+    std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
+    std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
+
+    float mRs2UpperBound GUARDED_BY(mLock);
+    std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
+
+    sp<SoundDose> mSoundDose GUARDED_BY(mLock);
+
+    std::shared_ptr<ISoundDose> mHalSoundDose GUARDED_BY(mLock);
+    std::shared_ptr<HalSoundDoseCallback> mHalSoundDoseCallback GUARDED_BY(mLock);
+
+    bool mUseFrameworkMel GUARDED_BY(mLock) = true;
+    bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
+
+    bool mEnabledCsd GUARDED_BY(mLock) = true;
+};
+
+}  // namespace android
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
new file mode 100644
index 0000000..2a2addf
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -0,0 +1,54 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_audioflinger_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_test {
+    name: "sounddosemanager_tests",
+
+    srcs: [
+        "sounddosemanager_tests.cpp"
+    ],
+
+    defaults: [
+        "latest_android_media_audio_common_types_ndk_static",
+        "latest_android_hardware_audio_core_sounddose_ndk_static",
+        "latest_android_hardware_audio_sounddose_ndk_static",
+    ],
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "libaudiofoundation",
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libaudio_aidl_conversion_common_ndk",
+        "libgmock",
+        "libsounddose",
+    ],
+
+    header_libs: [
+        "libaudioutils_headers",
+        "libsounddose_headers",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+        "-DBACKEND_NDK",
+    ],
+
+    test_suites: [
+        "general-tests",
+    ],
+}
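+
+// Local invocation sketch (assumes a standard atest setup); the module also runs in presubmit
+// via the TEST_MAPPING entry added alongside this file:
+//   atest sounddosemanager_tests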
diff --git a/services/audioflinger/sounddose/tests/TEST_MAPPING b/services/audioflinger/sounddose/tests/TEST_MAPPING
new file mode 100644
index 0000000..dd7fe4d
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "sounddosemanager_tests"
+    }
+  ]
+}
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
new file mode 100644
index 0000000..9fab77d
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoundDoseManager_tests"
+
+#include <SoundDoseManager.h>
+
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <media/AidlConversionCppNdk.h>
+
+namespace android {
+namespace {
+
+using aidl::android::hardware::audio::core::sounddose::BnSoundDose;
+using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
+
+class HalSoundDoseMock : public BnSoundDose {
+public:
+    MOCK_METHOD(ndk::ScopedAStatus, getOutputRs2UpperBound, (float*), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, setOutputRs2UpperBound, (float), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, registerSoundDoseCallback,
+                (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
+};
+
+class SoundDoseManagerTest : public ::testing::Test {
+protected:
+    void SetUp() override {
+        mSoundDoseManager = sp<SoundDoseManager>::make();
+        mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
+
+        ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
+            .WillByDefault([] (float rs2) {
+                EXPECT_EQ(rs2, ISoundDose::DEFAULT_MAX_RS2);
+                return ndk::ScopedAStatus::ok();
+            });
+    }
+
+    sp<SoundDoseManager> mSoundDoseManager;
+    std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
+};
+
+TEST_F(SoundDoseManagerTest, GetProcessorForExistingStream) {
+    sp<audio_utils::MelProcessor> processor1 =
+        mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/1,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+    sp<audio_utils::MelProcessor> processor2 =
+        mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    EXPECT_EQ(processor1, processor2);
+}
+
+TEST_F(SoundDoseManagerTest, RemoveExistingStream) {
+    sp<audio_utils::MelProcessor> processor1 =
+        mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/1,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    mSoundDoseManager->removeStreamProcessor(1);
+    sp<audio_utils::MelProcessor> processor2 =
+        mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+    EXPECT_NE(processor1, processor2);
+}
+
+TEST_F(SoundDoseManagerTest, NewMelValuesCacheNewRecord) {
+    std::vector<float>mels{1, 1};
+
+    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1);
+
+    EXPECT_EQ(mSoundDoseManager->getCachedMelRecordsSize(), size_t{1});
+}
+
+TEST_F(SoundDoseManagerTest, InvalidHalInterfaceIsNotSet) {
+    EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+}
+
+TEST_F(SoundDoseManagerTest, SetHalSoundDoseDisablesNewMelProcessorCallbacks) {
+    EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+        .Times(1)
+        .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+            EXPECT_NE(nullptr, callback);
+            return ndk::ScopedAStatus::ok();
+        });
+
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+    EXPECT_EQ(nullptr, mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+            /*streamHandle=*/1,
+            /*sampleRate*/44100,
+            /*channelCount*/2,
+            /*format*/AUDIO_FORMAT_PCM_FLOAT));
+}
+
+TEST_F(SoundDoseManagerTest, SetHalSoundDoseRegistersHalCallbacks) {
+    EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+        .Times(1)
+        .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+            EXPECT_NE(nullptr, callback);
+            return ndk::ScopedAStatus::ok();
+        });
+
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+}
+
+TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalWithNoAddressIllegalArgument) {
+    std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+    EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+       .Times(1)
+       .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+           halCallback = callback;
+           return ndk::ScopedAStatus::ok();
+       });
+
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+    EXPECT_NE(nullptr, halCallback);
+    AudioDevice audioDevice = {};
+    audioDevice.address.set<AudioDeviceAddress::id>("test");
+    auto status = halCallback->onMomentaryExposureWarning(
+        /*in_currentDbA=*/101.f, audioDevice);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalAfterInternalSelectedReturnsException) {
+    std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+    EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+       .Times(1)
+       .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+           halCallback = callback;
+           return ndk::ScopedAStatus::ok();
+       });
+
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+    EXPECT_NE(nullptr, halCallback);
+    EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+
+    AudioDevice audioDevice = {};
+    audioDevice.address.set<AudioDeviceAddress::id>("test");
+    auto status = halCallback->onMomentaryExposureWarning(
+        /*in_currentDbA=*/101.f, audioDevice);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, OnNewMelValuesFromHalWithNoAddressIllegalArgument) {
+    std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+    EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+    EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+       .Times(1)
+       .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+           halCallback = callback;
+           return ndk::ScopedAStatus::ok();
+       });
+
+    EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+    EXPECT_NE(nullptr, halCallback);
+    AudioDevice audioDevice = {};
+    audioDevice.address.set<AudioDeviceAddress::id>("test");
+    auto status = halCallback->onNewMelValues(/*in_melRecord=*/{}, audioDevice);
+    EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, GetIdReturnsMappedAddress) {
+    const std::string address = "testAddress";
+    const audio_port_handle_t deviceId = 2;
+    const audio_devices_t deviceType = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+    const AudioDeviceTypeAddr adt{deviceType, address};
+    auto audioDevice = aidl::android::legacy2aidl_audio_device_AudioDevice(
+            deviceType, address.c_str());
+    ASSERT_TRUE(audioDevice.ok());
+
+    mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+
+    EXPECT_EQ(deviceId, mSoundDoseManager->getIdForAudioDevice(audioDevice.value()));
+}
+
+TEST_F(SoundDoseManagerTest, GetAfterClearIdReturnsNone) {
+    const std::string address = "testAddress";
+    const audio_devices_t deviceType = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+    const AudioDeviceTypeAddr adt{deviceType, address};
+    const audio_port_handle_t deviceId = 2;
+    auto audioDevice = aidl::android::legacy2aidl_audio_device_AudioDevice(
+            deviceType, address.c_str());
+    ASSERT_TRUE(audioDevice.ok());
+
+    mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+    mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, mSoundDoseManager->getIdForAudioDevice(audioDevice.value()));
+}
+
+TEST_F(SoundDoseManagerTest, GetUnmappedIdReturnsHandleNone) {
+    const std::string address = "testAddress";
+    AudioDevice audioDevice;
+    audioDevice.address.set<AudioDeviceAddress::id>(address);
+
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, mSoundDoseManager->getIdForAudioDevice(audioDevice));
+}
+
+TEST_F(SoundDoseManagerTest, GetDefaultForceComputeCsdOnAllDevices) {
+    EXPECT_FALSE(mSoundDoseManager->forceComputeCsdOnAllDevices());
+}
+
+TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
+    // TODO: for now dogfooding with internal MEL. Revert to false when using the HAL MELs
+    EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
+}
+
+}  // namespace
+}  // namespace android
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
new file mode 100644
index 0000000..17ce8bd
--- /dev/null
+++ b/services/audioflinger/timing/Android.bp
@@ -0,0 +1,28 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_audioflinger_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_library {
+    name: "libaudioflinger_timing",
+
+    host_supported: true,
+
+    srcs: [
+        "MonotonicFrameCounter.cpp",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.cpp b/services/audioflinger/timing/MonotonicFrameCounter.cpp
new file mode 100644
index 0000000..286f549
--- /dev/null
+++ b/services/audioflinger/timing/MonotonicFrameCounter.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MonotonicFrameCounter"
+
+#include <utils/Log.h>
+#include "MonotonicFrameCounter.h"
+
+namespace android::audioflinger {
+
+int64_t MonotonicFrameCounter::updateAndGetMonotonicFrameCount(
+        int64_t newFrameCount, int64_t newTime) {
+    if (newFrameCount < 0 || newTime < 0) {
+        const auto result = getLastReportedFrameCount();
+        ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newTime:%lld,"
+                " using %lld as frameCount",
+                __func__, (long long)newFrameCount, (long long)newTime,
+                (long long)result);
+        return result;
+    }
+    if (newFrameCount < mLastReceivedFrameCount) {
+        const auto result = getLastReportedFrameCount();
+        ALOGW("%s: retrograde newFrameCount:%lld < mLastReceivedFrameCount:%lld,"
+                " ignoring, returning %lld as frameCount",
+                __func__, (long long) newFrameCount, (long long)mLastReceivedFrameCount,
+                (long long)result);
+        return result;
+    }
+    // Input looks fine.
+    // For better granularity, we could consider extrapolation on newTime.
+    mLastReceivedFrameCount = newFrameCount;
+    return getLastReportedFrameCount();
+}
+
+int64_t MonotonicFrameCounter::onFlush() {
+    ALOGV("%s: Updating mOffsetFrameCount:%lld with mLastReceivedFrameCount:%lld",
+            __func__, (long long)mOffsetFrameCount, (long long)mLastReceivedFrameCount);
+    mOffsetFrameCount += mLastReceivedFrameCount;
+    mLastReceivedFrameCount = 0;
+    return mOffsetFrameCount;
+}
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.h b/services/audioflinger/timing/MonotonicFrameCounter.h
new file mode 100644
index 0000000..0ea9510
--- /dev/null
+++ b/services/audioflinger/timing/MonotonicFrameCounter.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+namespace android::audioflinger {
+
+/**
+ * MonotonicFrameCounter
+ *
+ * Advances a monotonic frame count based on input timestamp pairs (frames, time).
+ * It takes into account a possible flush, which will "reset" the frames to 0.
+ *
+ * This class is used to drive VolumeShaper volume automation.
+ *
+ * The timestamps provided in updateAndGetMonotonicFrameCount should
+ * be of sufficient granularity for the purpose at hand.  Currently no temporal
+ * extrapolation is done.
+ *
+ * This class is not thread safe.
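+ *
+ * A minimal illustration (t0, t1 are placeholder timestamps) of how the reported count stays
+ * monotonic across a flush:
+ *
+ *   MonotonicFrameCounter counter;
+ *   counter.updateAndGetMonotonicFrameCount(100, t0);  // returns 100
+ *   counter.onFlush();                                 // received counts restart at 0
+ *   counter.updateAndGetMonotonicFrameCount(50, t1);   // returns 150, still monotonic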
+ */
+class MonotonicFrameCounter {
+public:
+    /**
+     * Receives a new timestamp pair (frames, time) and returns a monotonic frameCount.
+     *
+     * \param newFrameCount the frameCount currently played.
+     * \param newTime       the time corresponding to the frameCount.
+     * \return              a monotonic frame count usable for automation timing.
+     */
+    int64_t updateAndGetMonotonicFrameCount(int64_t newFrameCount, int64_t newTime);
+
+    /**
+     * Notifies when a flush occurs, whereupon the received frameCount sequence restarts at 0.
+     *
+     * \return the last reported frameCount.
+     */
+    int64_t onFlush();
+
+    /**
+     * Returns the received (input) frameCount to reported (output) frameCount offset.
+     *
+     * This offset is sufficient to ensure monotonicity after flush is called;
+     * suitability for any other purpose is *not* guaranteed.
+     */
+    int64_t getOffsetFrameCount() const { return mOffsetFrameCount; }
+
+    /**
+     * Returns the last received frameCount.
+     */
+    int64_t getLastReceivedFrameCount() const {
+        return mLastReceivedFrameCount;
+    }
+
+    /**
+     * Returns the last reported frameCount from updateAndGetMonotonicFrameCount().
+     */
+    int64_t getLastReportedFrameCount() const {
+        // This is consistent after onFlush().
+        return mOffsetFrameCount + mLastReceivedFrameCount;
+    }
+
+private:
+    int64_t mOffsetFrameCount = 0;
+    int64_t mLastReceivedFrameCount = 0;
+};
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
new file mode 100644
index 0000000..29267a6
--- /dev/null
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -0,0 +1,29 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_audioflinger_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_test {
+    name: "monotonicframecounter_tests",
+
+    host_supported: true,
+
+    srcs: [
+        "monotonicframecounter_tests.cpp"
+    ],
+
+    static_libs: [
+        "libaudioflinger_timing",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
\ No newline at end of file
diff --git a/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp b/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp
new file mode 100644
index 0000000..7aaa4fa
--- /dev/null
+++ b/services/audioflinger/timing/tests/monotonicframecounter_tests.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "monotonicframecounter_tests"
+
+#include "../MonotonicFrameCounter.h"
+
+#include <gtest/gtest.h>
+
+using namespace android::audioflinger;
+
+namespace {
+
+TEST(MonotonicFrameCounterTest, SimpleProgression) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    const std::vector<std::pair<int64_t, int64_t>> frametimes{
+        {0, 0}, {100, 100}, {200, 200},
+    };
+
+    int64_t maxReceivedFrameCount = 0;
+    for (const auto& p : frametimes) {
+        maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+        ASSERT_EQ(p.first,
+                monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second));
+    }
+    ASSERT_EQ(maxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+TEST(MonotonicFrameCounterTest, InvalidData) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    const std::vector<std::pair<int64_t, int64_t>> frametimes{
+        {-1, -1}, {100, 100}, {-1, -1}, {90, 90}, {200, 200},
+    };
+
+    int64_t prevFrameCount = 0;
+    int64_t maxReceivedFrameCount = 0;
+    for (const auto& p : frametimes) {
+        maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+        const int64_t frameCount =
+                monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second);
+        // we must be monotonic
+        ASSERT_GE(frameCount, prevFrameCount);
+        prevFrameCount = frameCount;
+    }
+    ASSERT_EQ(maxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+TEST(MonotonicFrameCounterTest, Flush) {
+    MonotonicFrameCounter monotonicFrameCounter;
+
+    // Different playback sequences are separated by a flush.
+    const std::vector<std::vector<std::pair<int64_t, int64_t>>> frameset{
+        {{-1, -1}, {100, 10}, {200, 20}, {300, 30},},
+        {{-1, -1}, {100, 10}, {200, 20}, {300, 30},},
+        {{-1, -1}, {100, 100}, {-1, -1}, {90, 90}, {200, 200},},
+    };
+
+    int64_t prevFrameCount = 0;
+    int64_t maxReceivedFrameCount = 0;
+    int64_t sumMaxReceivedFrameCount = 0;
+    for (const auto& v : frameset) {
+        for (const auto& p : v) {
+            maxReceivedFrameCount = std::max(maxReceivedFrameCount, p.first);
+            const int64_t frameCount =
+                    monotonicFrameCounter.updateAndGetMonotonicFrameCount(p.first, p.second);
+            // we must be monotonic
+            ASSERT_GE(frameCount, prevFrameCount);
+            prevFrameCount = frameCount;
+        }
+        monotonicFrameCounter.onFlush();
+        sumMaxReceivedFrameCount += maxReceivedFrameCount;
+        maxReceivedFrameCount = 0;
+    }
+
+    // On flush we keep a monotonic reported framecount
+    // even though the received framecount resets to 0.
+    // The requirement of equality here is implementation dependent.
+    ASSERT_EQ(sumMaxReceivedFrameCount, monotonicFrameCounter.getLastReportedFrameCount());
+}
+
+}  // namespace
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 496591a..da0df5f 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIOPOLICY_INTERFACE_H
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
+#include <android/media/DeviceConnectedState.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -134,17 +135,18 @@
     // request an output appropriate for playback of the supplied stream type and parameters
     virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
     virtual status_t getOutputForAttr(const audio_attributes_t *attr,
-                                        audio_io_handle_t *output,
-                                        audio_session_t session,
-                                        audio_stream_type_t *stream,
-                                        const AttributionSourceState& attributionSouce,
-                                        const audio_config_t *config,
-                                        audio_output_flags_t *flags,
-                                        audio_port_handle_t *selectedDeviceId,
-                                        audio_port_handle_t *portId,
-                                        std::vector<audio_io_handle_t> *secondaryOutputs,
-                                        output_type_t *outputType,
-                                        bool *isSpatialized) = 0;
+                                      audio_io_handle_t *output,
+                                      audio_session_t session,
+                                      audio_stream_type_t *stream,
+                                      const AttributionSourceState& attributionSource,
+                                      audio_config_t *config,
+                                      audio_output_flags_t *flags,
+                                      audio_port_handle_t *selectedDeviceId,
+                                      audio_port_handle_t *portId,
+                                      std::vector<audio_io_handle_t> *secondaryOutputs,
+                                      output_type_t *outputType,
+                                      bool *isSpatialized,
+                                      bool *isBitPerfect) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding
     // stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -159,8 +161,8 @@
                                      audio_io_handle_t *input,
                                      audio_unique_id_t riid,
                                      audio_session_t session,
-                                     const AttributionSourceState& attributionSouce,
-                                     const audio_config_base_t *config,
+                                     const AttributionSourceState& attributionSource,
+                                     audio_config_base_t *config,
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      input_type_t *inputType,
@@ -245,6 +247,8 @@
                                     unsigned int *num_ports,
                                     struct audio_port_v7 *ports,
                                     unsigned int *generation) = 0;
+    virtual status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                             std::vector<media::AudioPortFw>* result) = 0;
     virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
     virtual status_t createAudioPatch(const struct audio_patch *patch,
                                        audio_patch_handle_t *handle,
@@ -299,6 +303,8 @@
 
     virtual bool     isUltrasoundSupported() = 0;
 
+    virtual bool     isHotwordStreamSupported(bool lookbackAudio) = 0;
+
     virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
                 audio_devices_t device, std::vector<audio_format_t> *formats) = 0;
 
@@ -307,13 +313,13 @@
     virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies) = 0;
 
     virtual status_t getProductStrategyFromAudioAttributes(
-            const AudioAttributes &aa, product_strategy_t &productStrategy,
+            const audio_attributes_t &aa, product_strategy_t &productStrategy,
             bool fallbackOnDefault) = 0;
 
     virtual status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) = 0;
 
     virtual status_t getVolumeGroupFromAudioAttributes(
-            const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
+            const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault) = 0;
 
     virtual bool     isCallScreenModeSupported() = 0;
 
@@ -322,8 +328,11 @@
                                                const AudioDeviceTypeAddrVector &devices) = 0;
 
     virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
-                                                  device_role_t role) = 0;
+                                                  device_role_t role,
+                                                  const AudioDeviceTypeAddrVector &devices) = 0;
 
+    virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
+                                                     device_role_t role) = 0;
 
     virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
                                                   device_role_t role,
@@ -407,6 +416,20 @@
     // retrieves the list of available direct audio profiles for the given audio attributes
     virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                     AudioProfileVector& audioProfiles) = 0;
+
+    virtual status_t getSupportedMixerAttributes(
+            audio_port_handle_t portId, std::vector<audio_mixer_attributes_t>& mixerAttrs) = 0;
+    virtual status_t setPreferredMixerAttributes(
+            const audio_attributes_t* attr,
+            audio_port_handle_t portId,
+            uid_t uid,
+            const audio_mixer_attributes_t* mixerAttributes) = 0;
+    virtual status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                 audio_port_handle_t portId,
+                                                 audio_mixer_attributes_t* mixerAttributes) = 0;
+    virtual status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                                   audio_port_handle_t portId,
+                                                   uid_t uid) = 0;
 };
 
 // Audio Policy client Interface
@@ -415,6 +438,8 @@
 public:
     virtual ~AudioPolicyClientInterface() {}
 
+    virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig *config) = 0;
+
     //
     // Audio HW module functions
     //
@@ -477,9 +502,6 @@
     virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
                                      audio_io_handle_t output, int delayMs = 0) = 0;
 
-    // invalidate a stream type, causing a reroute to an unspecified new output
-    virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
-
     // function enabling to send proprietary informations directly from audio policy manager to
     // audio hardware interface.
     virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs,
@@ -548,7 +570,10 @@
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
 
-    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                             media::DeviceConnectedState state) = 0;
+
+    virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index f130f7c..fa3a5d3 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -16,9 +16,23 @@
       "name": "CtsNativeMediaAAudioTestCases",
       "options" : [
         {
-          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+        },
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
         }
       ]
     }
+  ],
+  "auto-presubmit": [
+    {
+      "name": "audiopolicy_tests"
+    }
   ]
 }
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 3d3e0cf..d266e63 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -218,12 +218,15 @@
         return *(deviceTypes.begin());
     } else {
         // Multiple device selection is either:
+        //  - dock + one other device: give priority to dock in this case.
         //  - speaker + one other device: give priority to speaker in this case.
         //  - one A2DP device + another device: happens with duplicated output. In this case
         // retain the device on the A2DP output as the other must not correspond to an active
         // selection if not the speaker.
         //  - HDMI-CEC system audio mode only output: give priority to available item in order.
-        if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
+        if (deviceTypes.count(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET) != 0) {
+            return AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+        } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
             return AUDIO_DEVICE_OUT_SPEAKER;
         } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER_SAFE) != 0) {
             return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
@@ -246,3 +249,16 @@
         }
     }
 }
+
+/**
+ * Indicates whether two sets of audio output flags are considered matched, which means that
+ * 1) `supersetFlags` and `subsetFlags` agree on every flag in `mustMatchFlags` (both contain
+ *    it or both lack it), and
+ * 2) `supersetFlags` contains all flags from `subsetFlags`.
+ */
+static inline bool audio_output_flags_is_subset(audio_output_flags_t supersetFlags,
+                                                audio_output_flags_t subsetFlags,
+                                                uint32_t mustMatchFlags)
+{
+    return ((supersetFlags ^ subsetFlags) & mustMatchFlags) == AUDIO_OUTPUT_FLAG_NONE
+            && (supersetFlags & subsetFlags) == subsetFlags;
+}
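+
+// For illustration (assumed flag values, not referenced elsewhere):
+//   audio_output_flags_is_subset(
+//           (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
+//           AUDIO_OUTPUT_FLAG_DIRECT, AUDIO_OUTPUT_FLAG_DIRECT);  // true: DIRECT agrees on both
+//                                                                 // sides, subset bits present
+//   audio_output_flags_is_subset(AUDIO_OUTPUT_FLAG_PRIMARY,
+//           AUDIO_OUTPUT_FLAG_DIRECT, AUDIO_OUTPUT_FLAG_DIRECT);  // false: DIRECT bit differs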
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 1f23ae3..1d570b7 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -15,6 +15,7 @@
         "src/AudioInputDescriptor.cpp",
         "src/AudioOutputDescriptor.cpp",
         "src/AudioPatch.cpp",
+        "src/AudioPolicyConfig.cpp",
         "src/AudioPolicyMix.cpp",
         "src/AudioProfileVectorHelper.cpp",
         "src/AudioRoute.cpp",
@@ -24,6 +25,7 @@
         "src/HwModule.cpp",
         "src/IOProfile.cpp",
         "src/PolicyAudioPort.cpp",
+        "src/PreferredMixerAttributesInfo.cpp",
         "src/Serializer.cpp",
         "src/SoundTriggerSession.cpp",
         "src/TypeConverter.cpp",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 75fa595..876911d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -301,6 +301,10 @@
         return mActiveClients;
     }
 
+    // Returns the number of active clients when they all share the same exclusive preferred
+    // device, or 0 if they do not.
+    size_t sameExclusivePreferredDevicesCount() const;
+
     bool useHwGain() const
     {
         return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
@@ -440,6 +444,10 @@
 
     void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
 
+    bool isConfigurationMatched(const audio_config_base_t& config, audio_output_flags_t flags);
+
+    PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
@@ -450,6 +458,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
+    bool mUsePreferredMixerAttributes = false;
 };
 
 // Audio output driven by an input device directly.
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index cf1f64c..32c78a1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -16,62 +16,58 @@
 
 #pragma once
 
+#include <string>
 #include <unordered_map>
 #include <unordered_set>
+#include <vector>
 
-#include <AudioPatch.h>
 #include <DeviceDescriptor.h>
-#include <IOProfile.h>
 #include <HwModule.h>
-#include <PolicyAudioPort.h>
-#include <AudioInputDescriptor.h>
-#include <AudioOutputDescriptor.h>
-#include <AudioPolicyMix.h>
-#include <EffectDescriptor.h>
-#include <SoundTriggerSession.h>
-#include <media/AudioProfile.h>
+#include <error/Result.h>
+#include <utils/StrongPointer.h>
+#include <utils/RefBase.h>
 
 namespace android {
 
-// This class gathers together various bits of AudioPolicyManager
-// configuration, which are usually filled out as a result of parsing
-// the audio_policy_configuration.xml file.
+// This class gathers together various bits of AudioPolicyManager configuration. It can be filled
+// out as a result of parsing the audio_policy_configuration.xml file, from the HAL data, or
+// with default fallback data.
 //
-// Note that AudioPolicyConfig doesn't own some of the data,
-// it simply proxies access to the fields of AudioPolicyManager
-// class. Be careful about the fields that are references,
-// e.g. 'mOutputDevices'. This also means that it's impossible
-// to implement "deep copying" of this class without re-designing it.
-class AudioPolicyConfig
+// The data in this class is immutable once loaded, which is why a pointer to const is returned
+// from the factory methods. However, this does not prevent modification of the data held inside
+// collections, for example, individual modules, devices, etc.
+class AudioPolicyConfig : public RefBase
 {
 public:
-    AudioPolicyConfig(HwModuleCollection &hwModules,
-                      DeviceVector &outputDevices,
-                      DeviceVector &inputDevices,
-                      sp<DeviceDescriptor> &defaultOutputDevice)
-        : mHwModules(hwModules),
-          mOutputDevices(outputDevices),
-          mInputDevices(inputDevices),
-          mDefaultOutputDevice(defaultOutputDevice) {
-        clear();
-    }
+    // Surround formats, with an optional list of subformats that are equivalent from users' POV.
+    using SurroundFormats = std::unordered_map<audio_format_t, std::unordered_set<audio_format_t>>;
 
-    void clear() {
-        mSource = {};
-        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
-        mHwModules.clear();
-        mOutputDevices.clear();
-        mInputDevices.clear();
-        mDefaultOutputDevice.clear();
-        mIsSpeakerDrcEnabled = false;
-        mIsCallScreenModeSupported = false;
-        mSurroundFormats.clear();
-    }
+    // The source used to indicate the default fallback configuration.
+    static const constexpr char* const kDefaultConfigSource = "AudioPolicyConfig::setDefault";
+    // The suffix of the "engine default" implementation shared library name.
+    static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+
+    // Creates the default (fallback) configuration.
+    static sp<const AudioPolicyConfig> createDefault();
+    // Attempts to load the configuration from the XML file, falling back to default on failure.
+    // If the XML file path is not provided, uses the `audio_get_audio_policy_config_file` function.
+    static sp<const AudioPolicyConfig> loadFromApmXmlConfigWithFallback(
+            const std::string& xmlFilePath = "");
+    // The factory method to use in APM tests which craft the configuration manually.
+    static sp<AudioPolicyConfig> createWritableForTests();
+    // The factory method to use in APM tests which use a custom XML file.
+    static error::Result<sp<AudioPolicyConfig>> loadFromCustomXmlConfigForTests(
+            const std::string& xmlFilePath);
+    // The factory method to use in VTS tests. If the 'configPath' is empty,
+    // it is determined automatically from the list of known config paths.
+    static error::Result<sp<AudioPolicyConfig>> loadFromCustomXmlConfigForVtsTests(
+            const std::string& configPath, const std::string& xmlFileName);
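+    //
+    // Typical call sites might look like (illustrative, not prescriptive):
+    //   sp<const AudioPolicyConfig> config =
+    //           AudioPolicyConfig::loadFromApmXmlConfigWithFallback();  // production load path
+    //   sp<AudioPolicyConfig> testConfig =
+    //           AudioPolicyConfig::createWritableForTests();            // hand-crafted in tests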
+
+    ~AudioPolicyConfig() = default;
 
     const std::string& getSource() const {
         return mSource;
     }
-
     void setSource(const std::string& file) {
         mSource = file;
     }
@@ -79,16 +75,24 @@
     const std::string& getEngineLibraryNameSuffix() const {
         return mEngineLibraryNameSuffix;
     }
-
     void setEngineLibraryNameSuffix(const std::string& suffix) {
         mEngineLibraryNameSuffix = suffix;
     }
 
+    const HwModuleCollection& getHwModules() const { return mHwModules; }
     void setHwModules(const HwModuleCollection &hwModules)
     {
         mHwModules = hwModules;
     }
 
+    const DeviceVector& getInputDevices() const
+    {
+        return mInputDevices;
+    }
+    const DeviceVector& getOutputDevices() const
+    {
+        return mOutputDevices;
+    }
     void addDevice(const sp<DeviceDescriptor> &device)
     {
         if (audio_is_output_device(device->type())) {
@@ -97,125 +101,54 @@
             mInputDevices.add(device);
         }
     }
-
     void addInputDevices(const DeviceVector &inputDevices)
     {
         mInputDevices.add(inputDevices);
     }
-
     void addOutputDevices(const DeviceVector &outputDevices)
     {
         mOutputDevices.add(outputDevices);
     }
 
-    bool isSpeakerDrcEnabled() const { return mIsSpeakerDrcEnabled; }
-
-    void setSpeakerDrcEnabled(bool isSpeakerDrcEnabled)
-    {
-        mIsSpeakerDrcEnabled = isSpeakerDrcEnabled;
-    }
-
-    bool isCallScreenModeSupported() const { return mIsCallScreenModeSupported; }
-
-    void setCallScreenModeSupported(bool isCallScreenModeSupported)
-    {
-        mIsCallScreenModeSupported = isCallScreenModeSupported;
-    }
-
-
-    const HwModuleCollection getHwModules() const { return mHwModules; }
-
-    const DeviceVector &getInputDevices() const
-    {
-        return mInputDevices;
-    }
-
-    const DeviceVector &getOutputDevices() const
-    {
-        return mOutputDevices;
-    }
-
+    const sp<DeviceDescriptor>& getDefaultOutputDevice() const { return mDefaultOutputDevice; }
     void setDefaultOutputDevice(const sp<DeviceDescriptor> &defaultDevice)
     {
         mDefaultOutputDevice = defaultDevice;
     }
 
-    const sp<DeviceDescriptor> &getDefaultOutputDevice() const { return mDefaultOutputDevice; }
-
-    void setDefault(void)
+    bool isCallScreenModeSupported() const { return mIsCallScreenModeSupported; }
+    void setCallScreenModeSupported(bool isCallScreenModeSupported)
     {
-        mSource = "AudioPolicyConfig::setDefault";
-        mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
-        mDefaultOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPEAKER);
-        mDefaultOutputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
-        sp<DeviceDescriptor> defaultInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUILTIN_MIC);
-        defaultInputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
-        sp<AudioProfile> micProfile = new AudioProfile(
-                AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000);
-        defaultInputDevice->addAudioProfile(micProfile);
-        mOutputDevices.add(mDefaultOutputDevice);
-        mInputDevices.add(defaultInputDevice);
-
-        sp<HwModule> module = new HwModule(AUDIO_HARDWARE_MODULE_ID_PRIMARY, 2 /*halVersionMajor*/);
-        mHwModules.add(module);
-
-        sp<OutputProfile> outProfile = new OutputProfile("primary");
-        outProfile->addAudioProfile(
-                new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 44100));
-        outProfile->addSupportedDevice(mDefaultOutputDevice);
-        outProfile->setFlags(AUDIO_OUTPUT_FLAG_PRIMARY);
-        module->addOutputProfile(outProfile);
-
-        sp<InputProfile> inProfile = new InputProfile("primary");
-        inProfile->addAudioProfile(micProfile);
-        inProfile->addSupportedDevice(defaultInputDevice);
-        module->addInputProfile(inProfile);
-
-        setDefaultSurroundFormats();
+        mIsCallScreenModeSupported = isCallScreenModeSupported;
     }
 
-    // Surround formats, with an optional list of subformats that are equivalent from users' POV.
-    using SurroundFormats = std::unordered_map<audio_format_t, std::unordered_set<audio_format_t>>;
-
     const SurroundFormats &getSurroundFormats() const
     {
         return mSurroundFormats;
     }
-
+    void setDefaultSurroundFormats();
     void setSurroundFormats(const SurroundFormats &surroundFormats)
     {
         mSurroundFormats = surroundFormats;
     }
 
-    void setDefaultSurroundFormats()
-    {
-        mSurroundFormats = {
-            {AUDIO_FORMAT_AC3, {}},
-            {AUDIO_FORMAT_E_AC3, {}},
-            {AUDIO_FORMAT_DTS, {}},
-            {AUDIO_FORMAT_DTS_HD, {}},
-            {AUDIO_FORMAT_AAC_LC, {
-                    AUDIO_FORMAT_AAC_HE_V1, AUDIO_FORMAT_AAC_HE_V2, AUDIO_FORMAT_AAC_ELD,
-                    AUDIO_FORMAT_AAC_XHE}},
-            {AUDIO_FORMAT_DOLBY_TRUEHD, {}},
-            {AUDIO_FORMAT_E_AC3_JOC, {}},
-            {AUDIO_FORMAT_AC4, {}}};
-    }
+    void setDefault();
 
 private:
-    static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+    friend class sp<AudioPolicyConfig>;
 
-    std::string mSource;
-    std::string mEngineLibraryNameSuffix;
-    HwModuleCollection &mHwModules; /**< Collection of Module, with Profiles, i.e. Mix Ports. */
-    DeviceVector &mOutputDevices;
-    DeviceVector &mInputDevices;
-    sp<DeviceDescriptor> &mDefaultOutputDevice;
-    // TODO: remove when legacy conf file is removed. true on devices that use DRC on the
-    // DEVICE_CATEGORY_SPEAKER path to boost soft sounds, used to adjust volume curves accordingly.
-    // Note: remove also speaker_drc_enabled from global configuration of XML config file.
-    bool mIsSpeakerDrcEnabled;
-    bool mIsCallScreenModeSupported;
+    AudioPolicyConfig() = default;
+
+    void augmentData();
+    status_t loadFromXml(const std::string& xmlFilePath, bool forVts);
+
+    std::string mSource;  // Not kDefaultConfigSource. Empty source means an empty config.
+    std::string mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+    HwModuleCollection mHwModules; /**< Collection of Module, with Profiles, i.e. Mix Ports. */
+    DeviceVector mOutputDevices;  // Attached output devices.
+    DeviceVector mInputDevices;   // Attached input devices.
+    sp<DeviceDescriptor> mDefaultOutputDevice;
+    bool mIsCallScreenModeSupported = false;
     SurroundFormats mSurroundFormats;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index c2a20c6..92292e1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -55,29 +55,49 @@
     status_t getAudioPolicyMix(audio_devices_t deviceType,
             const String8& address, sp<AudioPolicyMix> &policyMix) const;
 
-    status_t registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc);
+    status_t registerMix(const AudioMix& mix, const sp<SwAudioOutputDescriptor>& desc);
 
     status_t unregisterMix(const AudioMix& mix);
 
     void closeOutput(sp<SwAudioOutputDescriptor> &desc);
 
     /**
-     * Try to find an output descriptor for the given attributes.
+     * Tries to find the best matching audio policy mix
      *
-     * @param[in] attributes to consider fowr the research of output descriptor.
-     * @param[out] desc to return if an primary output could be found.
-     * @param[out] secondaryDesc other desc that the audio should be routed to.
+     * @param[in] attributes to consider when searching for the audio policy mix.
+     * @param[in] config to consider when searching for the audio policy mix.
+     * @param[in] uid to consider when searching for the audio policy mix.
+     * @param[in] session to consider when searching for the audio policy mix.
+     * @param[in] flags to consider when searching for the audio policy mix.
+     * @param[in] availableOutputDevices output devices that can be used while searching for
+     *      the audio policy mix.
+     * @param[in] requestedDevice currently requested device, used to determine whether the
+     *      matching audio policy mix should be used instead of the currently set preferred device.
+     * @param[out] primaryMix to return in case a matching audio policy mix could be found.
+     * @param[out] secondaryMixes that audio should be routed to in case matching
+     *      secondary mixes could be found.
+     * @param[out] usePrimaryOutputFromPolicyMixes to return in case the audio policy mix
+     *      should be used, including the case where the requested device is explicitly disallowed
+     *      by the audio policy.
+     *
      * @return OK if the request is valid
      *         otherwise if the request is not supported
      */
-    status_t getOutputForAttr(const audio_attributes_t& attributes, uid_t uid,
+    status_t getOutputForAttr(const audio_attributes_t& attributes,
+                              const audio_config_base_t& config,
+                              uid_t uid,
+                              audio_session_t session,
                               audio_output_flags_t flags,
+                              const DeviceVector &availableOutputDevices,
+                              const sp<DeviceDescriptor>& requestedDevice,
                               sp<AudioPolicyMix> &primaryMix,
-                              std::vector<sp<AudioPolicyMix>> *secondaryMixes);
+                              std::vector<sp<AudioPolicyMix>> *secondaryMixes,
+                              bool& usePrimaryOutputFromPolicyMixes);
 
-    sp<DeviceDescriptor> getDeviceAndMixForInputSource(audio_source_t inputSource,
+    sp<DeviceDescriptor> getDeviceAndMixForInputSource(const audio_attributes_t& attributes,
                                                        const DeviceVector &availableDeviceTypes,
                                                        uid_t uid,
+                                                       audio_session_t session,
                                                        sp<AudioPolicyMix> *policyMix) const;
 
     /**
@@ -123,10 +143,20 @@
     void dump(String8 *dst) const;
 
 private:
-    enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
-    MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
+    bool mixMatch(const AudioMix* mix, size_t mixIndex,
                             const audio_attributes_t& attributes,
-                            uid_t uid);
+                            const audio_config_base_t& config,
+                            uid_t uid,
+                            audio_session_t session);
+    bool mixDisallowsRequestedDevice(const AudioMix* mix,
+                            const sp<DeviceDescriptor>& requestedDevice,
+                            const sp<DeviceDescriptor>& mixDevice,
+                            const uid_t uid);
+
+    sp<DeviceDescriptor> getOutputDeviceForMix(const AudioMix* mix,
+                            const DeviceVector& availableOutputDevices);
 };
 
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr);
+
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0431619..7119b85 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -239,12 +239,13 @@
     }
     void setUseSwBridge() { mUseSwBridge = true; }
     bool useSwBridge() const { return mUseSwBridge; }
+    bool canCloseOutput() const { return mCloseOutput; }
     bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
     sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
     wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
-    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
+    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
 
@@ -258,6 +259,15 @@
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
     bool mUseSwBridge = false;
+    /**
+     * For either a HW bridge associated with a SwOutput (for activity / volume) or a SW bridge
+     * (also used for sample rendering / activity & volume), an existing playback thread may be
+     * reused (e.g. one not already opened at APM startup, or a Direct output).
+     * If an already opened output is reused, then when that output is no longer used the
+     * AudioFlinger patch must be updated to refine the output device(s) information and ensure
+     * the correct behavior of AudioDeviceCallback.
+     */
+    bool mCloseOutput = false;
 };
 
 /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 4adc920..80e098b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -183,6 +183,10 @@
         return isSingleDeviceType(mDeviceTypes, deviceType);
     }
 
+    bool onlyContainsDevice(const sp<DeviceDescriptor>& item) const {
+        return this->size() == 1 && contains(item);
+    }
+
     bool contains(const sp<DeviceDescriptor>& item) const { return indexOf(item) >= 0; }
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 90b812d..c489eed 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -35,10 +35,7 @@
 class IOProfile : public AudioPort, public PolicyAudioPort
 {
 public:
-    IOProfile(const std::string &name, audio_port_role_t role)
-        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
-          curOpenCount(0),
-          curActiveCount(0) {}
+    IOProfile(const std::string &name, audio_port_role_t role);
 
     virtual ~IOProfile() = default;
 
@@ -66,6 +63,17 @@
         if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             maxActiveCount = 0;
         }
+        if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+            mMixerBehaviors.clear();
+            mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+            if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+                mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
+            }
+        }
+    }
+
+    const MixerBehaviorSet& getMixerBehaviors() const {
+        return mMixerBehaviors;
     }
 
     /**
@@ -97,6 +105,25 @@
                              uint32_t flags,
                              bool exactMatchRequiredForInputFlags = false) const;
 
+    /**
+     * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
+     *
+     * @param devices vector of devices to be checked for compatibility
+     * @return true if all devices are supported, false otherwise.
+     */
+    bool areAllDevicesSupported(const DeviceVector &devices) const;
+
+    /**
+     * @brief isCompatibleProfileForFlags: Checks if the IO profile is compatible with
+     * the specified flags.
+     *
+     * @param flags to be checked for compatibility
+     * @param exactMatchRequiredForInputFlags true if exact match is required on flags
+     * @return true if the profile is compatible, false otherwise.
+     */
+    bool isCompatibleProfileForFlags(uint32_t flags,
+                                     bool exactMatchRequiredForInputFlags = false) const;
+
     void dump(String8 *dst, int spaces) const;
     void log();
 
@@ -193,6 +220,8 @@
         return false;
     }
 
+    void toSupportedMixerAttributes(std::vector<audio_mixer_attributes_t>* mixerAttributes) const;
+
     // Number of streams currently opened for this profile.
     uint32_t     curOpenCount;
     // Number of streams currently active for this profile. This is not the number of active clients
@@ -201,6 +230,8 @@
 
 private:
     DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
+
+    MixerBehaviorSet mMixerBehaviors;
 };
 
 class InputProfile : public IOProfile
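
The new getMixerBehaviors()/toSupportedMixerAttributes() additions above enumerate one mixer-attributes entry per (format, sample rate, channel mask, mixer behavior) combination of a dynamic profile. A standalone sketch of that enumeration with simplified stand-in types (ConfigBase, MixerAttributes and enumerateMixerAttributes are illustrative, not the framework's):

#include <cstdint>
#include <iostream>
#include <set>
#include <vector>

// Simplified stand-ins for the framework types used by toSupportedMixerAttributes().
enum MixerBehavior { MIXER_BEHAVIOR_DEFAULT, MIXER_BEHAVIOR_BIT_PERFECT };
struct ConfigBase { int format; uint32_t sampleRate; uint32_t channelMask; };
struct MixerAttributes { ConfigBase config; MixerBehavior behavior; };

// Sketch of the enumeration: one entry per (format, rate, channel mask, behavior) combination.
std::vector<MixerAttributes> enumerateMixerAttributes(
        int format,
        const std::vector<uint32_t>& sampleRates,
        const std::vector<uint32_t>& channelMasks,
        const std::set<MixerBehavior>& behaviors) {
    std::vector<MixerAttributes> result;
    for (uint32_t rate : sampleRates) {
        for (uint32_t mask : channelMasks) {
            for (MixerBehavior behavior : behaviors) {
                result.push_back({{format, rate, mask}, behavior});
            }
        }
    }
    return result;
}

int main() {
    auto attrs = enumerateMixerAttributes(1 /*PCM16*/, {44100, 48000}, {0x3 /*stereo*/},
                                          {MIXER_BEHAVIOR_DEFAULT, MIXER_BEHAVIOR_BIT_PERFECT});
    std::cout << attrs.size() << " combinations\n";  // 2 rates * 1 mask * 2 behaviors = 4
    return 0;
}
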
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
new file mode 100644
index 0000000..9472481
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+
+#include <utils/RefBase.h>
+
+#include "AudioRoute.h"
+#include "HwModule.h"
+#include "IOProfile.h"
+
+namespace android {
+
+class PreferredMixerAttributesInfo : public RefBase {
+public:
+    PreferredMixerAttributesInfo(uid_t uid, audio_port_handle_t devicePortId,
+                                 const sp<IOProfile>& profile, audio_output_flags_t flags,
+                                 const audio_mixer_attributes_t& mixerAttributes)
+        : mDevicePortId(devicePortId), mUid(uid), mProfile(profile),
+          mOutputFlags(flags), mMixerAttributes(mixerAttributes) { }
+
+    audio_port_handle_t getDeviceId() const { return mDevicePortId; }
+    const audio_config_base_t& getConfigBase() const { return mMixerAttributes.config; }
+    uid_t getUid() const { return mUid; }
+    int getActiveClientCount() const { return mActiveClientsCount; }
+    const sp<IOProfile> getProfile() const { return mProfile; };
+    audio_output_flags_t getFlags() const { return mOutputFlags; }
+    const audio_mixer_attributes_t& getMixerAttributes() const { return mMixerAttributes; }
+
+    void increaseActiveClient() { mActiveClientsCount++; }
+    void decreaseActiveClient() { mActiveClientsCount--; }
+
+    void dump(String8 *dst);
+
+private:
+    const audio_port_handle_t mDevicePortId;
+    const uid_t mUid;
+    const sp<IOProfile> mProfile;
+    const audio_output_flags_t mOutputFlags;
+    const audio_mixer_attributes_t mMixerAttributes;
+    int mActiveClientsCount = 0;
+};
+
+} // namespace android
\ No newline at end of file
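
PreferredMixerAttributesInfo above ties a preferred mixer configuration to a device port and owning uid, and tracks how many active clients use it. A simplified, standalone sketch of how such a record could be keyed and its active-client count used to guard removal (the registry, its clearing policy, and all names below are illustrative only, not the actual AudioPolicyManager logic):

#include <cstdint>
#include <iostream>
#include <map>
#include <memory>

// Illustrative stand-in for PreferredMixerAttributesInfo: device port, owner uid,
// and a count of clients currently playing with this preference.
struct PreferredMixerInfo {
    uint32_t devicePortId;
    uint32_t uid;
    int activeClients = 0;
};

class PreferredMixerRegistry {
public:
    void set(uint32_t devicePortId, uint32_t uid) {
        mInfos[devicePortId] = std::make_shared<PreferredMixerInfo>(
                PreferredMixerInfo{devicePortId, uid});
    }
    std::shared_ptr<PreferredMixerInfo> get(uint32_t devicePortId) const {
        auto it = mInfos.find(devicePortId);
        return it == mInfos.end() ? nullptr : it->second;
    }
    // Illustrative policy: only the owning uid may clear, and only when no client is active.
    bool clear(uint32_t devicePortId, uint32_t uid) {
        auto info = get(devicePortId);
        if (info == nullptr || info->uid != uid || info->activeClients > 0) {
            return false;
        }
        mInfos.erase(devicePortId);
        return true;
    }
private:
    std::map<uint32_t, std::shared_ptr<PreferredMixerInfo>> mInfos;
};

int main() {
    PreferredMixerRegistry registry;
    registry.set(/*devicePortId=*/42, /*uid=*/10007);
    registry.get(42)->activeClients++;
    std::cout << std::boolalpha << registry.clear(42, 10007) << "\n";  // false: still active
    registry.get(42)->activeClients--;
    std::cout << registry.clear(42, 10007) << "\n";                    // true
    return 0;
}
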
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8eefe77..09ca989 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -26,6 +26,7 @@
 #include "Volume.h"
 #include "HwModule.h"
 #include "TypeConverter.h"
+#include "policy.h"
 #include <media/AudioGain.h>
 #include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
@@ -237,6 +238,27 @@
     return clients;
 }
 
+size_t AudioOutputDescriptor::sameExclusivePreferredDevicesCount() const
+{
+    audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+    size_t count = 0;
+    for (const auto &client : getClientIterable()) {
+        if (client->active()) {
+            if (!(client->hasPreferredDevice() &&
+                    client->isPreferredDeviceForExclusiveUse())) {
+                return 0;
+            }
+            if (deviceId == AUDIO_PORT_HANDLE_NONE) {
+                deviceId = client->preferredDeviceId();
+            } else if (deviceId != client->preferredDeviceId()) {
+                return 0;
+            }
+            count++;
+        }
+    }
+    return count;
+}
+
 bool AudioOutputDescriptor::isAnyActive(VolumeSource volumeSourceToIgnore) const
 {
     return std::find_if(begin(mActiveClients), end(mActiveClients),
@@ -301,7 +323,9 @@
     mDirectClientSession(AUDIO_SESSION_NONE)
 {
     if (profile != NULL) {
-        mFlags = (audio_output_flags_t)profile->getFlags();
+        // By default, open the output without the bit-perfect flag; the bit-perfect flag should
+        // only be applied when an app explicitly requests it.
+        mFlags = (audio_output_flags_t)(profile->getFlags() & (~AUDIO_OUTPUT_FLAG_BIT_PERFECT));
     }
 }
 
@@ -311,6 +335,9 @@
     if (extraInfo != nullptr) {
         allExtraInfo.appendFormat("%s; ", extraInfo);
     }
+    if (mProfile != nullptr) {
+        allExtraInfo.appendFormat("IOProfile name:%s; ", mProfile->getName().c_str());
+    }
     std::string flagsLiteral = toString(mFlags);
     allExtraInfo.appendFormat("Latency: %d; 0x%04x", mLatency, mFlags);
     if (!flagsLiteral.empty()) {
@@ -674,8 +701,10 @@
             }
         }
 
+        // TODO(b/73175392) consider improving the AIDL interface.
+        // Signal closing to A2DP HAL.
         AudioParameter param;
-        param.add(String8("closing"), String8("true"));
+        param.add(String8(AudioParameter::keyClosing), String8("true"));
         mClientInterface->setParameters(mIoHandle, param.toString());
 
         mClientInterface->closeOutput(mIoHandle);
@@ -931,6 +960,27 @@
     return false;
 }
 
+bool SwAudioOutputDescriptor::isConfigurationMatched(const audio_config_base_t &config,
+                                                     audio_output_flags_t flags) {
+    const uint32_t mustMatchOutputFlags =
+            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    return audio_output_flags_is_subset(AudioOutputDescriptor::mFlags, flags, mustMatchOutputFlags)
+            && mSamplingRate == config.sample_rate
+            && mChannelMask == config.channel_mask
+            && mFormat == config.format;
+}
+
+PortHandleVector SwAudioOutputDescriptor::getClientsForStream(
+        audio_stream_type_t streamType) const {
+    PortHandleVector clientsForStream;
+    for (const auto& client : getClientIterable()) {
+        if (client->stream() == streamType) {
+            clientsForStream.push_back(client->portId());
+        }
+    }
+    return clientsForStream;
+}
+
 void SwAudioOutputCollection::dump(String8 *dst) const
 {
     dst->appendFormat("\n Outputs (%zu):\n", size());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
new file mode 100644
index 0000000..42c76e2
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "APM_Config"
+
+#include <AudioPolicyConfig.h>
+#include <IOProfile.h>
+#include <Serializer.h>
+#include <media/AudioProfile.h>
+#include <system/audio.h>
+#include <system/audio_config.h>
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+sp<const AudioPolicyConfig> AudioPolicyConfig::createDefault() {
+    auto config = sp<AudioPolicyConfig>::make();
+    config->setDefault();
+    return config;
+}
+
+// static
+sp<const AudioPolicyConfig> AudioPolicyConfig::loadFromApmXmlConfigWithFallback(
+        const std::string& xmlFilePath) {
+    const std::string filePath =
+            xmlFilePath.empty() ? audio_get_audio_policy_config_file() : xmlFilePath;
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(filePath, false /*forVts*/); status == NO_ERROR) {
+        return config;
+    }
+    return createDefault();
+}
+
+// static
+sp<AudioPolicyConfig> AudioPolicyConfig::createWritableForTests() {
+    return sp<AudioPolicyConfig>::make();
+}
+
+// static
+error::Result<sp<AudioPolicyConfig>> AudioPolicyConfig::loadFromCustomXmlConfigForTests(
+        const std::string& xmlFilePath) {
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(xmlFilePath, false /*forVts*/); status == NO_ERROR) {
+        return config;
+    } else {
+        return base::unexpected(status);
+    }
+}
+
+// static
+error::Result<sp<AudioPolicyConfig>> AudioPolicyConfig::loadFromCustomXmlConfigForVtsTests(
+        const std::string& configPath, const std::string& xmlFileName) {
+    auto filePath = configPath;
+    if (filePath.empty()) {
+        for (const auto& location : audio_get_configuration_paths()) {
+            std::string path = location + '/' + xmlFileName;
+            if (access(path.c_str(), F_OK) == 0) {
+                filePath = location;
+                break;
+            }
+        }
+    }
+    if (filePath.empty()) {
+        ALOGE("Did not find a config file \"%s\" among known config paths", xmlFileName.c_str());
+        return base::unexpected(BAD_VALUE);
+    }
+    auto config = sp<AudioPolicyConfig>::make();
+    if (status_t status = config->loadFromXml(filePath + "/" + xmlFileName, true /*forVts*/);
+            status == NO_ERROR) {
+        return config;
+    } else {
+        return base::unexpected(status);
+    }
+}
+
+void AudioPolicyConfig::augmentData() {
+    // If a microphone's address is empty, set it according to the device type.
+    for (size_t i = 0; i < mInputDevices.size(); i++) {
+        if (mInputDevices[i]->address().empty()) {
+            if (mInputDevices[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+                mInputDevices[i]->setAddress(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
+            } else if (mInputDevices[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
+                mInputDevices[i]->setAddress(AUDIO_BACK_MICROPHONE_ADDRESS);
+            }
+        }
+    }
+}
+
+status_t AudioPolicyConfig::loadFromXml(const std::string& xmlFilePath, bool forVts) {
+    if (xmlFilePath.empty()) {
+        ALOGE("Audio policy configuration file name is empty");
+        return BAD_VALUE;
+    }
+    status_t status = forVts ? deserializeAudioPolicyFileForVts(xmlFilePath.c_str(), this)
+            : deserializeAudioPolicyFile(xmlFilePath.c_str(), this);
+    if (status == NO_ERROR) {
+        mSource = xmlFilePath;
+        augmentData();
+    } else {
+        ALOGE("Could not load audio policy from the configuration file \"%s\": %d",
+                xmlFilePath.c_str(), status);
+    }
+    return status;
+}
+
+void AudioPolicyConfig::setDefault() {
+    mSource = kDefaultConfigSource;
+    mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+
+    mDefaultOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPEAKER);
+    mDefaultOutputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
+    sp<DeviceDescriptor> defaultInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_BUILTIN_MIC);
+    defaultInputDevice->addAudioProfile(AudioProfile::createFullDynamic(gDynamicFormat));
+    sp<AudioProfile> micProfile = new AudioProfile(
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000);
+    defaultInputDevice->addAudioProfile(micProfile);
+    mOutputDevices.add(mDefaultOutputDevice);
+    mInputDevices.add(defaultInputDevice);
+
+    sp<HwModule> module = new HwModule(AUDIO_HARDWARE_MODULE_ID_PRIMARY, 2 /*halVersionMajor*/);
+    mHwModules.add(module);
+
+    sp<OutputProfile> outProfile = new OutputProfile("primary");
+    outProfile->addAudioProfile(
+            new AudioProfile(AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 44100));
+    outProfile->addSupportedDevice(mDefaultOutputDevice);
+    outProfile->setFlags(AUDIO_OUTPUT_FLAG_PRIMARY);
+    module->addOutputProfile(outProfile);
+
+    sp<InputProfile> inProfile = new InputProfile("primary");
+    inProfile->addAudioProfile(micProfile);
+    inProfile->addSupportedDevice(defaultInputDevice);
+    module->addInputProfile(inProfile);
+
+    setDefaultSurroundFormats();
+    augmentData();
+}
+
+void AudioPolicyConfig::setDefaultSurroundFormats() {
+    mSurroundFormats = {
+        {AUDIO_FORMAT_AC3, {}},
+        {AUDIO_FORMAT_E_AC3, {}},
+        {AUDIO_FORMAT_DTS, {}},
+        {AUDIO_FORMAT_DTS_HD, {}},
+        {AUDIO_FORMAT_DTS_HD_MA, {}},
+        {AUDIO_FORMAT_DTS_UHD, {}},
+        {AUDIO_FORMAT_DTS_UHD_P2, {}},
+        {AUDIO_FORMAT_AAC_LC, {
+                AUDIO_FORMAT_AAC_HE_V1, AUDIO_FORMAT_AAC_HE_V2, AUDIO_FORMAT_AAC_ELD,
+                AUDIO_FORMAT_AAC_XHE}},
+        {AUDIO_FORMAT_DOLBY_TRUEHD, {}},
+        {AUDIO_FORMAT_E_AC3_JOC, {}},
+        {AUDIO_FORMAT_AC4, {}}};
+}
+
+} // namespace android
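
AudioPolicyConfig::loadFromApmXmlConfigWithFallback() above tries to deserialize the XML configuration and falls back to a built-in default config on failure. A minimal standalone sketch of that load-with-fallback pattern (Config and the file handling here are illustrative stand-ins, not the framework's deserializer):

#include <fstream>
#include <iostream>
#include <memory>
#include <string>

// Illustrative stand-in for AudioPolicyConfig: just remembers where it came from.
struct Config {
    std::string source;
    static std::shared_ptr<Config> createDefault() {
        auto config = std::make_shared<Config>();
        config->source = "<default>";
        return config;
    }
};

// Sketch of the fallback pattern: return the parsed config if the file is usable,
// otherwise return the built-in default.
std::shared_ptr<Config> loadWithFallback(const std::string& path) {
    std::ifstream file(path);
    if (file.good()) {
        auto config = std::make_shared<Config>();
        config->source = path;   // a real loader would deserialize the file here
        return config;
    }
    return Config::createDefault();
}

int main() {
    std::cout << loadWithFallback("/nonexistent/audio_policy_configuration.xml")->source << "\n";
    return 0;
}
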
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 546f56b..6b9757d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,8 +15,12 @@
  */
 
 #define LOG_TAG "APM_AudioPolicyMix"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
 
+#include <algorithm>
+#include <iterator>
+#include <optional>
+#include <regex>
 #include "AudioPolicyMix.h"
 #include "TypeConverter.h"
 #include "HwModule.h"
@@ -25,6 +29,98 @@
 #include <AudioOutputDescriptor.h>
 
 namespace android {
+namespace {
+
+bool matchAddressToTags(const audio_attributes_t& attr, const String8& addr) {
+    std::optional<std::string> tagAddress = extractAddressFromAudioAttributes(attr);
+    return tagAddress.has_value() && tagAddress->compare(addr.c_str()) == 0;
+}
+
+// Returns true if the criterion matches.
+// Exclude criteria are handled in the same way as positive ones:
+// only the condition is evaluated, so the function returns the same
+// result for RULE_MATCH_X and RULE_EXCLUDE_X.
+bool isCriterionMatched(const AudioMixMatchCriterion& criterion,
+                        const audio_attributes_t& attr,
+                        const uid_t uid,
+                        const audio_session_t session) {
+    uint32_t ruleWithoutExclusion = criterion.mRule & ~RULE_EXCLUSION_MASK;
+    switch(ruleWithoutExclusion) {
+        case RULE_MATCH_ATTRIBUTE_USAGE:
+            return criterion.mValue.mUsage == attr.usage;
+        case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+            return criterion.mValue.mSource == attr.source;
+        case RULE_MATCH_UID:
+            return criterion.mValue.mUid == uid;
+        case RULE_MATCH_USERID:
+            {
+                userid_t userId = multiuser_get_user_id(uid);
+                return criterion.mValue.mUserId == userId;
+            }
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            return criterion.mValue.mAudioSessionId == session;
+    }
+    ALOGE("Encountered invalid mix rule 0x%x", criterion.mRule);
+    return false;
+}
+
+// Returns true if the vector of criteria is matched:
+// - If any exclude criterion matches, the criteria do not match.
+// - Otherwise, for each 'dimension' of positive rule present
+//   (usage, capture preset, uid, userid...) at least one rule must match
+//   for the criteria to match.
+bool areMixCriteriaMatched(const std::vector<AudioMixMatchCriterion>& criteria,
+                           const audio_attributes_t& attr,
+                           const uid_t uid,
+                           const audio_session_t session) {
+    // If any of the exclusion criteria are matched the mix doesn't match.
+    auto isMatchingExcludeCriterion = [&](const AudioMixMatchCriterion& c) {
+        return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid, session);
+    };
+    if (std::any_of(criteria.begin(), criteria.end(), isMatchingExcludeCriterion)) {
+        return false;
+    }
+
+    uint32_t presentPositiveRules = 0; // Bitmask of all present positive criteria.
+    uint32_t matchedPositiveRules = 0; // Bitmask of all matched positive criteria.
+    for (const auto& criterion : criteria) {
+        if (criterion.isExcludeCriterion()) {
+            continue;
+        }
+        presentPositiveRules |= criterion.mRule;
+        if (isCriterionMatched(criterion, attr, uid, session)) {
+            matchedPositiveRules |= criterion.mRule;
+        }
+    }
+    return presentPositiveRules == matchedPositiveRules;
+}
+
+// Consistency checks: for each "dimension" of rules (usage, uid...), we can
+// only have MATCH rules, or EXCLUDE rules in each dimension, not a combination.
+bool areMixCriteriaConsistent(const std::vector<AudioMixMatchCriterion>& criteria) {
+    std::set<uint32_t> positiveCriteria;
+    for (const AudioMixMatchCriterion& c : criteria) {
+        if (c.isExcludeCriterion()) {
+            continue;
+        }
+        positiveCriteria.insert(c.mRule);
+    }
+
+    auto isConflictingCriterion = [&positiveCriteria](const AudioMixMatchCriterion& c) {
+        uint32_t ruleWithoutExclusion = c.mRule & ~RULE_EXCLUSION_MASK;
+        return c.isExcludeCriterion() &&
+               (positiveCriteria.find(ruleWithoutExclusion) != positiveCriteria.end());
+    };
+    return std::none_of(criteria.begin(), criteria.end(), isConflictingCriterion);
+}
+
+template <typename Predicate>
+void EraseCriteriaIf(std::vector<AudioMixMatchCriterion>& v,
+                     const Predicate& predicate) {
+    v.erase(std::remove_if(v.begin(), v.end(), predicate), v.end());
+}
+
+} // namespace
 
 void AudioPolicyMix::dump(String8 *dst, int spaces, int index) const
 {
@@ -66,6 +162,9 @@
         case RULE_MATCH_USERID:
             ruleValue = std::to_string(criterion.mValue.mUserId);
             break;
+        case RULE_MATCH_AUDIO_SESSION_ID:
+            ruleValue = std::to_string(criterion.mValue.mAudioSessionId);
+            break;
         default:
             unknownRule = true;
         }
@@ -78,23 +177,30 @@
     }
 }
 
-status_t AudioPolicyMixCollection::registerMix(AudioMix mix, sp<SwAudioOutputDescriptor> desc)
+status_t AudioPolicyMixCollection::registerMix(const AudioMix& mix,
+                                               const sp<SwAudioOutputDescriptor>& desc)
 {
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
         if (mix.mDeviceType == registeredMix->mDeviceType
-                && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
+                && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0
+                && is_mix_loopback(mix.mRouteFlags)) {
             ALOGE("registerMix(): mix already registered for dev=0x%x addr=%s",
                     mix.mDeviceType, mix.mDeviceAddress.string());
             return BAD_VALUE;
         }
     }
-    sp<AudioPolicyMix> policyMix = new AudioPolicyMix(mix);
+    if (!areMixCriteriaConsistent(mix.mCriteria)) {
+        ALOGE("registerMix(): Mix contains inconsistent criteria "
+              "(MATCH & EXCLUDE criteria of the same type)");
+        return BAD_VALUE;
+    }
+    sp<AudioPolicyMix> policyMix = sp<AudioPolicyMix>::make(mix);
     add(policyMix);
     ALOGD("registerMix(): adding mix for dev=0x%x addr=%s",
             policyMix->mDeviceType, policyMix->mDeviceAddress.string());
 
-    if (desc != 0) {
+    if (desc != nullptr) {
         desc->mPolicyMix = policyMix;
         policyMix->setOutput(desc);
     }
@@ -151,25 +257,26 @@
 }
 
 status_t AudioPolicyMixCollection::getOutputForAttr(
-        const audio_attributes_t& attributes, uid_t uid,
+        const audio_attributes_t& attributes, const audio_config_base_t& config, const uid_t uid,
+        const audio_session_t session,
         audio_output_flags_t flags,
+        const DeviceVector &availableOutputDevices,
+        const sp<DeviceDescriptor>& requestedDevice,
         sp<AudioPolicyMix> &primaryMix,
-        std::vector<sp<AudioPolicyMix>> *secondaryMixes)
+        std::vector<sp<AudioPolicyMix>> *secondaryMixes,
+        bool& usePrimaryOutputFromPolicyMixes)
 {
     ALOGV("getOutputForAttr() querying %zu mixes:", size());
     primaryMix.clear();
+    bool mixesDisallowsRequestedDevice = false;
     for (size_t i = 0; i < size(); i++) {
         sp<AudioPolicyMix> policyMix = itemAt(i);
         const bool primaryOutputMix = !is_mix_loopback_render(policyMix->mRouteFlags);
-        if (!primaryOutputMix && (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) {
-            // AAudio does not support MMAP_NO_IRQ loopback render, and there is no way with
-            // the current MmapStreamInterface::start to reject a specific client added to a shared
-            // mmap stream.
-            // As a result all MMAP_NOIRQ requests have to be rejected when an loopback render
-            // policy is present. That ensures no shared mmap stream is used when an loopback
-            // render policy is registered.
-            ALOGD("%s: Rejecting MMAP_NOIRQ request due to LOOPBACK|RENDER mix present.", __func__);
-            return INVALID_OPERATION;
+        sp<DeviceDescriptor> mixDevice = getOutputDeviceForMix(policyMix.get(),
+            availableOutputDevices);
+        if (mixDisallowsRequestedDevice(policyMix.get(), requestedDevice, mixDevice, uid)) {
+            ALOGV("%s: Mix %zu: does not allows device", __func__, i);
+            mixesDisallowsRequestedDevice = true;
         }
 
         if (primaryOutputMix && primaryMix != nullptr) {
@@ -177,15 +284,22 @@
             continue; // Primary output already found
         }
 
-        switch (mixMatch(policyMix.get(), i, attributes, uid)) {
-            case MixMatchStatus::INVALID_MIX:
-                // The mix has contradictory rules, ignore it
-                // TODO: reject invalid mix at registration
-                continue;
-            case MixMatchStatus::NO_MATCH:
-                ALOGV("%s: Mix %zu: does not match", __func__, i);
-                continue; // skip the mix
-            case MixMatchStatus::MATCH:;
+        if (!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
+            ALOGV("%s: Mix %zu: does not match", __func__, i);
+            continue; // skip the mix
+        }
+
+        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) && is_mix_loopback(policyMix->mRouteFlags)) {
+            // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
+            // using dynamic audio policy.
+            ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic audio policy mix.",
+                __func__);
+            return INVALID_OPERATION;
+        }
+
+        if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
+            ALOGV("%s: Mix %zu: requested device mathches", __func__, i);
+            mixesDisallowsRequestedDevice = false;
         }
 
         if (primaryOutputMix) {
@@ -198,11 +312,39 @@
             }
         }
     }
+
+    // Explicit routing has higher priority than the dynamic policy primary output, but the
+    // policy may explicitly deny the requested device.
+    usePrimaryOutputFromPolicyMixes =
+        (mixesDisallowsRequestedDevice || requestedDevice == nullptr) && primaryMix != nullptr;
+
     return NO_ERROR;
 }
 
-AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
-        const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes, uid_t uid) {
+sp<DeviceDescriptor> AudioPolicyMixCollection::getOutputDeviceForMix(const AudioMix* mix,
+                                                    const DeviceVector& availableOutputDevices) {
+    ALOGV("%s: device (0x%x, addr=%s) forced by mix", __func__, mix->mDeviceType,
+        mix->mDeviceAddress.c_str());
+    return availableOutputDevices.getDevice(mix->mDeviceType, mix->mDeviceAddress,
+        AUDIO_FORMAT_DEFAULT);
+}
+
+bool AudioPolicyMixCollection::mixDisallowsRequestedDevice(const AudioMix* mix,
+                                                     const sp<DeviceDescriptor>& requestedDevice,
+                                                     const sp<DeviceDescriptor>& mixDevice,
+                                                     const uid_t uid) {
+    if (requestedDevice == nullptr || mixDevice == nullptr) {
+        return false;
+    }
+
+    return is_mix_disallows_preferred_device(mix->mRouteFlags)
+        && requestedDevice->equals(mixDevice)
+        && mix->hasUserIdRule(false /* match */, multiuser_get_user_id(uid));
+}
+
+bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
+    const audio_attributes_t& attributes, const audio_config_base_t& config,
+    uid_t uid, audio_session_t session) {
 
     if (mix->mMixType == MIX_TYPE_PLAYERS) {
         // Loopback render mixes are created from a public API and thus restricted
@@ -212,163 +354,45 @@
                   attributes.usage == AUDIO_USAGE_MEDIA ||
                   attributes.usage == AUDIO_USAGE_GAME ||
                   attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
             auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
             if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
 
             if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION) {
                 if (!mix->mVoiceCommunicationCaptureAllowed) {
-                    return MixMatchStatus::NO_MATCH;
+                    return false;
                 }
             } else if (!mix->mAllowPrivilegedMediaPlaybackCapture &&
                 hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
-                return MixMatchStatus::NO_MATCH;
+                return false;
             }
         }
 
-        int userId = (int) multiuser_get_user_id(uid);
-
-        // TODO if adding more player rules (currently only 2), make rule handling "generic"
-        //      as there is no difference in the treatment of usage- or uid-based rules
-        bool hasUsageMatchRules = false;
-        bool hasUsageExcludeRules = false;
-        bool usageMatchFound = false;
-        bool usageExclusionFound = false;
-
-        bool hasUidMatchRules = false;
-        bool hasUidExcludeRules = false;
-        bool uidMatchFound = false;
-        bool uidExclusionFound = false;
-
-        bool hasUserIdExcludeRules = false;
-        bool userIdExclusionFound = false;
-        bool hasUserIdMatchRules = false;
-        bool userIdMatchFound = false;
-
-
-        bool hasAddrMatch = false;
-
-        // iterate over all mix criteria to list what rules this mix contains
-        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
-            ALOGV(" getOutputForAttr: mix %zu: inspecting mix criteria %zu of %zu",
-                    mixIndex, j, mix->mCriteria.size());
-
-            // if there is an address match, prioritize that match
-            if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
-                    strncmp(attributes.tags + strlen("addr="),
-                            mix->mDeviceAddress.string(),
-                            AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-                hasAddrMatch = true;
-                break;
-            }
-
-            switch (mix->mCriteria[j].mRule) {
-            case RULE_MATCH_ATTRIBUTE_USAGE:
-                ALOGV("\tmix has RULE_MATCH_ATTRIBUTE_USAGE for usage %d",
-                                            mix->mCriteria[j].mValue.mUsage);
-                hasUsageMatchRules = true;
-                if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
-                    // found one match against all allowed usages
-                    usageMatchFound = true;
-                }
-                break;
-            case RULE_EXCLUDE_ATTRIBUTE_USAGE:
-                ALOGV("\tmix has RULE_EXCLUDE_ATTRIBUTE_USAGE for usage %d",
-                        mix->mCriteria[j].mValue.mUsage);
-                hasUsageExcludeRules = true;
-                if (mix->mCriteria[j].mValue.mUsage == attributes.usage) {
-                    // found this usage is to be excluded
-                    usageExclusionFound = true;
-                }
-                break;
-            case RULE_MATCH_UID:
-                ALOGV("\tmix has RULE_MATCH_UID for uid %d", mix->mCriteria[j].mValue.mUid);
-                hasUidMatchRules = true;
-                if (mix->mCriteria[j].mValue.mUid == uid) {
-                    // found one UID match against all allowed UIDs
-                    uidMatchFound = true;
-                }
-                break;
-            case RULE_EXCLUDE_UID:
-                ALOGV("\tmix has RULE_EXCLUDE_UID for uid %d", mix->mCriteria[j].mValue.mUid);
-                hasUidExcludeRules = true;
-                if (mix->mCriteria[j].mValue.mUid == uid) {
-                    // found this UID is to be excluded
-                    uidExclusionFound = true;
-                }
-                break;
-            case RULE_MATCH_USERID:
-                ALOGV("\tmix has RULE_MATCH_USERID for userId %d",
-                    mix->mCriteria[j].mValue.mUserId);
-                hasUserIdMatchRules = true;
-                if (mix->mCriteria[j].mValue.mUserId == userId) {
-                    // found one userId match against all allowed userIds
-                    userIdMatchFound = true;
-                }
-                break;
-            case RULE_EXCLUDE_USERID:
-                ALOGV("\tmix has RULE_EXCLUDE_USERID for userId %d",
-                    mix->mCriteria[j].mValue.mUserId);
-                hasUserIdExcludeRules = true;
-                if (mix->mCriteria[j].mValue.mUserId == userId) {
-                    // found this userId is to be excluded
-                    userIdExclusionFound = true;
-                }
-                break;
-            default:
-                break;
-            }
-
-            // consistency checks: for each "dimension" of rules (usage, uid...), we can
-            // only have MATCH rules, or EXCLUDE rules in each dimension, not a combination
-            if (hasUsageMatchRules && hasUsageExcludeRules) {
-                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_ATTRIBUTE_USAGE"
-                        " and RULE_EXCLUDE_ATTRIBUTE_USAGE in mix %zu", mixIndex);
-                return MixMatchStatus::INVALID_MIX;
-            }
-            if (hasUidMatchRules && hasUidExcludeRules) {
-                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_UID"
-                        " and RULE_EXCLUDE_UID in mix %zu", mixIndex);
-                return MixMatchStatus::INVALID_MIX;
-            }
-            if (hasUserIdMatchRules && hasUserIdExcludeRules) {
-                ALOGE("getOutputForAttr: invalid combination of RULE_MATCH_USERID"
-                        " and RULE_EXCLUDE_USERID in mix %zu", mixIndex);
-                    return MixMatchStatus::INVALID_MIX;
-            }
-
-            if ((hasUsageExcludeRules && usageExclusionFound)
-                    || (hasUidExcludeRules && uidExclusionFound)
-                    || (hasUserIdExcludeRules && userIdExclusionFound)) {
-                break; // stop iterating on criteria because an exclusion was found (will fail)
-            }
-        }//iterate on mix criteria
-
-        // determine if exiting on success (or implicit failure as desc is 0)
-        if (hasAddrMatch ||
-                !((hasUsageExcludeRules && usageExclusionFound) ||
-                  (hasUsageMatchRules && !usageMatchFound)  ||
-                  (hasUidExcludeRules && uidExclusionFound) ||
-                  (hasUidMatchRules && !uidMatchFound) ||
-                  (hasUserIdExcludeRules && userIdExclusionFound) ||
-                  (hasUserIdMatchRules && !userIdMatchFound))) {
-            ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
-            return MixMatchStatus::MATCH;
+        // Permit a match only if the requested format and the mix format are both PCM (and can
+        // therefore be adapted by the mixer), or are the same (compressed) format.
+        if (!is_mix_loopback(mix->mRouteFlags) &&
+            !((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
+              (config.format == mix->mFormat.format)) &&
+              config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
+            return false;
         }
 
+        // if there is an address match, prioritize that match
+        if (matchAddressToTags(attributes, mix->mDeviceAddress)
+            || areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+                ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
+                return true;
+        }
     } else if (mix->mMixType == MIX_TYPE_RECORDERS) {
         if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
-                strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
-                strncmp(attributes.tags + strlen("addr="),
-                        mix->mDeviceAddress.string(),
-                        AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
-            return MixMatchStatus::MATCH;
+            matchAddressToTags(attributes, mix->mDeviceAddress)) {
+            return true;
         }
     }
-    return MixMatchStatus::NO_MATCH;
+    return false;
 }
 
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
@@ -378,20 +402,17 @@
     for (size_t i = 0; i < size(); i++) {
         if (itemAt(i)->getOutput() == output) {
             // This Desc is involved in a Mix, which has the highest prio
-            audio_devices_t deviceType = itemAt(i)->mDeviceType;
-            String8 address = itemAt(i)->mDeviceAddress;
-            ALOGV("%s: device (0x%x, addr=%s) forced by mix",
-                  __FUNCTION__, deviceType, address.c_str());
-            return availableOutputDevices.getDevice(deviceType, address, AUDIO_FORMAT_DEFAULT);
+            return getOutputDeviceForMix(itemAt(i).get(), availableOutputDevices);
         }
     }
     return nullptr;
 }
 
 sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForInputSource(
-        audio_source_t inputSource,
+        const audio_attributes_t& attributes,
         const DeviceVector &availDevices,
         uid_t uid,
+        audio_session_t session,
         sp<AudioPolicyMix> *policyMix) const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -399,28 +420,17 @@
         if (mix->mMixType != MIX_TYPE_RECORDERS) {
             continue;
         }
-        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
-            if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mSource == inputSource) ||
-               (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mSource != inputSource) ||
-               (RULE_MATCH_UID == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mUid == uid) ||
-               (RULE_EXCLUDE_UID == mix->mCriteria[j].mRule &&
-                    mix->mCriteria[j].mValue.mUid != uid)) {
-                // assuming PolicyMix only for remote submix for input
-                // so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-                audio_devices_t device = AUDIO_DEVICE_IN_REMOTE_SUBMIX;
-                auto mixDevice =
-                        availDevices.getDevice(device, mix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+        if (areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+            // Assuming a PolicyMix is only used with remote submix for input,
+            // mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX.
+            auto mixDevice = availDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+             mix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
                 if (mixDevice != nullptr) {
                     if (policyMix != nullptr) {
                         *policyMix = mix;
                     }
                     return mixDevice;
                 }
-                break;
-            }
         }
     }
     return nullptr;
@@ -429,14 +439,14 @@
 status_t AudioPolicyMixCollection::getInputMixForAttr(
         audio_attributes_t attr, sp<AudioPolicyMix> *policyMix)
 {
-    if (strncmp(attr.tags, "addr=", strlen("addr=")) != 0) {
+    std::optional<std::string> address = extractAddressFromAudioAttributes(attr);
+    if (!address.has_value()) {
         return BAD_VALUE;
     }
-    String8 address(attr.tags + strlen("addr="));
 
 #ifdef LOG_NDEBUG
     ALOGV("getInputMixForAttr looking for address %s for source %d\n  mixes available:",
-            address.string(), attr.source);
+            address->c_str(), attr.source);
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix> audioPolicyMix = itemAt(i);
         ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.string());
@@ -446,20 +456,20 @@
     size_t index;
     for (index = 0; index < size(); index++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(index);
-        if (registeredMix->mDeviceAddress.compare(address) == 0) {
+        if (address->compare(registeredMix->mDeviceAddress.c_str()) == 0) {
             ALOGD("getInputMixForAttr found addr=%s dev=0x%x",
                     registeredMix->mDeviceAddress.string(), registeredMix->mDeviceType);
             break;
         }
     }
     if (index == size()) {
-        ALOGW("getInputMixForAttr() no policy for address %s", address.string());
+        ALOGW("getInputMixForAttr() no policy for address %s", address->c_str());
         return BAD_VALUE;
     }
     const sp<AudioPolicyMix> audioPolicyMix = itemAt(index);
 
     if (audioPolicyMix->mMixType != MIX_TYPE_PLAYERS) {
-        ALOGW("getInputMixForAttr() bad policy mix type for address %s", address.string());
+        ALOGW("getInputMixForAttr() bad policy mix type for address %s", address->c_str());
         return BAD_VALUE;
     }
     if (policyMix != nullptr) {
@@ -491,7 +501,7 @@
     //     AND it doesn't have a "match uid" rule
     //   THEN add a rule to exclude the uid
     for (size_t i = 0; i < size(); i++) {
-        const AudioPolicyMix *mix = itemAt(i).get();
+        AudioPolicyMix *mix = itemAt(i).get();
         if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
@@ -521,27 +531,16 @@
 status_t AudioPolicyMixCollection::removeUidDeviceAffinities(uid_t uid) {
     // for each player mix: remove existing rules that match or exclude this uid
     for (size_t i = 0; i < size(); i++) {
-        bool foundUidRule = false;
-        const AudioPolicyMix *mix = itemAt(i).get();
+        AudioPolicyMix *mix = itemAt(i).get();
         if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
-        std::vector<size_t> criteriaToRemove;
-        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
-            const uint32_t rule = mix->mCriteria[j].mRule;
-            // is this rule excluding the uid? (not considering uid match rules
-            // as those are not used for uid-device affinity)
-            if (rule == RULE_EXCLUDE_UID
-                    && uid == mix->mCriteria[j].mValue.mUid) {
-                foundUidRule = true;
-                criteriaToRemove.insert(criteriaToRemove.begin(), j);
-            }
-        }
-        if (foundUidRule) {
-            for (size_t j = 0; j < criteriaToRemove.size(); j++) {
-                mix->mCriteria.removeAt(criteriaToRemove[j]);
-            }
-        }
+
+        // Remove any rule excluding this uid (uid match rules are not considered,
+        // as those are not used for uid-device affinity).
+        EraseCriteriaIf(mix->mCriteria, [uid](const AudioMixMatchCriterion& c) {
+            return c.mRule == RULE_EXCLUDE_UID && c.mValue.mUid == uid;
+        });
     }
     return NO_ERROR;
 }
@@ -576,7 +575,7 @@
     //    "match userId" rule for this userId, return an error
     //    (adding a userId-device affinity would result in contradictory rules)
     for (size_t i = 0; i < size(); i++) {
-        const AudioPolicyMix* mix = itemAt(i).get();
+        AudioPolicyMix* mix = itemAt(i).get();
         if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
@@ -593,7 +592,7 @@
     //     AND it doesn't have a "match userId" rule
     //   THEN add a rule to exclude the userId
     for (size_t i = 0; i < size(); i++) {
-        const AudioPolicyMix *mix = itemAt(i).get();
+        AudioPolicyMix *mix = itemAt(i).get();
         if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
@@ -606,13 +605,14 @@
                 break;
             }
         }
-        if (!deviceMatch && !mix->hasMatchUserIdRule()) {
+        if (!deviceMatch && !mix->hasUserIdRule(true /*match*/)) {
             // this mix doesn't go to one of the listed devices for the given userId,
             // and it's not already restricting the mix on a userId,
             // modify its rules to exclude the userId
-            if (!mix->hasUserIdRule(false /*match*/, userId)) {
+            if (!mix->hasUserIdRule(false /* match */, userId)) {
                 // no need to do it again if userId is already excluded
                 mix->setExcludeUserId(userId);
+                mix->mRouteFlags = mix->mRouteFlags | MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
             }
         }
     }
@@ -623,26 +623,19 @@
 status_t AudioPolicyMixCollection::removeUserIdDeviceAffinities(int userId) {
     // for each player mix: remove existing rules that match or exclude this userId
     for (size_t i = 0; i < size(); i++) {
-        bool foundUserIdRule = false;
-        const AudioPolicyMix *mix = itemAt(i).get();
+        AudioPolicyMix *mix = itemAt(i).get();
         if (!mix->isDeviceAffinityCompatible()) {
             continue;
         }
-        std::vector<size_t> criteriaToRemove;
-        for (size_t j = 0; j < mix->mCriteria.size(); j++) {
-            const uint32_t rule = mix->mCriteria[j].mRule;
-            // is this rule excluding the userId? (not considering userId match rules
-            // as those are not used for userId-device affinity)
-            if (rule == RULE_EXCLUDE_USERID
-                    && userId == mix->mCriteria[j].mValue.mUserId) {
-                foundUserIdRule = true;
-                criteriaToRemove.insert(criteriaToRemove.begin(), j);
-            }
-        }
-        if (foundUserIdRule) {
-            for (size_t j = 0; j < criteriaToRemove.size(); j++) {
-                mix->mCriteria.removeAt(criteriaToRemove[j]);
-            }
+
+        // Remove any rule excluding this userId (userId match rules are not considered,
+        // as those are not used for userId-device affinity).
+        EraseCriteriaIf(mix->mCriteria, [userId](const AudioMixMatchCriterion& c) {
+            return c.mRule == RULE_EXCLUDE_USERID && c.mValue.mUserId == userId;
+        });
+
+        if (!mix->hasUserIdRule(false /* match */)) {
+            mix->mRouteFlags = mix->mRouteFlags & ~MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
         }
     }
     return NO_ERROR;
@@ -681,4 +674,14 @@
     }
 }
 
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr) {
+    static const std::regex addrTagRegex("addr=([^;]+)");
+
+    std::cmatch match;
+    if (std::regex_search(attr.tags, match, addrTagRegex)) {
+        return match[1].str();
+    }
+    return std::nullopt;
+}
+
 }; //namespace android
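
The rewritten matching above (isCriterionMatched/areMixCriteriaMatched) replaces the per-rule bookkeeping booleans with two bitmasks: the rule types present among positive criteria and the rule types that actually matched; any matching exclude criterion rejects immediately. A standalone sketch of that bitmask scheme with simplified rule constants and a Criterion stand-in for AudioMixMatchCriterion:

#include <cstdint>
#include <iostream>
#include <vector>

// Illustrative rule constants: each positive rule type is one bit, exclusion is a flag bit.
constexpr uint32_t RULE_MATCH_USAGE = 0x1;
constexpr uint32_t RULE_MATCH_UID   = 0x2;
constexpr uint32_t RULE_EXCLUSION   = 0x8000;

struct Criterion {
    uint32_t rule;
    int value;  // usage or uid, depending on the rule
    bool isExclude() const { return (rule & RULE_EXCLUSION) != 0; }
};

// A criterion matches when its condition holds, regardless of MATCH vs EXCLUDE.
bool criterionMatches(const Criterion& c, int usage, int uid) {
    switch (c.rule & ~RULE_EXCLUSION) {
        case RULE_MATCH_USAGE: return c.value == usage;
        case RULE_MATCH_UID:   return c.value == uid;
    }
    return false;
}

// The mix matches when no exclude criterion matches and every present positive
// rule type has at least one matching criterion.
bool criteriaMatch(const std::vector<Criterion>& criteria, int usage, int uid) {
    uint32_t present = 0, matched = 0;
    for (const Criterion& c : criteria) {
        if (c.isExclude()) {
            if (criterionMatches(c, usage, uid)) return false;  // any exclusion match rejects
            continue;
        }
        present |= c.rule;
        if (criterionMatches(c, usage, uid)) matched |= c.rule;
    }
    return present == matched;
}

int main() {
    std::vector<Criterion> criteria = {
        {RULE_MATCH_USAGE, /*USAGE_MEDIA=*/1},
        {RULE_MATCH_UID, 10042},
        {RULE_MATCH_UID | RULE_EXCLUSION, 10099},
    };
    std::cout << std::boolalpha
              << criteriaMatch(criteria, /*usage=*/1, /*uid=*/10042) << "\n"   // true
              << criteriaMatch(criteria, /*usage=*/1, /*uid=*/10099) << "\n";  // false (excluded)
    return 0;
}
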
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 713b0ac..8b6866e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -105,9 +105,11 @@
 {
 }
 
-void SourceClientDescriptor::setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput)
+void SourceClientDescriptor::setSwOutput(
+        const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput)
 {
     mSwOutput = swOutput;
+    mCloseOutput = closeOutput;
 }
 
 void SourceClientDescriptor::setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput)
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 21f2018..98d7d59 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -24,6 +24,15 @@
 
 namespace android {
 
+IOProfile::IOProfile(const std::string &name, audio_port_role_t role)
+        : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
+          curOpenCount(0),
+          curActiveCount(0) {
+    if (role == AUDIO_PORT_ROLE_SOURCE) {
+        mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+    }
+}
+
 bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
                                     uint32_t samplingRate,
                                     uint32_t *updatedSamplingRate,
@@ -40,11 +49,9 @@
     const bool isRecordThread =
             getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
-
-    if (!devices.isEmpty()) {
-        if (!mSupportedDevices.containsAllDevices(devices)) {
-            return false;
-        }
+    if (!areAllDevicesSupported(devices) ||
+            !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
+        return false;
     }
 
     if (!audio_is_valid_format(format) ||
@@ -78,21 +85,6 @@
         }
     }
 
-    const uint32_t mustMatchOutputFlags =
-            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
-    if (isPlaybackThread && (((getFlags() ^ flags) & mustMatchOutputFlags)
-                    || (getFlags() & flags) != flags)) {
-        return false;
-    }
-    // The only input flag that is allowed to be different is the fast flag.
-    // An existing fast stream is compatible with a normal track request.
-    // An existing normal stream is compatible with a fast track request,
-    // but the fast request will be denied by AudioFlinger and converted to normal track.
-    if (isRecordThread && ((getFlags() ^ flags) &
-            ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
-        return false;
-    }
-
     if (updatedSamplingRate != NULL) {
         *updatedSamplingRate = myUpdatedSamplingRate;
     }
@@ -105,6 +97,41 @@
     return true;
 }
 
+bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
+    if (devices.empty()) {
+        return true;
+    }
+    return mSupportedDevices.containsAllDevices(devices);
+}
+
+bool IOProfile::isCompatibleProfileForFlags(uint32_t flags,
+                                            bool exactMatchRequiredForInputFlags) const {
+    const bool isPlaybackThread =
+            getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
+    const bool isRecordThread =
+            getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
+    ALOG_ASSERT(isPlaybackThread != isRecordThread);
+
+    const uint32_t mustMatchOutputFlags =
+            AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    if (isPlaybackThread &&
+        !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
+                                      (audio_output_flags_t)flags,
+                                      mustMatchOutputFlags)) {
+        return false;
+    }
+    // The only input flag that is allowed to be different is the fast flag.
+    // An existing fast stream is compatible with a normal track request.
+    // An existing normal stream is compatible with a fast track request,
+    // but the fast request will be denied by AudioFlinger and converted to normal track.
+    if (isRecordThread && ((getFlags() ^ flags) &
+            ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
+        return false;
+    }
+
+    return true;
+}
+
 bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
         const sp<DeviceDescriptor>& device) const {
     if (device == nullptr) {
@@ -116,6 +143,34 @@
                 return device == deviceDesc && deviceDesc->hasCurrentEncodedFormat(); }) == 1;
 }
 
+void IOProfile::toSupportedMixerAttributes(
+        std::vector<audio_mixer_attributes_t> *mixerAttributes) const {
+    if (!hasDynamicAudioProfile()) {
+        // Mixer attributes are only supported when there is a dynamic audio profile.
+        return;
+    }
+    for (const auto& profile : mProfiles) {
+        if (!profile->isValid()) {
+            continue;
+        }
+        for (const auto sampleRate : profile->getSampleRates()) {
+            for (const auto channelMask : profile->getChannels()) {
+                const audio_config_base_t config = {
+                        .format = profile->getFormat(),
+                        .sample_rate = sampleRate,
+                        .channel_mask = channelMask
+                };
+                for (const auto mixerBehavior : mMixerBehaviors) {
+                    mixerAttributes->push_back({
+                        .config = config,
+                        .mixer_behavior = mixerBehavior
+                    });
+                }
+            }
+        }
+    }
+}
+
 void IOProfile::dump(String8 *dst, int spaces) const
 {
     String8 extraInfo;
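
isCompatibleProfileForFlags() above factors the playback-flag check out of isCompatibleProfile(): the requested output flags must be a subset of the profile's flags, and the two sides must agree exactly on DIRECT, HW_AV_SYNC and MMAP_NOIRQ. A standalone sketch of that check with plain bitmasks (flagsAreCompatible and the flag constants are illustrative helpers, not the framework's audio_output_flags_is_subset()):

#include <cstdint>
#include <iostream>

// Illustrative flag values for the sketch only.
constexpr uint32_t FLAG_DIRECT     = 0x1;
constexpr uint32_t FLAG_HW_AV_SYNC = 0x2;
constexpr uint32_t FLAG_MMAP_NOIRQ = 0x4;
constexpr uint32_t FLAG_FAST       = 0x8;

// Compatible when the "must match" flags agree exactly on both sides and the
// requested flags are a subset of the profile's flags.
bool flagsAreCompatible(uint32_t profileFlags, uint32_t requestedFlags, uint32_t mustMatch) {
    const bool mustMatchAgree = ((profileFlags ^ requestedFlags) & mustMatch) == 0;
    const bool requestedIsSubset = (profileFlags & requestedFlags) == requestedFlags;
    return mustMatchAgree && requestedIsSubset;
}

int main() {
    const uint32_t mustMatch = FLAG_DIRECT | FLAG_HW_AV_SYNC | FLAG_MMAP_NOIRQ;
    std::cout << std::boolalpha
              // FAST request against a DIRECT|FAST profile: DIRECT differs -> incompatible.
              << flagsAreCompatible(FLAG_DIRECT | FLAG_FAST, FLAG_FAST, mustMatch) << "\n"
              // FAST request against a FAST profile: compatible.
              << flagsAreCompatible(FLAG_FAST, FLAG_FAST, mustMatch) << "\n";
    return 0;
}
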
diff --git a/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
new file mode 100644
index 0000000..edb2c6d
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PreferredMixerAttributesInfo.h"
+
+namespace android {
+
+void PreferredMixerAttributesInfo::dump(String8 *dst) {
+    dst->appendFormat("device port ID: %d; owner uid: %d; profile name: %s; flags: %#x; "
+                      "sample rate: %u; channel mask: %#x; format: %#x; mixer behavior: %d; "
+                      "active clients count: %d\n",
+                      mDevicePortId, mUid, mProfile->getName().c_str(), mOutputFlags,
+                      mMixerAttributes.config.sample_rate, mMixerAttributes.config.channel_mask,
+                      mMixerAttributes.config.format, mMixerAttributes.mixer_behavior,
+                      mActiveClientsCount);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index d446e96..3d5c1d2 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -29,6 +29,7 @@
 #include <utils/StrongPointer.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
+#include "IOProfile.h"
 #include "Serializer.h"
 #include "TypeConverter.h"
 
@@ -196,7 +197,6 @@
 
     struct Attributes
     {
-        static constexpr const char *speakerDrcEnabled = "speaker_drc_enabled";
         static constexpr const char *callScreenModeSupported= "call_screen_mode_supported";
         static constexpr const char *engineLibrarySuffix = "engine_library";
     };
@@ -769,12 +769,7 @@
     for (const xmlNode *cur = root->xmlChildrenNode; cur != NULL; cur = cur->next) {
         if (!xmlStrcmp(cur->name, reinterpret_cast<const xmlChar*>(GlobalConfigTraits::tag))) {
             bool value;
-            std::string attr = getXmlAttribute(cur, Attributes::speakerDrcEnabled);
-            if (!attr.empty() &&
-                    convertTo<std::string, bool>(attr, value)) {
-                config->setSpeakerDrcEnabled(value);
-            }
-            attr = getXmlAttribute(cur, Attributes::callScreenModeSupported);
+            std::string attr = getXmlAttribute(cur, Attributes::callScreenModeSupported);
             if (!attr.empty() &&
                     convertTo<std::string, bool>(attr, value)) {
                 config->setCallScreenModeSupported(value);
@@ -907,7 +902,6 @@
 {
     PolicySerializer serializer;
     status_t status = serializer.deserialize(fileName, config);
-    if (status != OK) config->clear();
     return status;
 }
 
@@ -915,7 +909,6 @@
 {
     PolicySerializer serializer;
     status_t status = serializer.deserialize(fileName, config, true /*ignoreVendorExtensions*/);
-    if (status != OK) config->clear();
     return status;
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index c5b3546..8a44547 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -56,10 +56,12 @@
     MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
     MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
+    MAKE_STRING_FROM_ENUM(RULE_MATCH_AUDIO_SESSION_ID),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
     MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
+    MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_AUDIO_SESSION_ID),
     TERMINATOR
 };
 
diff --git a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
index e7908eb..c453dea 100644
--- a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
@@ -12,6 +12,7 @@
         </mixPort>
         <!-- Le Audio Audio Ports -->
         <mixPort name="le audio output" role="source" />
+        <mixPort name="le audio broadcast output" role="source" />
         <mixPort name="le audio input" role="sink">
             <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                      samplingRates="8000 16000 24000 32000 44100 48000"
@@ -47,6 +48,7 @@
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
         <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
+        <devicePort tagName="BLE Broadcast Out" type="AUDIO_DEVICE_OUT_BLE_BROADCAST" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -63,5 +65,7 @@
                sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
+        <route type="mix" sink="BLE Broadcast Out"
+               sources="le audio broadcast output"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/config/surround_sound_configuration_aidl.xml b/services/audiopolicy/config/surround_sound_configuration_aidl.xml
new file mode 100644
index 0000000..cf15711
--- /dev/null
+++ b/services/audiopolicy/config/surround_sound_configuration_aidl.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<surroundSound>
+  <!-- Each of the listed formats gets an entry in Surround Settings dialog on TV devices.
+       There must be a corresponding Java ENCODING_... constant defined in AudioFormat.java,
+       and a display name defined in AudioFormat.toDisplayName. For the formats that don't
+       need a dedicated Surround Settings dialog entry, a subformats list has to be used. -->
+  <formats>
+    <format name="AUDIO_FORMAT_AC3" />
+    <format name="AUDIO_FORMAT_E_AC3" />
+    <format name="AUDIO_FORMAT_E_AC3_JOC" />
+    <format name="AUDIO_FORMAT_DOLBY_TRUEHD" />
+    <format name="AUDIO_FORMAT_DTS" />
+    <format name="AUDIO_FORMAT_DTS_HD" />
+    <format name="AUDIO_FORMAT_DTS_HD_MA" />
+    <format name="AUDIO_FORMAT_DTS_UHD" />
+    <format name="AUDIO_FORMAT_DTS_UHD_P2" />
+    <format name="AUDIO_FORMAT_AAC_LC" subformats="AUDIO_FORMAT_AAC_HE_V1 AUDIO_FORMAT_AAC_HE_V2 AUDIO_FORMAT_AAC_ELD AUDIO_FORMAT_AAC_XHE" />
+    <format name="AUDIO_FORMAT_AC4" />
+  </formats>
+</surroundSound>
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 7d21ae0..bac51f5 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -105,12 +105,15 @@
     status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
             const AudioDeviceTypeAddrVector &devices) override;
 
-    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
+    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) override;
+
+    status_t clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
 
     status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
             AudioDeviceTypeAddrVector &devices) const override;
 
-    engineConfig::ParsingResult loadAudioPolicyEngineConfig();
+    engineConfig::ParsingResult loadAudioPolicyEngineConfig(const std::string& xmlFilePath = "");
 
     const ProductStrategyMap &getProductStrategies() const { return mProductStrategies; }
 
@@ -162,6 +165,16 @@
 
     DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const override;
 
+    void initializeDeviceSelectionCache() override;
+
+    void updateDeviceSelectionCache() override;
+
+protected:
+    DeviceVector getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+    DeviceVector getDisabledDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+
 private:
     /**
      * Get media devices as the given role
@@ -190,6 +203,28 @@
 
     /** current forced use configuration. */
     audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT] = {};
+
+protected:
+    /**
+     * Set the device information for a given strategy.
+     *
+     * @param strategy the strategy for which to set the device information
+     * @param devices the devices selected for the strategy
+     */
+    virtual void setStrategyDevices(const sp<ProductStrategy>& /*strategy*/,
+                                    const DeviceVector& /*devices*/) {
+        // In EngineBase, do nothing. It is up to the concrete engine to decide whether it needs
+        // to set the device information for the given strategy.
+    }
+
+    /**
+     * Get devices that will be used for the given product strategy.
+     *
+     * @param strategy the strategy to query
+     */
+    virtual DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const = 0;
+
+    DeviceStrategyMap mDevicesForStrategies;
 };
 
 } // namespace audio_policy
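
The protected hooks added above split responsibilities: EngineBase owns the per-strategy cache (mDevicesForStrategies) and refreshes it through initializeDeviceSelectionCache()/updateDeviceSelectionCache(), while each concrete engine only computes the routing in getDevicesForProductStrategy() and may observe the result via setStrategyDevices(). A minimal standalone sketch of that contract, using plain STL containers instead of DeviceVector/ProductStrategy (the model types below are illustrative, not the AOSP classes):

// Hypothetical, simplified model of the EngineBase device-selection cache.
#include <map>
#include <string>
#include <vector>

using StrategyId = int;
using Devices = std::vector<std::string>;

struct EngineBaseModel {
    // Cache refreshed by the base class; concrete engines only compute devices.
    std::map<StrategyId, Devices> devicesForStrategies;

    virtual Devices getDevicesForProductStrategy(StrategyId id) const = 0;
    virtual void setStrategyDevices(StrategyId, const Devices&) {}  // optional hook

    void initializeDeviceSelectionCache(const Devices& defaultDevices,
                                        const std::vector<StrategyId>& strategies) {
        for (StrategyId id : strategies) {
            devicesForStrategies[id] = defaultDevices;   // placeholder until modules are up
            setStrategyDevices(id, defaultDevices);
        }
    }
    void updateDeviceSelectionCache(const std::vector<StrategyId>& strategies) {
        for (StrategyId id : strategies) {
            Devices d = getDevicesForProductStrategy(id);  // recompute the real routing
            devicesForStrategies[id] = d;
            setStrategyDevices(id, d);
        }
    }
    virtual ~EngineBaseModel() = default;
};
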
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 2aa2f9a..1593be0 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -24,7 +24,7 @@
 #include <vector>
 
 #include <HandleGenerator.h>
-#include <media/AudioAttributes.h>
+#include <media/VolumeGroupAttributes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
@@ -43,25 +43,20 @@
 class ProductStrategy : public virtual RefBase, private HandleGenerator<uint32_t>
 {
 private:
-    struct AudioAttributes {
-        audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
-        volume_group_t mVolumeGroup = VOLUME_GROUP_NONE;
-        audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-    };
-
-    using AudioAttributesVector = std::vector<AudioAttributes>;
+    using VolumeGroupAttributesVector = std::vector<VolumeGroupAttributes>;
 
 public:
     ProductStrategy(const std::string &name);
 
-    void addAttributes(const AudioAttributes &audioAttributes);
+    void addAttributes(const VolumeGroupAttributes &volumeGroupAttributes);
 
-    std::vector<android::AudioAttributes> listAudioAttributes() const;
+    std::vector<android::VolumeGroupAttributes> listVolumeGroupAttributes() const;
 
     std::string getName() const { return mName; }
     AttributesVector getAudioAttributes() const;
     product_strategy_t getId() const { return mId; }
     StreamTypeVector getSupportedStreams() const;
+    VolumeGroupAttributesVector getVolumeGroupAttributes() const { return mAttributesVector; }
 
     /**
      * @brief matches checks if the given audio attributes shall follow the strategy.
@@ -69,9 +64,9 @@
      *        If only the usage is available, the check is performed on the usages of the given
      *        attributes, otherwise all fields must match.
      * @param attributes to consider
-     * @return true if attributes matches with the strategy, false otherwise.
+     * @return matching score, negative value if no match.
      */
-    bool matches(const audio_attributes_t attributes) const;
+    int matchesScore(const audio_attributes_t attributes) const;
 
     bool supportStreamType(const audio_stream_type_t &streamType) const;
 
@@ -90,9 +85,6 @@
     DeviceTypeSet getDeviceTypes() const { return mApplicableDevices; }
 
     audio_attributes_t getAttributesForStreamType(audio_stream_type_t stream) const;
-    audio_stream_type_t getStreamTypeForAttributes(const audio_attributes_t &attr) const;
-
-    volume_group_t getVolumeGroupForAttributes(const audio_attributes_t &attr) const;
 
     volume_group_t getVolumeGroupForStreamType(audio_stream_type_t stream) const;
 
@@ -105,7 +97,7 @@
 private:
     std::string mName;
 
-    AudioAttributesVector mAttributesVector;
+    VolumeGroupAttributesVector mAttributesVector;
 
     product_strategy_t mId;
 
@@ -167,6 +159,9 @@
     void dump(String8 *dst, int spaces = 0) const;
 
 private:
+    VolumeGroupAttributes getVolumeGroupAttributesForAttributes(
+            const audio_attributes_t &attr, bool fallbackOnDefault = true) const;
+
     product_strategy_t mDefaultStrategy = PRODUCT_STRATEGY_NONE;
 };
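
The switch from matches() to matchesScore() (documented above as returning a matching score, negative when there is no match) changes strategy selection from first-match to best-match: an exact attributes match wins immediately, otherwise the highest partial score is kept, and a default-only match can be rejected when fallbackOnDefault is false. A small self-contained illustration of that selection rule, with made-up score constants standing in for AudioProductStrategy (not the real header values):

#include <algorithm>
#include <cstddef>
#include <vector>

// Illustrative stand-ins for AudioProductStrategy's score constants.
constexpr int NO_MATCH = -1;
constexpr int MATCH_ON_DEFAULT_SCORE = 0;
constexpr int MATCH_EQUALS = 1000;

// Score one strategy: an exact match short-circuits, otherwise keep the best partial score.
int strategyScore(const std::vector<int>& perAttributeScores) {
    int best = NO_MATCH;
    for (int score : perAttributeScores) {
        if (score == MATCH_EQUALS) return score;
        best = std::max(best, score);
    }
    return best;
}

// Pick the best strategy index; return -1 when only the default matched and no fallback is
// wanted (mirrors the shape of ProductStrategyMap::getProductStrategyForAttributes).
int pickStrategy(const std::vector<std::vector<int>>& perStrategyScores, bool fallbackOnDefault) {
    int bestIdx = -1;
    int bestScore = NO_MATCH;
    for (std::size_t i = 0; i < perStrategyScores.size(); ++i) {
        const int score = strategyScore(perStrategyScores[i]);
        if (score == MATCH_EQUALS) return static_cast<int>(i);
        if (score > bestScore) {
            bestScore = score;
            bestIdx = static_cast<int>(i);
        }
    }
    return (bestScore != MATCH_ON_DEFAULT_SCORE || fallbackOnDefault) ? bestIdx : -1;
}
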
 
diff --git a/services/audiopolicy/engine/common/include/VolumeGroup.h b/services/audiopolicy/engine/common/include/VolumeGroup.h
index 5378f64..f40ab1c 100644
--- a/services/audiopolicy/engine/common/include/VolumeGroup.h
+++ b/services/audiopolicy/engine/common/include/VolumeGroup.h
@@ -39,7 +39,7 @@
     VolumeCurves *getVolumeCurves() { return &mGroupVolumeCurves; }
 
     void addSupportedAttributes(const audio_attributes_t &attr);
-    AttributesVector getSupportedAttributes() const { return mGroupVolumeCurves.getAttributes(); }
+    AttributesVector getSupportedAttributes() const;
 
     void addSupportedStream(audio_stream_type_t stream);
     StreamTypeVector getStreamTypes() const { return mGroupVolumeCurves.getStreamTypes(); }
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 99507ee..7d6a308 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -115,7 +115,7 @@
     return PRODUCT_STRATEGY_NONE;
 }
 
-engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig()
+engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
 {
     auto loadVolumeConfig = [](auto &volumeGroups, auto &volumeConfig) {
         // Ensure name uniqueness to prevent duplicates
@@ -145,7 +145,7 @@
     };
     auto addSupportedAttributesToGroup = [](auto &group, auto &volumeGroup, auto &strategy) {
         for (const auto &attr : group.attributesVect) {
-            strategy->addAttributes({group.stream, volumeGroup->getId(), attr});
+            strategy->addAttributes({volumeGroup->getId(), group.stream, attr});
             volumeGroup->addSupportedAttributes(attr);
         }
     };
@@ -163,8 +163,9 @@
         return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
     };
 
-    auto result = fileExists(engineConfig::DEFAULT_PATH) ?
-            engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
+    const std::string filePath = xmlFilePath.empty() ? engineConfig::DEFAULT_PATH : xmlFilePath;
+    auto result = fileExists(filePath.c_str()) ?
+            engineConfig::parse(filePath.c_str()) : engineConfig::ParsingResult{};
     if (result.parsedConfig == nullptr) {
         ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
         engineConfig::Config config = gDefaultEngineConfig;
@@ -284,7 +285,7 @@
     for (const auto &iter : mProductStrategies) {
         const auto &productStrategy = iter.second;
         strategies.push_back(
-        {productStrategy->getName(), productStrategy->listAudioAttributes(),
+        {productStrategy->getName(), productStrategy->listVolumeGroupAttributes(),
          productStrategy->getId()});
     }
     return NO_ERROR;
@@ -369,13 +370,11 @@
     }
 
     switch (role) {
-    case DEVICE_ROLE_PREFERRED:
-    case DEVICE_ROLE_DISABLED: {
+    case DEVICE_ROLE_PREFERRED: {
         tDevicesRoleMap[std::make_pair(t, role)] = devices;
         // The preferred devices and disabled devices are mutually exclusive. Once a device is
         // added to one list, it must be removed from the other one.
-        const device_role_t roleToRemove = role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED
-                                                                         : DEVICE_ROLE_PREFERRED;
+        const device_role_t roleToRemove = DEVICE_ROLE_DISABLED;
         auto it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
         if (it != tDevicesRoleMap.end()) {
             it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
@@ -384,6 +383,25 @@
             }
         }
     } break;
+    case DEVICE_ROLE_DISABLED: {
+        auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = joinDeviceTypeAddrs(it->second, devices);
+        } else {
+            tDevicesRoleMap[std::make_pair(t, role)] = devices;
+        }
+
+        // The preferred devices and disabled devices are mutually exclusive. Once a device is
+        // added to one list, it must be removed from the other one.
+        const device_role_t roleToRemove = DEVICE_ROLE_PREFERRED;
+        it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+            if (it->second.empty()) {
+                tDevicesRoleMap.erase(it);
+            }
+        }
+    } break;
     case DEVICE_ROLE_NONE:
         // Intentionally fall through: there is no need to set the device role to none for a strategy.
     default:
@@ -394,6 +412,36 @@
 }
 
 template <typename T>
+status_t removeDevicesRoleForT(
+        std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+        T t, device_role_t role, const AudioDeviceTypeAddrVector &devices,
+        const std::string& logStr, std::function<bool(T)> p) {
+    if (!p(t)) {
+        ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
+        return BAD_VALUE;
+    }
+
+    switch (role) {
+    case DEVICE_ROLE_PREFERRED:
+    case DEVICE_ROLE_DISABLED: {
+        auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+        if (it != tDevicesRoleMap.end()) {
+            it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+            if (it->second.empty()) {
+                tDevicesRoleMap.erase(it);
+            }
+        }
+    } break;
+    case DEVICE_ROLE_NONE:
+        // Intentionally fall through: there is no need to set the device role to none for a strategy.
+    default:
+        ALOGE("%s invalid role %d", __func__, role);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+template <typename T>
 status_t removeAllDevicesRoleForT(
         std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
         T t, device_role_t role, const std::string& logStr, std::function<bool(T)> p) {
@@ -462,7 +510,18 @@
             mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
 }
 
-status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices)
+{
+    std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+        return mProductStrategies.find(strategy) != mProductStrategies.end();
+    };
+    return removeDevicesRoleForT(
+            mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::clearDevicesRoleForStrategy(product_strategy_t strategy,
+            device_role_t role)
 {
     std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
         return mProductStrategies.find(strategy) != mProductStrategies.end();
@@ -627,6 +686,58 @@
     return activeDevices;
 }
 
+void EngineBase::initializeDeviceSelectionCache() {
+    // Initializing the device selection cache with the default device is harmless; it will be
+    // updated after the audio modules are initialized.
+    auto defaultDevices = DeviceVector(getApmObserver()->getDefaultOutputDevice());
+    for (const auto &iter : getProductStrategies()) {
+        const auto &strategy = iter.second;
+        mDevicesForStrategies[strategy->getId()] = defaultDevices;
+        setStrategyDevices(strategy, defaultDevices);
+    }
+}
+
+void EngineBase::updateDeviceSelectionCache() {
+    for (const auto &iter : getProductStrategies()) {
+        const auto& strategy = iter.second;
+        auto devices = getDevicesForProductStrategy(strategy->getId());
+        mDevicesForStrategies[strategy->getId()] = devices;
+        setStrategyDevices(strategy, devices);
+    }
+}
+
+DeviceVector EngineBase::getPreferredAvailableDevicesForProductStrategy(
+        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector preferredAvailableDevVec = {};
+    AudioDeviceTypeAddrVector preferredStrategyDevices;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
+    if (status == NO_ERROR) {
+        // there is a preferred device, is it available?
+        preferredAvailableDevVec =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+            ALOGV("%s using pref device %s for strategy %u",
+                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+            return preferredAvailableDevVec;
+        }
+    }
+    return preferredAvailableDevVec;
+}
+
+DeviceVector EngineBase::getDisabledDevicesForProductStrategy(
+        const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
+    DeviceVector disabledDevices = {};
+    AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+    const status_t status = getDevicesForRoleAndStrategy(
+            strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+    if (status == NO_ERROR) {
+        disabledDevices =
+                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+    }
+    return disabledDevices;
+}
+
 void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
 {
     dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
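
The helpers added to EngineBase.cpp above give the device-role maps simple set semantics: setting DEVICE_ROLE_DISABLED joins the new devices with any existing entry and removes them from the PREFERRED list (setting PREFERRED replaces the entry and removes the devices from DISABLED), removeDevicesRoleForStrategy() subtracts only the given devices, and clearDevicesRoleForStrategy() drops the whole entry. A standalone sketch of those rules with std::set in place of AudioDeviceTypeAddrVector (illustrative only, not the actual helpers):

#include <map>
#include <set>
#include <string>
#include <utility>

enum class Role { Preferred, Disabled };
using Device = std::string;
using Devices = std::set<Device>;
using RoleMap = std::map<std::pair<int /*strategy*/, Role>, Devices>;

void setDevicesForRole(RoleMap& m, int strategy, Role role, const Devices& devices) {
    const Role other = (role == Role::Preferred) ? Role::Disabled : Role::Preferred;
    if (role == Role::Disabled) {
        m[{strategy, role}].insert(devices.begin(), devices.end());  // join with existing entry
    } else {
        m[{strategy, role}] = devices;                               // replace the entry
    }
    // Preferred and disabled are mutually exclusive: drop these devices from the other role.
    auto it = m.find({strategy, other});
    if (it != m.end()) {
        for (const Device& d : devices) it->second.erase(d);
        if (it->second.empty()) m.erase(it);
    }
}

void removeDevicesForRole(RoleMap& m, int strategy, Role role, const Devices& devices) {
    auto it = m.find({strategy, role});
    if (it == m.end()) return;
    for (const Device& d : devices) it->second.erase(d);
    if (it->second.empty()) m.erase(it);
}

void clearDevicesForRole(RoleMap& m, int strategy, Role role) {
    m.erase({strategy, role});
}
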
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index b036e12..548a20d 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -16,6 +16,8 @@
 
 #pragma once
 
+#include <EngineConfig.h>
+
 #include <system/audio.h>
 
 namespace android {
@@ -25,11 +27,11 @@
 const engineConfig::ProductStrategies gOrderedStrategies = {
     {"STRATEGY_PHONE",
      {
-         {"phone", AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
+         {AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}},
          },
-         {"sco", AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
+         {AUDIO_STREAM_BLUETOOTH_SCO, "AUDIO_STREAM_BLUETOOTH_SCO",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_SCO,
             ""}},
          }
@@ -37,11 +39,11 @@
     },
     {"STRATEGY_SONIFICATION",
      {
-         {"ring", AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
+         {AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
-         {"alarm", AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
+         {AUDIO_STREAM_ALARM, "AUDIO_STREAM_ALARM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}},
          }
@@ -49,7 +51,7 @@
     },
     {"STRATEGY_ENFORCED_AUDIBLE",
      {
-         {"", AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
+         {AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_AUDIBILITY_ENFORCED, ""}}
          }
@@ -57,7 +59,7 @@
     },
     {"STRATEGY_ACCESSIBILITY",
      {
-         {"", AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
+         {AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
@@ -65,7 +67,7 @@
     },
     {"STRATEGY_SONIFICATION_RESPECTFUL",
      {
-         {"", AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
+         {AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
@@ -77,11 +79,11 @@
     },
     {"STRATEGY_MEDIA",
      {
-         {"assistant", AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
+         {AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          },
-         {"music", AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
+         {AUDIO_STREAM_MUSIC, "AUDIO_STREAM_MUSIC",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
@@ -95,7 +97,7 @@
                AUDIO_FLAG_NONE, ""}
           },
          },
-         {"system", AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
+         {AUDIO_STREAM_SYSTEM, "AUDIO_STREAM_SYSTEM",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_SONIFICATION,
             AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}}
          }
@@ -103,7 +105,7 @@
     },
     {"STRATEGY_DTMF",
      {
-         {"", AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
+         {AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
@@ -113,7 +115,7 @@
     },
     {"STRATEGY_CALL_ASSISTANT",
      {
-         {"", AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
+         {AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
             AUDIO_FLAG_NONE, ""}}
          }
@@ -121,7 +123,7 @@
     },
     {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER",
      {
-         {"", AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
+         {AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
                 AUDIO_FLAG_BEACON, ""},
@@ -138,19 +140,19 @@
  * For compatibility reasons with the APM volume config file, the volume group name is the stream type.
  */
 const engineConfig::ProductStrategies gOrderedSystemStrategies = {
-    {"rerouting",
+    {"STRATEGY_REROUTING",
      {
-         {"", AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
+         {AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
-            AUDIO_FLAG_NONE, ""}}
+            AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
          }
      },
     },
-    {"patch",
+    {"STRATEGY_PATCH",
      {
-         {"", AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
+         {AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
-            AUDIO_FLAG_NONE, ""}}
+            AUDIO_FLAG_NONE, AUDIO_TAG_APM_RESERVED_INTERNAL}}
          }
      },
     }
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index fbfcf72..1d3ad1c 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -36,16 +36,16 @@
 {
 }
 
-void ProductStrategy::addAttributes(const AudioAttributes &audioAttributes)
+void ProductStrategy::addAttributes(const VolumeGroupAttributes &volumeGroupAttributes)
 {
-    mAttributesVector.push_back(audioAttributes);
+    mAttributesVector.push_back(volumeGroupAttributes);
 }
 
-std::vector<android::AudioAttributes> ProductStrategy::listAudioAttributes() const
+std::vector<android::VolumeGroupAttributes> ProductStrategy::listVolumeGroupAttributes() const
 {
-    std::vector<android::AudioAttributes> androidAa;
+    std::vector<android::VolumeGroupAttributes> androidAa;
     for (const auto &attr : mAttributesVector) {
-        androidAa.push_back({attr.mVolumeGroup, attr.mStream, attr.mAttributes});
+        androidAa.push_back({attr.getGroupId(), attr.getStreamType(), attr.getAttributes()});
     }
     return androidAa;
 }
@@ -54,7 +54,7 @@
 {
     AttributesVector attrVector;
     for (const auto &attrGroup : mAttributesVector) {
-        attrVector.push_back(attrGroup.mAttributes);
+        attrVector.push_back(attrGroup.getAttributes());
     }
     if (not attrVector.empty()) {
         return attrVector;
@@ -62,52 +62,40 @@
     return { AUDIO_ATTRIBUTES_INITIALIZER };
 }
 
-bool ProductStrategy::matches(const audio_attributes_t attr) const
+int ProductStrategy::matchesScore(const audio_attributes_t attr) const
 {
-    return std::find_if(begin(mAttributesVector), end(mAttributesVector),
-                        [&attr](const auto &supportedAttr) {
-        return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr);
-    }) != end(mAttributesVector);
-}
-
-audio_stream_type_t ProductStrategy::getStreamTypeForAttributes(
-        const audio_attributes_t &attr) const
-{
-    const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
-                                   [&attr](const auto &supportedAttr) {
-        return AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr); });
-    if (iter == end(mAttributesVector)) {
-        return AUDIO_STREAM_DEFAULT;
+    int strategyScore = AudioProductStrategy::NO_MATCH;
+    for (const auto &attrGroup : mAttributesVector) {
+        int score = AudioProductStrategy::attributesMatchesScore(attrGroup.getAttributes(), attr);
+        if (score == AudioProductStrategy::MATCH_EQUALS) {
+            return score;
+        }
+        strategyScore = std::max(score, strategyScore);
     }
-    audio_stream_type_t streamType = iter->mStream;
-    ALOGW_IF(streamType == AUDIO_STREAM_DEFAULT,
-             "%s: Strategy %s supporting attributes %s has not stream type associated"
-             "fallback on MUSIC. Do not use stream volume API", __func__, mName.c_str(),
-             toString(attr).c_str());
-    return streamType != AUDIO_STREAM_DEFAULT ? streamType : AUDIO_STREAM_MUSIC;
+    return strategyScore;
 }
 
 audio_attributes_t ProductStrategy::getAttributesForStreamType(audio_stream_type_t streamType) const
 {
     const auto iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
                                    [&streamType](const auto &supportedAttr) {
-        return supportedAttr.mStream == streamType; });
-    return iter != end(mAttributesVector) ? iter->mAttributes : AUDIO_ATTRIBUTES_INITIALIZER;
+        return supportedAttr.getStreamType() == streamType; });
+    return iter != end(mAttributesVector) ? iter->getAttributes() : AUDIO_ATTRIBUTES_INITIALIZER;
 }
 
 bool ProductStrategy::isDefault() const
 {
     return std::find_if(begin(mAttributesVector), end(mAttributesVector), [](const auto &attr) {
-        return attr.mAttributes == defaultAttr; }) != end(mAttributesVector);
+        return attr.getAttributes() == defaultAttr; }) != end(mAttributesVector);
 }
 
 StreamTypeVector ProductStrategy::getSupportedStreams() const
 {
     StreamTypeVector streams;
     for (const auto &supportedAttr : mAttributesVector) {
-        if (std::find(begin(streams), end(streams), supportedAttr.mStream) == end(streams) &&
-                supportedAttr.mStream != AUDIO_STREAM_DEFAULT) {
-            streams.push_back(supportedAttr.mStream);
+        if (std::find(begin(streams), end(streams), supportedAttr.getStreamType())
+                == end(streams) && supportedAttr.getStreamType() != AUDIO_STREAM_DEFAULT) {
+            streams.push_back(supportedAttr.getStreamType());
         }
     }
     return streams;
@@ -117,24 +105,14 @@
 {
     return std::find_if(begin(mAttributesVector), end(mAttributesVector),
                         [&streamType](const auto &supportedAttr) {
-        return supportedAttr.mStream == streamType; }) != end(mAttributesVector);
-}
-
-volume_group_t ProductStrategy::getVolumeGroupForAttributes(const audio_attributes_t &attr) const
-{
-    for (const auto &supportedAttr : mAttributesVector) {
-        if (AudioProductStrategy::attributesMatches(supportedAttr.mAttributes, attr)) {
-            return supportedAttr.mVolumeGroup;
-        }
-    }
-    return VOLUME_GROUP_NONE;
+        return supportedAttr.getStreamType() == streamType; }) != end(mAttributesVector);
 }
 
 volume_group_t ProductStrategy::getVolumeGroupForStreamType(audio_stream_type_t stream) const
 {
     for (const auto &supportedAttr : mAttributesVector) {
-        if (supportedAttr.mStream == stream) {
-            return supportedAttr.mVolumeGroup;
+        if (supportedAttr.getStreamType() == stream) {
+            return supportedAttr.getGroupId();
         }
     }
     return VOLUME_GROUP_NONE;
@@ -143,8 +121,10 @@
 volume_group_t ProductStrategy::getDefaultVolumeGroup() const
 {
     const auto &iter = std::find_if(begin(mAttributesVector), end(mAttributesVector),
-                                    [](const auto &attr) {return attr.mAttributes == defaultAttr;});
-    return iter != end(mAttributesVector) ? iter->mVolumeGroup : VOLUME_GROUP_NONE;
+                                    [](const auto &attr) {
+        return attr.getAttributes() == defaultAttr;
+    });
+    return iter != end(mAttributesVector) ? iter->getGroupId() : VOLUME_GROUP_NONE;
 }
 
 void ProductStrategy::dump(String8 *dst, int spaces) const
@@ -155,26 +135,32 @@
                        deviceLiteral.c_str(), mDeviceAddress.c_str());
 
     for (const auto &attr : mAttributesVector) {
-        dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.mVolumeGroup,
-                          android::toString(attr.mStream).c_str());
+        dst->appendFormat("%*sGroup: %d stream: %s\n", spaces + 3, "", attr.getGroupId(),
+                          android::toString(attr.getStreamType()).c_str());
         dst->appendFormat("%*s Attributes: ", spaces + 3, "");
-        std::string attStr =
-                attr.mAttributes == defaultAttr ? "{ Any }" : android::toString(attr.mAttributes);
+        std::string attStr = attr.getAttributes() == defaultAttr ?
+                "{ Any }" : android::toString(attr.getAttributes());
         dst->appendFormat("%s\n", attStr.c_str());
     }
 }
 
 product_strategy_t ProductStrategyMap::getProductStrategyForAttributes(
-        const audio_attributes_t &attr, bool fallbackOnDefault) const
+        const audio_attributes_t &attributes, bool fallbackOnDefault) const
 {
+    product_strategy_t bestStrategyOrdefault = PRODUCT_STRATEGY_NONE;
+    int matchScore = AudioProductStrategy::NO_MATCH;
     for (const auto &iter : *this) {
-        if (iter.second->matches(attr)) {
+        int score = iter.second->matchesScore(attributes);
+        if (score == AudioProductStrategy::MATCH_EQUALS) {
             return iter.second->getId();
         }
+        if (score > matchScore) {
+            bestStrategyOrdefault = iter.second->getId();
+            matchScore = score;
+        }
     }
-    ALOGV("%s: No matching product strategy for attributes %s, return default", __FUNCTION__,
-          toString(attr).c_str());
-    return fallbackOnDefault? getDefault() : PRODUCT_STRATEGY_NONE;
+    return (matchScore != AudioProductStrategy::MATCH_ON_DEFAULT_SCORE || fallbackOnDefault) ?
+            bestStrategyOrdefault : PRODUCT_STRATEGY_NONE;
 }
 
 audio_attributes_t ProductStrategyMap::getAttributesForStreamType(audio_stream_type_t stream) const
@@ -190,20 +176,6 @@
     return {};
 }
 
-audio_stream_type_t ProductStrategyMap::getStreamTypeForAttributes(
-        const audio_attributes_t &attr) const
-{
-    for (const auto &iter : *this) {
-        audio_stream_type_t stream = iter.second->getStreamTypeForAttributes(attr);
-        if (stream != AUDIO_STREAM_DEFAULT) {
-            return stream;
-        }
-    }
-    ALOGV("%s: No product strategy for attributes %s, using default (aka MUSIC)", __FUNCTION__,
-          toString(attr).c_str());
-    return  AUDIO_STREAM_MUSIC;
-}
-
 product_strategy_t ProductStrategyMap::getDefault() const
 {
     if (mDefaultStrategy != PRODUCT_STRATEGY_NONE) {
@@ -268,16 +240,39 @@
     return at(psId)->getDeviceAddress();
 }
 
+VolumeGroupAttributes ProductStrategyMap::getVolumeGroupAttributesForAttributes(
+        const audio_attributes_t &attr, bool fallbackOnDefault) const
+{
+    int matchScore = AudioProductStrategy::NO_MATCH;
+    VolumeGroupAttributes bestVolumeGroupAttributes = {};
+    for (const auto &iter : *this) {
+        for (const auto &volGroupAttr : iter.second->getVolumeGroupAttributes()) {
+            int score = volGroupAttr.matchesScore(attr);
+            if (score == AudioProductStrategy::MATCH_EQUALS) {
+                return volGroupAttr;
+            }
+            if (score > matchScore) {
+                matchScore = score;
+                bestVolumeGroupAttributes = volGroupAttr;
+            }
+        }
+    }
+    return (matchScore != AudioProductStrategy::MATCH_ON_DEFAULT_SCORE || fallbackOnDefault) ?
+            bestVolumeGroupAttributes : VolumeGroupAttributes();
+}
+
+audio_stream_type_t ProductStrategyMap::getStreamTypeForAttributes(
+        const audio_attributes_t &attr) const
+{
+    audio_stream_type_t streamType = getVolumeGroupAttributesForAttributes(
+            attr, /* fallbackOnDefault= */ true).getStreamType();
+    return streamType != AUDIO_STREAM_DEFAULT ? streamType : AUDIO_STREAM_MUSIC;
+}
+
 volume_group_t ProductStrategyMap::getVolumeGroupForAttributes(
         const audio_attributes_t &attr, bool fallbackOnDefault) const
 {
-    for (const auto &iter : *this) {
-        volume_group_t group = iter.second->getVolumeGroupForAttributes(attr);
-        if (group != VOLUME_GROUP_NONE) {
-            return group;
-        }
-    }
-    return fallbackOnDefault ? getDefaultVolumeGroup() : VOLUME_GROUP_NONE;
+    return getVolumeGroupAttributesForAttributes(attr, fallbackOnDefault).getGroupId();
 }
 
 volume_group_t ProductStrategyMap::getVolumeGroupForStreamType(
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index 8aa4b08..fccbc60 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -52,7 +52,7 @@
             volIdx = volIndexMin;
         } else {
             // This would result in a divide-by-zero below
-            ALOG_ASSERT(volIndexmin != volIndexMax, "Invalid volume index range & value: 0");
+            ALOG_ASSERT(volIndexMin != volIndexMax, "Invalid volume index range & value: 0");
             return NAN;
         }
     } else {
diff --git a/services/audiopolicy/engine/common/src/VolumeGroup.cpp b/services/audiopolicy/engine/common/src/VolumeGroup.cpp
index e189807..f5ffbba 100644
--- a/services/audiopolicy/engine/common/src/VolumeGroup.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeGroup.cpp
@@ -37,6 +37,17 @@
 {
 }
 
+// Used for introspection, e.g. by the Java layer
+AttributesVector VolumeGroup::getSupportedAttributes() const
+{
+    AttributesVector supportedAttributes = {};
+    for (auto &aa : mGroupVolumeCurves.getAttributes()) {
+        aa.source = AUDIO_SOURCE_INVALID;
+        supportedAttributes.push_back(aa);
+    }
+    return supportedAttributes;
+}
+
 void VolumeGroup::dump(String8 *dst, int spaces) const
 {
     dst->appendFormat("\n%*s-%s (id: %d)\n", spaces, "", mName.c_str(), mId);
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 2ebb7df..4de16c5 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -25,6 +25,12 @@
 struct _xmlNode;
 struct _xmlDoc;
 
+/**
+ * AudioAttributes custom tag to identify internal strategies, whose volumes are exclusively
+ * controlled by AudioPolicyManager
+ */
+#define AUDIO_TAG_APM_RESERVED_INTERNAL "reserved_internal_strategy"
+
 namespace android {
 namespace engineConfig {
 
@@ -35,7 +41,6 @@
 using StreamVector = std::vector<audio_stream_type_t>;
 
 struct AttributesGroup {
-    std::string name;
     audio_stream_type_t stream;
     std::string volumeGroup;
     AttributesVector attributesVect;
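
The new AUDIO_TAG_APM_RESERVED_INTERNAL macro above marks the attributes of internal strategies (rerouting, patch) whose volume is controlled exclusively by AudioPolicyManager. A trivial, illustrative check of how such a tag could be recognised (a sketch only, not the actual parsing code):

#include <string>

// Same literal as AUDIO_TAG_APM_RESERVED_INTERNAL in EngineConfig.h.
constexpr const char* kApmReservedInternalTag = "reserved_internal_strategy";

// Hypothetical helper: true when an attributes "tags" field carries the reserved marker,
// i.e. the owning strategy should not be exposed for application volume control.
bool isApmReservedInternal(const std::string& tags) {
    return tags.find(kApmReservedInternalTag) != std::string::npos;
}
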
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 6f560d5..ac117f0 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -57,7 +57,6 @@
     static constexpr const char *collectionTag = "AttributesGroups";
 
     struct Attributes {
-        static constexpr const char *name = "name";
         static constexpr const char *streamType = "streamType";
         static constexpr const char *volumeGroup = "volumeGroup";
     };
@@ -313,12 +312,6 @@
 status_t AttributesGroupTraits::deserialize(_xmlDoc *doc, const _xmlNode *child,
                                             Collection &attributesGroup)
 {
-    std::string name = getXmlAttribute(child, Attributes::name);
-    if (name.empty()) {
-        ALOGV("AttributesGroupTraits No attribute %s found", Attributes::name);
-    }
-    ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
-
     std::string volumeGroup = getXmlAttribute(child, Attributes::volumeGroup);
     if (volumeGroup.empty()) {
         ALOGE("%s: No attribute %s found", __FUNCTION__, Attributes::volumeGroup);
@@ -339,7 +332,7 @@
     AttributesVector attributesVect;
     deserializeAttributesCollection(doc, child, attributesVect);
 
-    attributesGroup.push_back({name, streamType, volumeGroup, attributesVect});
+    attributesGroup.push_back({streamType, volumeGroup, attributesVect});
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index 518f86e..dd7ac1a 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <string>
 #include <utility>
 
 #include <AudioPolicyManagerObserver.h>
@@ -46,6 +47,13 @@
 {
 public:
     /**
+     * Loads the engine configuration from the specified or the default config file.
+     * If loading fails, it tries to fall back to a default configuration. If the fallback
+     * is impossible, an error is returned.
+     */
+    virtual status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") = 0;
+
+    /**
      * Checks if the engine was correctly initialized.
      *
      * @return NO_ERROR if initialization has been done correctly, error code otherwise.
@@ -173,10 +181,11 @@
      * @param[out] mix to be used if a mix has been installed for the given audio attributes.
      * @return selected input device for the audio attributes, may be null if error.
      */
-    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
-                                                             uid_t uid = 0,
-                                                             sp<AudioPolicyMix> *mix = nullptr)
-                                                             const = 0;
+    virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
+            const audio_attributes_t &attr,
+            uid_t uid = 0,
+            audio_session_t session = AUDIO_SESSION_NONE,
+            sp<AudioPolicyMix> *mix = nullptr) const = 0;
 
     /**
      * Get the legacy stream type for a given audio attributes.
@@ -325,10 +334,22 @@
      * for the given strategy
      * @param strategy the audio strategy whose routing will be affected
      * @param role the role of the devices for strategy
+     * @param devices the audio devices to be removed
      * @return BAD_VALUE if the strategy or role is invalid,
      *     or NO_ERROR if the devices for this role were removed
      */
-    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+    virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+            const AudioDeviceTypeAddrVector &devices) = 0;
+
+    /**
+     * @brief clearDevicesRoleForStrategy removes the role of all devices previously set
+     * for the given strategy
+     * @param strategy the audio strategy whose routing will be affected
+     * @param role the role of the devices for strategy
+     * @return BAD_VALUE if the strategy or role is invalid,
+     *     or NO_ERROR if the devices for this role were removed
+     */
+    virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
             device_role_t role) = 0;
 
     /**
@@ -421,6 +442,16 @@
      */
     virtual DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const = 0;
 
+    /**
+     * @brief initializeDeviceSelectionCache. Device selection for AudioAttributes / streams is
+     * cached in the engine in order to speed up the process when the audio system is stable.
+     * While the audio system is initializing, not all audio device information is available yet.
+     * In that case, calling this function allows the engine to initialize the device selection
+     * cache with default values.
+     * This must only be called while the audio policy manager is initializing.
+     */
+    virtual void initializeDeviceSelectionCache() = 0;
+
     virtual void dump(String8 *dst) const = 0;
 
 protected:
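
Together, loadFromXmlConfigWithFallback() and initializeDeviceSelectionCache() imply an ordering on the AudioPolicyManager side: load the engine configuration (optionally from a custom path), verify initCheck(), seed the selection cache with defaults before the audio modules are opened, and let later updates recompute it. A hedged, self-contained sketch of that sequence (EngineStub and the call sites are illustrative, not the actual APM code):

#include <iostream>
#include <string>

// Illustrative stub mirroring the EngineInterface lifecycle described above.
struct EngineStub {
    int loadFromXmlConfigWithFallback(const std::string& path = "") {
        std::cout << "load engine config from " << (path.empty() ? "<default path>" : path)
                  << ", fall back to built-in defaults on failure\n";
        return 0;  // NO_ERROR
    }
    int initCheck() const { return 0; }
    void initializeDeviceSelectionCache() {
        std::cout << "seed per-strategy device cache with the default output device\n";
    }
    void updateDeviceSelectionCache() {
        std::cout << "recompute per-strategy devices once modules/devices are known\n";
    }
};

int main() {
    EngineStub engine;
    if (engine.loadFromXmlConfigWithFallback() != 0) return 1;
    if (engine.initCheck() != 0) return 1;
    engine.initializeDeviceSelectionCache();  // safe before the audio modules are opened
    // ... later, after device connection events ...
    engine.updateDeviceSelectionCache();
    return 0;
}
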
diff --git a/services/audiopolicy/engineconfigurable/src/Collection.h b/services/audiopolicy/engineconfigurable/src/Collection.h
index 02b41cb..4640515 100644
--- a/services/audiopolicy/engineconfigurable/src/Collection.h
+++ b/services/audiopolicy/engineconfigurable/src/Collection.h
@@ -53,6 +53,10 @@
     {
         collectionSupported();
     }
+    ~Collection()
+    {
+        clear();
+    }
 
     /**
      * Add a policy element to the collection. Policy elements are streams, strategies, input
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 3d74920..2eb0177 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -68,16 +68,15 @@
 
 Engine::Engine() : mPolicyParameterMgr(new ParameterManagerWrapper())
 {
-    status_t loadResult = loadAudioPolicyEngineConfig();
+}
+
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
+{
+    status_t loadResult = loadAudioPolicyEngineConfig(xmlFilePath);
     if (loadResult < 0) {
         ALOGE("Policy Engine configuration is invalid.");
     }
-}
-
-Engine::~Engine()
-{
-    mStreamCollection.clear();
-    mInputSourceCollection.clear();
+    return loadResult;
 }
 
 status_t Engine::initCheck()
@@ -93,7 +92,7 @@
 template <typename Key>
 Element<Key> *Engine::getFromCollection(const Key &key) const
 {
-    const Collection<Key> collection = getCollection<Key>();
+    const Collection<Key> &collection = getCollection<Key>();
     return collection.get(key);
 }
 
@@ -165,6 +164,21 @@
     return mPolicyParameterMgr->getForceUse(usage);
 }
 
+status_t Engine::setOutputDevicesConnectionState(const DeviceVector &devices,
+                                                 audio_policy_dev_state_t state)
+{
+    for (const auto &device : devices) {
+        mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
+        availableOutputDevices.remove(devices);
+    } else {
+        availableOutputDevices.add(devices);
+    }
+    return mPolicyParameterMgr->setAvailableOutputDevices(availableOutputDevices.types());
+}
+
 status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
                                           audio_policy_dev_state_t state)
 {
@@ -179,9 +193,9 @@
     return EngineBase::setDeviceConnectionState(device, state);
 }
 
-status_t Engine::loadAudioPolicyEngineConfig()
+status_t Engine::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
 {
-    auto result = EngineBase::loadAudioPolicyEngineConfig();
+    auto result = EngineBase::loadAudioPolicyEngineConfig(xmlFilePath);
 
     // Custom XML Parsing
     auto loadCriteria= [this](const auto& configCriteria, const auto& configCriterionTypes) {
@@ -205,17 +219,126 @@
     return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
 }
 
+status_t Engine::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                                           const AudioDeviceTypeAddrVector &devices)
+{
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::setDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR) {
+        return status;
+    }
+    DeviceVector newDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    if (role == DEVICE_ROLE_PREFERRED) {
+        DeviceVector reenabledDevices = prevDisabledDevices;
+        reenabledDevices.remove(newDisabledDevices);
+        if (reenabledDevices.empty()) {
+            ALOGD("%s DEVICE_ROLE_PREFERRED empty renabled devices", __func__);
+            return status;
+        }
+        // some devices were moved from disabled to preferred, need to force a resync for these
+        enableDevicesForStrategy(strategy, prevDisabledDevices);
+    }
+    if (newDisabledDevices.empty()) {
+        return status;
+    }
+    return disableDevicesForStrategy(strategy, newDisabledDevices);
+}
+
+status_t Engine::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+        const AudioDeviceTypeAddrVector &devices)
+{
+    const auto productStrategies = getProductStrategies();
+    if (productStrategies.find(strategy) == end(productStrategies)) {
+        ALOGE("%s invalid %d", __func__, strategy);
+        return BAD_VALUE;
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::removeDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED) {
+        return status;
+    }
+    // Removing ROLE_DISABLED for given devices, need to force a resync for these
+    enableDevicesForStrategy(strategy, prevDisabledDevices);
+
+    DeviceVector remainingDisabledDevices = getDisabledDevicesForProductStrategy(
+            availableOutputDevices, strategy);
+    if (remainingDisabledDevices.empty()) {
+        return status;
+    }
+    return disableDevicesForStrategy(strategy, remainingDisabledDevices);
+}
+
+status_t Engine::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+    const auto productStrategies = getProductStrategies();
+    if (productStrategies.find(strategy) == end(productStrategies)) {
+        ALOGE("%s invalid %d", __func__, strategy);
+        return BAD_VALUE;
+    }
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector prevDisabledDevices =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    status_t status = EngineBase::clearDevicesRoleForStrategy(strategy, role);
+    if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED || prevDisabledDevices.empty()) {
+        return status;
+    }
+    // Disabled devices were removed, need to force a resync for these
+    enableDevicesForStrategy(strategy, prevDisabledDevices);
+    return NO_ERROR;
+}
+
+void Engine::enableDevicesForStrategy(product_strategy_t strategy __unused,
+        const DeviceVector &devicesToEnable) {
+    // devices were (re)enabled, need to force a resync for these
+    setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+    setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+}
+
+status_t Engine::disableDevicesForStrategy(product_strategy_t strategy,
+        const DeviceVector &devicesToDisable) {
+    // Filter out disabled devices for this strategy.
+    // However, updating the output device decision requires updating the availability criterion,
+    // which may impact other strategies. As a workaround, the availability is toggled now and
+    // restored below so that the decision for other strategies is not altered.
+    setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
+    DeviceTypeSet deviceTypes = getProductStrategies().getDeviceTypesForProductStrategy(strategy);
+    const std::string address(getProductStrategies().getDeviceAddressForProductStrategy(strategy));
+
+    setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+
+    // Force reapply devices for given strategy
+    getProductStrategies().at(strategy)->setDeviceTypes(deviceTypes);
+    setDeviceAddressForProductStrategy(strategy, address);
+    return NO_ERROR;
+}
+
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t ps) const
 {
+    DeviceVector selectedDevices = {};
+    DeviceVector disabledDevices = {};
     const auto productStrategies = getProductStrategies();
     if (productStrategies.find(ps) == productStrategies.end()) {
         ALOGE("%s: Trying to get device on invalid strategy %d", __FUNCTION__, ps);
-        return {};
+        return selectedDevices;
     }
-    const DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+    DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
     const SwAudioOutputCollection &outputs = getApmObserver()->getOutputs();
     DeviceTypeSet availableOutputDevicesTypes = availableOutputDevices.types();
 
+    // check if this strategy has a preferred device that is available,
+    // if yes, give priority to it.
+    DeviceVector preferredAvailableDevVec =
+            getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps);
+    if (!preferredAvailableDevVec.isEmpty()) {
+        return preferredAvailableDevVec;
+    }
+
     /** This is the only case handled programmatically because the PFW is unable to know the
      * activity of streams.
      *
@@ -227,33 +350,34 @@
      * -When media is not playing anymore, fall back on the sonification behavior
      */
     DeviceTypeSet deviceTypes;
+    product_strategy_t psOrFallback = ps;
     if (ps == getProductStrategyForStream(AUDIO_STREAM_NOTIFICATION) &&
             !is_state_in_call(getPhoneState()) &&
             !outputs.isActiveRemotely(toVolumeSource(AUDIO_STREAM_MUSIC),
                                       SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY) &&
             outputs.isActive(toVolumeSource(AUDIO_STREAM_MUSIC),
                              SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
-        product_strategy_t strategyForMedia =
-                getProductStrategyForStream(AUDIO_STREAM_MUSIC);
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyForMedia);
+        psOrFallback = getProductStrategyForStream(AUDIO_STREAM_MUSIC);
     } else if (ps == getProductStrategyForStream(AUDIO_STREAM_ACCESSIBILITY) &&
         (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
          outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM)))) {
             // do not route accessibility prompts to a digital output currently configured with a
             // compressed format as they would likely not be mixed and dropped.
             // The Device for Sonification conf file has HDMI, SPDIF and HDMI ARC unreachable.
-        product_strategy_t strategyNotification = getProductStrategyForStream(AUDIO_STREAM_RING);
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyNotification);
-    } else {
-        deviceTypes = productStrategies.getDeviceTypesForProductStrategy(ps);
+        psOrFallback = getProductStrategyForStream(AUDIO_STREAM_RING);
     }
+    disabledDevices = getDisabledDevicesForProductStrategy(availableOutputDevices, psOrFallback);
+    deviceTypes = productStrategies.getDeviceTypesForProductStrategy(psOrFallback);
+    // In case a fallback on another strategy is decided, prevent selecting devices that are
+    // disabled for the current strategy.
+    availableOutputDevices.remove(disabledDevices);
+
     if (deviceTypes.empty() ||
             Intersection(deviceTypes, availableOutputDevicesTypes).empty()) {
         auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
         ALOG_ASSERT(defaultDevice != nullptr, "no valid default device defined");
-        return DeviceVector(defaultDevice);
-    }
-    if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
+        selectedDevices = DeviceVector(defaultDevice);
+    } else if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
             deviceTypes, AUDIO_DEVICE_OUT_BUS)) {
         // We expect only one device for these device types
         // The criterion device address guarantees this one is available
@@ -268,12 +392,15 @@
                   dumpDeviceTypes(deviceTypes).c_str(), address.c_str());
             auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
             ALOG_ASSERT(defaultDevice != nullptr, "Default Output Device NOT available");
-            return DeviceVector(defaultDevice);
+            selectedDevices = DeviceVector(defaultDevice);
+        } else {
+            selectedDevices = DeviceVector(busDevice);
         }
-        return DeviceVector(busDevice);
+    } else {
+        ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
+        selectedDevices = availableOutputDevices.getDevicesFromTypes(deviceTypes);
     }
-    ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
-    return availableOutputDevices.getDevicesFromTypes(deviceTypes);
+    return selectedDevices;
 }
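As a reading aid for the selection logic rewritten above: a preferred-role device list wins outright when all of its devices are available; otherwise the devices disabled for the (possibly fallback) strategy are removed before the strategy's device types are matched, and the default output device is the last resort. The standalone sketch below models only that ordering with simplified stand-in types; it is not the actual Engine or DeviceVector code.

#include <algorithm>
#include <set>
#include <string>
#include <vector>

// Stand-ins: a "device" is just its type+address rendered as a string.
using Device = std::string;
using Devices = std::vector<Device>;

// Simplified model of the order used by getDevicesForProductStrategy():
// preferred role, then (available - disabled) filtered by strategy types, then default.
Devices selectForStrategy(const Devices& available,
                          const Devices& preferred,
                          const Devices& disabled,
                          const std::set<Device>& strategyTypes,
                          const Device& defaultDevice) {
    const auto isAvailable = [&](const Device& d) {
        return std::find(available.begin(), available.end(), d) != available.end();
    };
    // 1. Preferred devices win only when every one of them is currently available.
    if (!preferred.empty() && std::all_of(preferred.begin(), preferred.end(), isAvailable)) {
        return preferred;
    }
    // 2. Drop devices disabled for this strategy, then keep those matching its types.
    Devices selected;
    for (const auto& d : available) {
        const bool isDisabled =
                std::find(disabled.begin(), disabled.end(), d) != disabled.end();
        if (!isDisabled && strategyTypes.count(d) != 0) {
            selected.push_back(d);
        }
    }
    // 3. Fall back on the default output device when nothing matched.
    if (selected.empty()) {
        selected.push_back(defaultDevice);
    }
    return selected;
}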
 
 DeviceVector Engine::getOutputDevicesForAttributes(const audio_attributes_t &attributes,
@@ -315,6 +442,7 @@
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                          uid_t uid,
+                                                         audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -333,9 +461,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+    device = policyMixes.getDeviceAndMixForInputSource(attr,
                                                        availableInputDevices,
                                                        uid,
+                                                       session,
                                                        mix);
     if (device != nullptr) {
         return device;
@@ -354,14 +483,6 @@
     return availableInputDevices.getDevice(deviceType, String8(address.c_str()), AUDIO_FORMAT_DEFAULT);
 }
 
-void Engine::updateDeviceSelectionCache()
-{
-    for (const auto &iter : getProductStrategies()) {
-        const auto &strategy = iter.second;
-        mDevicesForStrategies[strategy->getId()] = getDevicesForProductStrategy(strategy->getId());
-    }
-}
-
 void Engine::setDeviceAddressForProductStrategy(product_strategy_t strategy,
                                                 const std::string &address)
 {
@@ -409,5 +530,3 @@
 
 } // namespace audio_policy
 } // namespace android
-
-
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 4b559f0..b964cd6 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -33,12 +33,17 @@
 {
 public:
     Engine();
-    virtual ~Engine();
+    virtual ~Engine() = default;
 
     template <class RequestedInterface>
     RequestedInterface *queryInterface();
 
     ///
+    /// from EngineInterface
+    ///
+    android::status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") override;
+
+    ///
     /// from EngineBase
     ///
     android::status_t initCheck() override;
@@ -63,10 +68,16 @@
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                      uid_t uid = 0,
+                                                     audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
-    void updateDeviceSelectionCache() override;
+    status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                                       const AudioDeviceTypeAddrVector &devices) override;
+
+    status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+                const AudioDeviceTypeAddrVector &devices) override;
+    status_t clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
 
     ///
     /// from AudioPolicyPluginInterface
@@ -95,6 +106,12 @@
     }
 
 private:
+    android::status_t disableDevicesForStrategy(product_strategy_t strategy,
+            const DeviceVector &devicesToDisable);
+    void enableDevicesForStrategy(product_strategy_t strategy, const DeviceVector &devicesToEnable);
+    android::status_t setOutputDevicesConnectionState(const DeviceVector &devices,
+                                                      audio_policy_dev_state_t state);
+
     /* Copy facilities are put private to disable copy. */
     Engine(const Engine &object);
     Engine &operator=(const Engine &object);
@@ -120,20 +137,21 @@
     template <typename Property, typename Key>
     bool setPropertyForKey(const Property &property, const Key &key);
 
-    status_t loadAudioPolicyEngineConfig();
+    status_t loadAudioPolicyEngineConfig(const std::string& xmlFilePath);
 
-    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
     DeviceVector getCachedDevices(product_strategy_t ps) const;
 
+    ///
+    /// from EngineBase
+    ///
+    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
+
     /**
      * Policy Parameter Manager hidden through a wrapper.
      */
     ParameterManagerWrapper *mPolicyParameterMgr;
-
-    DeviceStrategyMap mDevicesForStrategies;
 };
 
 } // namespace audio_policy
 
 } // namespace android
-
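For orientation, the setDevicesRoleForStrategy / removeDevicesRoleForStrategy / clearDevicesRoleForStrategy overrides declared above can be pictured as maintaining a per-strategy, per-role device list that the selection path then consults. The sketch below is a simplified standalone model of that bookkeeping only; it is not the real EngineBase implementation and its types are stand-ins.

#include <algorithm>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Strategy = int;                           // stand-in for product_strategy_t
enum class DeviceRole { PREFERRED, DISABLED };  // stand-in for device_role_t
using DeviceList = std::vector<std::string>;    // stand-in for AudioDeviceTypeAddrVector

// Minimal registry mirroring the shape of the three role-based entry points.
class RoleRegistry {
public:
    void setDevicesRole(Strategy s, DeviceRole r, DeviceList devices) {
        mRoles[{s, r}] = std::move(devices);
    }
    void removeDevicesRole(Strategy s, DeviceRole r, const DeviceList& devices) {
        DeviceList& current = mRoles[{s, r}];
        for (const auto& d : devices) {
            current.erase(std::remove(current.begin(), current.end(), d), current.end());
        }
    }
    void clearDevicesRole(Strategy s, DeviceRole r) { mRoles.erase({s, r}); }
    DeviceList devicesFor(Strategy s, DeviceRole r) const {
        const auto it = mRoles.find({s, r});
        return it != mRoles.end() ? it->second : DeviceList{};
    }
private:
    std::map<std::pair<Strategy, DeviceRole>, DeviceList> mRoles;
};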
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 40efb3d..b6089b7 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -21,36 +21,23 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-python_defaults {
-    name: "tools_default",
-    version: {
-        py2: {
-            enabled: false,
-        },
-        py3: {
-            enabled: true,
-        },
-    },
-}
-
 //##################################################################################################
 // Tools for audio policy engine criterion type configuration file
 //
 python_binary_host {
-    name: "buildPolicyCriterionTypes.py",
+    name: "buildPolicyCriterionTypes",
     main: "buildPolicyCriterionTypes.py",
     srcs: [
         "buildPolicyCriterionTypes.py",
     ],
-    defaults: ["tools_default"],
 }
 
 genrule_defaults {
     name: "buildpolicycriteriontypesrule",
-    tools: ["buildPolicyCriterionTypes.py"],
+    tools: ["buildPolicyCriterionTypes"],
     cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
          "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
-         "$(location buildPolicyCriterionTypes.py) " +
+         "$(location buildPolicyCriterionTypes) " +
          " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
          " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
          "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
@@ -72,12 +59,11 @@
 // Tools for audio policy engine parameter framework configurable domains
 //
 python_binary_host {
-    name: "domainGeneratorPolicy.py",
+    name: "domainGeneratorPolicy",
     main: "domainGeneratorPolicy.py",
     srcs: [
         "domainGeneratorPolicy.py",
     ],
-    defaults: ["tools_default"],
     libs: [
         "EddParser.py",
         "hostConfig.py",
@@ -91,13 +77,13 @@
 genrule_defaults {
     name: "domaingeneratorpolicyrule",
     tools: [
-        "domainGeneratorPolicy.py",
+        "domainGeneratorPolicy",
         "domainGeneratorConnector",
     ],
     cmd: "mkdir -p $(genDir)/Structure/Policy && " +
          "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
          "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
-         "$(location domainGeneratorPolicy.py) " +
+         "$(location domainGeneratorPolicy) " +
          "--validate " +
          "--domain-generator-tool $(location domainGeneratorConnector) " +
          "--toplevel-config $(genDir)/top_level " +
@@ -121,19 +107,18 @@
 // Tools for policy parameter-framework product strategies structure file generation
 //
 python_binary_host {
-    name: "buildStrategiesStructureFile.py",
+    name: "buildStrategiesStructureFile",
     main: "buildStrategiesStructureFile.py",
     srcs: [
         "buildStrategiesStructureFile.py",
     ],
-    defaults: ["tools_default"],
 }
 
 genrule_defaults {
     name: "buildstrategiesstructurerule",
-    tools: ["buildStrategiesStructureFile.py"],
+    tools: ["buildStrategiesStructureFile"],
     cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&"+
-         "$(location buildStrategiesStructureFile.py) " +
+         "$(location buildStrategiesStructureFile) " +
          "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml "+
          "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
          "--outputfile $(out)",
@@ -149,18 +134,17 @@
 // Tools for policy parameter-framework common type structure file generation
 //
 python_binary_host {
-    name: "buildCommonTypesStructureFile.py",
+    name: "buildCommonTypesStructureFile",
     main: "buildCommonTypesStructureFile.py",
     srcs: [
         "buildCommonTypesStructureFile.py",
     ],
-    defaults: ["tools_default"],
 }
 
 genrule_defaults {
     name: "buildcommontypesstructurerule",
-    tools: ["buildCommonTypesStructureFile.py"],
-    cmd: "$(location buildCommonTypesStructureFile.py) " +
+    tools: ["buildCommonTypesStructureFile"],
+    cmd: "$(location buildCommonTypesStructureFile) " +
          "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
          "--commontypesstructure $(location :common_types_structure_template) " +
          "--outputfile $(out)",
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index d4d514d..88cbb7d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -59,9 +59,8 @@
     return legacyStrategy;
 }
 
-Engine::Engine()
-{
-    auto result = EngineBase::loadAudioPolicyEngineConfig();
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath) {
+    auto result = EngineBase::loadAudioPolicyEngineConfig(xmlFilePath);
     ALOGE_IF(result.nbSkippedElement != 0,
              "Policy Engine configuration is partially invalid, skipped %zu elements",
              result.nbSkippedElement);
@@ -70,6 +69,8 @@
     for (const auto &strategy : legacyStrategy) {
         mLegacyStrategyMap[getProductStrategyByName(strategy.name)] = strategy.id;
     }
+
+    return OK;
 }
 
 status_t Engine::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config)
@@ -78,7 +79,7 @@
     case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
         if (config != AUDIO_POLICY_FORCE_SPEAKER && config != AUDIO_POLICY_FORCE_BT_SCO &&
             config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_COMMUNICATION", config);
+            ALOGW("setForceUse() invalid config %d for COMMUNICATION", config);
             return BAD_VALUE;
         }
         break;
@@ -88,14 +89,14 @@
             config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
             config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) {
-            ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
+            ALOGW("setForceUse() invalid config %d for MEDIA", config);
             return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_RECORD:
         if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
             config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_RECORD", config);
+            ALOGW("setForceUse() invalid config %d for RECORD", config);
             return BAD_VALUE;
         }
         break;
@@ -105,19 +106,22 @@
             config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
             config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
             config != AUDIO_POLICY_FORCE_DIGITAL_DOCK) {
-            ALOGW("setForceUse() invalid config %d for FOR_DOCK", config);
+            ALOGW("setForceUse() invalid config %d for DOCK", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_SYSTEM:
         if (config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
-            ALOGW("setForceUse() invalid config %d for FOR_SYSTEM", config);
+            ALOGW("setForceUse() invalid config %d for SYSTEM", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
         if (config != AUDIO_POLICY_FORCE_NONE &&
             config != AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED) {
             ALOGW("setForceUse() invalid config %d for HDMI_SYSTEM_AUDIO", config);
+            return BAD_VALUE;
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
@@ -131,13 +135,13 @@
         break;
     case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
         if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_NONE) {
-            ALOGW("setForceUse() invalid config %d for FOR_VIBRATE_RINGING", config);
+            ALOGW("setForceUse() invalid config %d for VIBRATE_RINGING", config);
             return BAD_VALUE;
         }
         break;
     default:
         ALOGW("setForceUse() invalid usage %d", usage);
-        break; // TODO return BAD_VALUE?
+        return BAD_VALUE;
     }
     return EngineBase::setForceUse(usage, config);
 }
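The setForceUse() hunks above make every invalid (usage, config) pair return BAD_VALUE instead of only logging a warning. A minimal standalone illustration of the same allow-list style of validation; the enum values and the error code are placeholders, not the real audio_policy_* definitions.

#include <map>
#include <set>

enum class Usage { COMMUNICATION, MEDIA, RECORD };      // placeholder for audio_policy_force_use_t
enum class Config { NONE, SPEAKER, BT_SCO, WIRED };     // placeholder for audio_policy_forced_cfg_t
enum Status { STATUS_OK = 0, STATUS_BAD_VALUE = -22 };  // placeholder status codes

// Reject any (usage, config) pair that is not explicitly allowed.
Status setForceUse(Usage usage, Config config) {
    static const std::map<Usage, std::set<Config>> kAllowed = {
        {Usage::COMMUNICATION, {Config::NONE, Config::SPEAKER, Config::BT_SCO}},
        {Usage::MEDIA,         {Config::NONE, Config::SPEAKER, Config::WIRED}},
        {Usage::RECORD,        {Config::NONE, Config::BT_SCO, Config::WIRED}},
    };
    const auto it = kAllowed.find(usage);
    if (it == kAllowed.end() || it->second.count(config) == 0) {
        return STATUS_BAD_VALUE;  // previously some branches only logged and fell through
    }
    return STATUS_OK;
}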
@@ -163,8 +167,12 @@
         //   - cannot route from voice call RX OR
         //   - audio HAL version is < 3.0 and TX device is on the primary HW module
         if (getPhoneState() == AUDIO_MODE_IN_CALL) {
-            audio_devices_t txDevice = getDeviceForInputSource(
-                    AUDIO_SOURCE_VOICE_COMMUNICATION)->type();
+            audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+            sp<DeviceDescriptor> txDeviceDesc =
+                    getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+            if (txDeviceDesc != nullptr) {
+                txDevice = txDeviceDesc->type();
+            }
             sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
             LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
             DeviceVector availPrimaryInputDevices =
@@ -195,6 +203,12 @@
                     AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
                     AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, }));
         }
+        // If connected to a dock, never use the device speaker for calls
+        if (!availableOutputDevices.getDevicesFromTypes({AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET})
+                .isEmpty()) {
+            availableOutputDevices.remove(
+                    availableOutputDevices.getDevicesFromTypes({AUDIO_DEVICE_OUT_SPEAKER}));
+        }
         } break;
     case STRATEGY_ACCESSIBILITY: {
         // do not route accessibility prompts to a digital output currently configured with a
@@ -267,13 +281,19 @@
         break;
 
     case STRATEGY_PHONE: {
-        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
-        if (!devices.isEmpty()) break;
+        // TODO(b/243670205): remove this logic that gives preference to last removable devices
+        // once a UX decision has been made
         devices = availableOutputDevices.getFirstDevicesFromTypes(
-                        getLastRemovableMediaDevices(GROUP_NONE, {AUDIO_DEVICE_OUT_BLE_HEADSET}));
+                        getLastRemovableMediaDevices(GROUP_NONE, {
+                            // excluding HEARING_AID and BLE_HEADSET because Dialer uses
+                            // setCommunicationDevice to select them explicitly
+                            AUDIO_DEVICE_OUT_HEARING_AID,
+                            AUDIO_DEVICE_OUT_BLE_HEADSET
+                            }));
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes({
-                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
+                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE,
+                AUDIO_DEVICE_OUT_SPEAKER});
     } break;
 
     case STRATEGY_SONIFICATION:
@@ -285,7 +305,9 @@
 
         if ((strategy == STRATEGY_SONIFICATION) ||
                 (getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)) {
-            devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+            // favor dock over speaker when available
+            devices = availableOutputDevices.getFirstDevicesFromTypes({
+                    AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
         }
 
         // if SCO headset is connected and we are told to use it, play ringtone over
@@ -449,7 +471,7 @@
     }
 
     if (devices.isEmpty()) {
-        ALOGV("%s no device found for strategy %d", __func__, strategy);
+        ALOGI("%s no device found for strategy %d", __func__, strategy);
         sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
         if (defaultOutputDevice != nullptr) {
             devices.add(defaultOutputDevice);
@@ -463,6 +485,37 @@
     return devices;
 }
 
+DeviceVector Engine::getPreferredAvailableDevicesForInputSource(
+            const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+    DeviceVector preferredAvailableDevVec = {};
+    AudioDeviceTypeAddrVector preferredDevices;
+    const status_t status = getDevicesForRoleAndCapturePreset(
+            inputSource, DEVICE_ROLE_PREFERRED, preferredDevices);
+    if (status == NO_ERROR) {
+        // Only use preferred devices when they are all available.
+        preferredAvailableDevVec =
+                availableInputDevices.getDevicesFromDeviceTypeAddrVec(preferredDevices);
+        if (preferredAvailableDevVec.size() == preferredDevices.size()) {
+            ALOGVV("%s using pref device %s for source %u",
+                   __func__, preferredAvailableDevVec.toString().c_str(), inputSource);
+            return preferredAvailableDevVec;
+        }
+    }
+    return preferredAvailableDevVec;
+}
+
+DeviceVector Engine::getDisabledDevicesForInputSource(
+            const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+    DeviceVector disabledDevices = {};
+    AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+    const status_t status = getDevicesForRoleAndCapturePreset(
+            inputSource, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+    if (status == NO_ERROR) {
+        disabledDevices =
+                availableInputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+    }
+    return disabledDevices;
+}
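These capture-side helpers follow the same precedence as the output path: a preferred list is honoured only when all of its devices are available, and disabled devices are pruned before the legacy per-source logic runs. A compact standalone model of that ordering with simplified types (not the real DeviceVector); the legacy step is passed in as a callback.

#include <algorithm>
#include <functional>
#include <string>
#include <vector>

using Device = std::string;
using Devices = std::vector<Device>;

Device pickInputDevice(Devices available, const Devices& preferred, const Devices& disabled,
                       const std::function<Device(const Devices&)>& legacyPickForSource) {
    const auto isAvailable = [&](const Device& d) {
        return std::find(available.begin(), available.end(), d) != available.end();
    };
    // Preferred devices are used only when all of them are available; since the framework
    // currently exposes a single preferred capture device, the first entry is returned.
    if (!preferred.empty() && std::all_of(preferred.begin(), preferred.end(), isAvailable)) {
        return preferred.front();
    }
    // Otherwise remove the disabled devices and let the per-source selection decide.
    for (const auto& d : disabled) {
        available.erase(std::remove(available.begin(), available.end(), d), available.end());
    }
    return legacyPickForSource(available);
}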
 
 sp<DeviceDescriptor> Engine::getDeviceForInputSource(audio_source_t inputSource) const
 {
@@ -494,6 +547,20 @@
         }
     }
 
+    // Use the preferred device for the input source if it is available.
+    DeviceVector preferredInputDevices = getPreferredAvailableDevicesForInputSource(
+            availableDevices, inputSource);
+    if (!preferredInputDevices.isEmpty()) {
+        // Currently, only a single input device is supported. The public Java API also only
+        // supports setting a single device as the preferred device, so returning the first
+        // device is OK here.
+        return preferredInputDevices[0];
+    }
+    // Remove the disabled devices for the input source from the available input device list.
+    DeviceVector disabledInputDevices = getDisabledDevicesForInputSource(
+            availableDevices, inputSource);
+    availableDevices.remove(disabledInputDevices);
+
     audio_devices_t commDeviceType =
         getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE);
 
@@ -533,22 +600,26 @@
             }
         }
         switch (commDeviceType) {
-        case AUDIO_DEVICE_OUT_BLE_HEADSET:
-            device = availableDevices.getDevice(
-                    AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
-            break;
         case AUDIO_DEVICE_OUT_SPEAKER:
             device = availableDevices.getFirstExistingDevice({
                     AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
                     AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
             break;
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+            device = availableDevices.getDevice(
+                    AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+            if (device != nullptr) {
+                break;
+            }
+            ALOGE("%s LE Audio selected for communication but input device not available",
+                    __func__);
+            FALLTHROUGH_INTENDED;
         default:    // FORCE_NONE
             device = availableDevices.getFirstExistingDevice({
                     AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
                     AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
                     AUDIO_DEVICE_IN_BUILTIN_MIC});
             break;
-
         }
         break;
 
@@ -635,15 +706,9 @@
     return device;
 }
 
-void Engine::updateDeviceSelectionCache()
-{
-    for (const auto &iter : getProductStrategies()) {
-        const auto& strategy = iter.second;
-        auto devices = getDevicesForProductStrategy(strategy->getId());
-        mDevicesForStrategies[strategy->getId()] = devices;
-        strategy->setDeviceTypes(devices.types());
-        strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
-    }
+void Engine::setStrategyDevices(const sp<ProductStrategy>& strategy, const DeviceVector &devices) {
+    strategy->setDeviceTypes(devices.types());
+    strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
 }
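With updateDeviceSelectionCache() removed from both concrete engines, the per-strategy cache refresh presumably lives in the shared base class, which now only needs getDevicesForProductStrategy() and setStrategyDevices() from each engine. The standalone sketch below shows that split under that assumption, using simplified stand-in types rather than the actual EngineBase code.

#include <functional>
#include <map>
#include <string>
#include <vector>

using Strategy = int;                      // stand-in for product_strategy_t
using Devices = std::vector<std::string>;  // stand-in for DeviceVector

// Engine-specific hooks: compute the devices for a strategy, and push the result
// back onto the strategy object (device types and address in the real code).
struct EngineHooks {
    std::function<Devices(Strategy)> getDevicesForProductStrategy;
    std::function<void(Strategy, const Devices&)> setStrategyDevices;
};

// Base-class style refresh: iterate the strategies once and delegate the
// policy-specific work to the hooks above, keeping the cache in one place.
void updateDeviceSelectionCache(const std::vector<Strategy>& strategies,
                                const EngineHooks& engine,
                                std::map<Strategy, Devices>& cache) {
    for (Strategy s : strategies) {
        Devices devices = engine.getDevicesForProductStrategy(s);
        cache[s] = devices;
        engine.setStrategyDevices(s, devices);
    }
}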
 
 product_strategy_t Engine::getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const {
@@ -666,26 +731,6 @@
     return AUDIO_DEVICE_NONE;
 }
 
-DeviceVector Engine::getPreferredAvailableDevicesForProductStrategy(
-        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
-    DeviceVector preferredAvailableDevVec = {};
-    AudioDeviceTypeAddrVector preferredStrategyDevices;
-    const status_t status = getDevicesForRoleAndStrategy(
-            strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
-    if (status == NO_ERROR) {
-        // there is a preferred device, is it available?
-        preferredAvailableDevVec =
-                availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
-        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
-            ALOGVV("%s using pref device %s for strategy %u",
-                   __func__, preferredAvailableDevVec.toString().c_str(), strategy);
-            return preferredAvailableDevVec;
-        }
-    }
-    return preferredAvailableDevVec;
-}
-
-
 DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
     const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
 
@@ -708,6 +753,11 @@
         return preferredAvailableDevVec;
     }
 
+    // Remove all disabled devices from the available device list.
+    DeviceVector disabledDevVec =
+            getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+    availableOutputDevices.remove(disabledDevVec);
+
     return getDevicesForStrategyInt(legacyStrategy,
                                     availableOutputDevices,
                                     outputs);
@@ -747,6 +797,7 @@
 
 sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                          uid_t uid,
+                                                         audio_session_t session,
                                                          sp<AudioPolicyMix> *mix) const
 {
     const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -766,9 +817,10 @@
         return device;
     }
 
-    device = policyMixes.getDeviceAndMixForInputSource(attr.source,
+    device = policyMixes.getDeviceAndMixForInputSource(attr,
                                                        availableInputDevices,
                                                        uid,
+                                                       session,
                                                        mix);
     if (device != nullptr) {
         return device;
@@ -794,5 +846,3 @@
 
 } // namespace audio_policy
 } // namespace android
-
-
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 595e289..8410560 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -45,9 +45,14 @@
 class Engine : public EngineBase
 {
 public:
-    Engine();
+    Engine() = default;
     virtual ~Engine() = default;
 
+    ///
+    /// from EngineInterface
+    ///
+    status_t loadFromXmlConfigWithFallback(const std::string& xmlFilePath = "") override;
+
 private:
     ///
     /// from EngineBase, so from EngineInterface
@@ -64,10 +69,14 @@
 
     sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
                                                      uid_t uid = 0,
+                                                     audio_session_t session = AUDIO_SESSION_NONE,
                                                      sp<AudioPolicyMix> *mix = nullptr)
                                                      const override;
 
-    void updateDeviceSelectionCache() override;
+    void setStrategyDevices(const sp<ProductStrategy>& strategy,
+                            const DeviceVector& devices) override;
+
+    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
 
 private:
     /* Copy facilities are put private to disable copy. */
@@ -87,20 +96,17 @@
                                           DeviceVector availableOutputDevices,
                                           const SwAudioOutputCollection &outputs) const;
 
-    DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
-
     sp<DeviceDescriptor> getDeviceForInputSource(audio_source_t inputSource) const;
 
     product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
     audio_devices_t getPreferredDeviceTypeForLegacyStrategy(
         const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
-    DeviceVector getPreferredAvailableDevicesForProductStrategy(
-        const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
-
-    DeviceStrategyMap mDevicesForStrategies;
+    DeviceVector getPreferredAvailableDevicesForInputSource(
+            const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
+    DeviceVector getDisabledDevicesForInputSource(
+            const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
 
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
 };
 } // namespace audio_policy
 } // namespace android
-
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 9f6b703..621f643 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -63,6 +63,15 @@
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
     fuzz_config: {
-       cc: ["mnaganov@google.com"],
+        cc: ["mnaganov@google.com"],
+        componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libaudiopolicy",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 48f7410..8793085 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -216,8 +216,9 @@
     virtual void process();
 
    protected:
+    sp<AudioPolicyConfig> mConfig{AudioPolicyConfig::createWritableForTests()};
     std::unique_ptr<AudioPolicyManagerTestClient> mClient{new AudioPolicyManagerTestClient};
-    std::unique_ptr<AudioPolicyTestManager> mManager{new AudioPolicyTestManager(mClient.get())};
+    std::unique_ptr<AudioPolicyTestManager> mManager;
     FuzzedDataProvider *mFdp;
 };
 
@@ -230,7 +231,10 @@
     }
     // init code
     SetUpManagerConfig();
-
+    if (mConfig == nullptr) {
+        return false;
+    }
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
     if (mManager->initialize() != NO_ERROR) {
         return false;
     }
@@ -240,7 +244,7 @@
     return true;
 }
 
-void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mManager->getConfig().setDefault(); }
+void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mConfig->setDefault(); }
 
 bool AudioPolicyManagerFuzzer::getOutputForAttr(
     audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
@@ -259,13 +263,15 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
+    bool isBitPerfect;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
-            &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized) != OK) {
+            &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+            &isBitPerfect) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
@@ -406,7 +412,11 @@
 }
 
 void AudioPolicyManagerFuzzerWithConfigurationFile::SetUpManagerConfig() {
-    deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
+    const std::string configFilePath = getConfigFile();
+    auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(configFilePath);
+    mConfig = result.ok() ? result.value() : nullptr;
+    ALOGE_IF(!result.ok(), "%s: Failed to deserialize \"%s\": %d",
+            __func__, configFilePath.c_str(), result.error());
 }
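The configuration-file fuzzer now loads the policy configuration itself through an expected-style result (ok()/value()/error()) and keeps mConfig null on failure, so the caller can skip constructing the manager. The standalone sketch below mirrors that load-then-check flow with stand-in types; the error code and file name are illustrative only.

#include <iostream>
#include <memory>
#include <string>
#include <variant>

struct Config { std::string path; };

// Stand-in for an expected-style result with ok()/value()/error().
struct LoadResult {
    std::variant<std::shared_ptr<Config>, int> v;
    bool ok() const { return v.index() == 0; }
    std::shared_ptr<Config> value() const { return std::get<0>(v); }
    int error() const { return std::get<1>(v); }
};

LoadResult loadConfig(const std::string& path) {
    if (path.empty()) return LoadResult{-22};  // illustrative error code
    return LoadResult{std::make_shared<Config>(Config{path})};
}

int main() {
    auto result = loadConfig("audio_policy_configuration.xml");
    // Keep the config only on success; a null config tells the caller to bail out
    // before constructing anything that depends on it.
    std::shared_ptr<Config> config = result.ok() ? result.value() : nullptr;
    if (config == nullptr) {
        std::cerr << "failed to load config: " << result.error() << "\n";
        return 1;
    }
    std::cout << "loaded " << config->path << "\n";
    return 0;
}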
 
 void AudioPolicyManagerFuzzerWithConfigurationFile::traverseAndFuzzXML(xmlDocPtr pDoc,
@@ -544,10 +554,11 @@
 status_t AudioPolicyManagerFuzzerDynamicPolicy::addPolicyMix(
     int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
     const audio_config_t &audioConfig, const std::vector<PolicyMixTuple> &rules) {
-    Vector<AudioMixMatchCriterion> myMixMatchCriteria;
+    std::vector<AudioMixMatchCriterion> myMixMatchCriteria;
 
+    myMixMatchCriteria.reserve(rules.size());
     for (const auto &rule : rules) {
-        myMixMatchCriteria.add(
+        myMixMatchCriteria.push_back(
             AudioMixMatchCriterion(std::get<0>(rule), std::get<1>(rule), std::get<2>(rule)));
     }
 
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 4b4817e..6e34eb0 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -10,6 +10,10 @@
 cc_library_shared {
     name: "libaudiopolicymanagerdefault",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     srcs: [
         "AudioPolicyManager.cpp",
         "EngineLibrary.cpp",
@@ -36,7 +40,6 @@
         "libaudiopolicyenginedefault",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
-        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 744609f..f093e68 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -34,6 +34,7 @@
 #include <map>
 #include <math.h>
 #include <set>
+#include <type_traits>
 #include <unordered_set>
 #include <vector>
 
@@ -68,16 +69,6 @@
 // media / notification / system volume.
 constexpr float IN_CALL_EARPIECE_HEADROOM_DB = 3.f;
 
-// Compressed formats for MSD module, ordered from most preferred to least preferred.
-static const std::vector<audio_format_t> msdCompressedFormatsOrder = {{
-        AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
-        AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
-// Channel masks for MSD module, 3D > 2D > 1D ordering (most preferred to least preferred).
-static const std::vector<audio_channel_mask_t> msdSurroundChannelMasksOrder = {{
-        AUDIO_CHANNEL_OUT_3POINT1POINT2, AUDIO_CHANNEL_OUT_3POINT0POINT2,
-        AUDIO_CHANNEL_OUT_2POINT1POINT2, AUDIO_CHANNEL_OUT_2POINT0POINT2,
-        AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_STEREO }};
-
 template <typename T>
 bool operator== (const SortedVector<T> &left, const SortedVector<T> &right)
 {
@@ -114,7 +105,7 @@
                                                       const char* device_address,
                                                       const char* device_name,
                                                       audio_format_t encodedFormat) {
-    media::AudioPort aidlPort;
+    media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
         status == OK) {
         return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
@@ -125,14 +116,13 @@
 }
 
 void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                                        audio_policy_dev_state_t state)
+                                                        media::DeviceConnectedState state)
 {
     audio_port_v7 devicePort;
     device->toAudioPort(&devicePort);
-    if (status_t status = mpClientInterface->setDeviceConnectedState(
-                    &devicePort, state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+    if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
             status != OK) {
-        ALOGE("Error %d while setting connected state for device %s", status,
+        ALOGE("Error %d while setting connected state for device %s", state,
                 device->getDeviceTypeAddr().toString(false).c_str());
     }
 }
@@ -140,8 +130,6 @@
 status_t AudioPolicyManager::setDeviceConnectionStateInt(
         audio_policy_dev_state_t state, const android::media::audio::common::AudioPort& port,
         audio_format_t encodedFormat) {
-    // TODO: b/211601178 Forward 'port' to Audio HAL via mHwModules. For now, only device_type,
-    // device_address and device_name are forwarded.
     if (port.ext.getTag() != AudioPortExt::device) {
         return BAD_VALUE;
     }
@@ -160,7 +148,13 @@
     sp<DeviceDescriptor> device = mHwModules.getDeviceDescriptor(
             device_type, device_address.c_str(), device_name, encodedFormat,
             state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
-    return device ? setDeviceConnectionStateInt(device, state) : INVALID_OPERATION;
+    if (device == nullptr) {
+        return INVALID_OPERATION;
+    }
+    if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+        device->setExtraAudioDescriptors(port.extraAudioDescriptors);
+    }
+    return setDeviceConnectionStateInt(device, state);
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t deviceType,
@@ -168,7 +162,7 @@
                                                          const char* device_address,
                                                          const char* device_name,
                                                          audio_format_t encodedFormat) {
-    media::AudioPort aidlPort;
+    media::AudioPortFw aidlPort;
     if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
         status == OK) {
         return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
@@ -211,14 +205,14 @@
 
             // Before checking outputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the outputs...)
-            broadcastDeviceConnectionState(device, state);
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
 
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
 
                 mHwModules.cleanUpForDevice(device);
 
-                broadcastDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+                broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
                 return INVALID_OPERATION;
             }
 
@@ -240,8 +234,9 @@
 
             ALOGV("%s() disconnecting output device %s", __func__, device->toString().c_str());
 
-            // Send Disconnect to HALs
-            broadcastDeviceConnectionState(device, state);
+            // Notify the HAL to prepare to disconnect device
+            broadcastDeviceConnectionState(
+                    device, media::DeviceConnectedState::PREPARE_TO_DISCONNECT);
 
             // remove device from available output devices
             mAvailableOutputDevices.remove(device);
@@ -250,12 +245,18 @@
 
             checkOutputsForDevice(device, state, outputs);
 
+            // Send Disconnect to HALs
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
             // Reset active device codec
             device->setEncodedFormat(AUDIO_FORMAT_DEFAULT);
 
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
 
+            // remove preferred mixer configurations
+            mPreferredMixerAttrInfos.erase(device->getId());
+
             } break;
 
         default:
@@ -312,10 +313,10 @@
             checkCloseOutputs();
         }
         (void)updateCallRouting(false /*fromCache*/);
-        std::vector<audio_io_handle_t> outputsToReopen;
         const DeviceVector msdOutDevices = getMsdAudioOutDevices();
         const DeviceVector activeMediaDevices =
                 mEngine->getActiveMediaDevices(mAvailableOutputDevices);
+        std::map<audio_io_handle_t, DeviceVector> outputsToReopenWithDevices;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
@@ -329,6 +330,13 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                    // If the output is using preferred mixer attributes, it needs to be reopened
+                    // with the default configuration when the newly selected devices differ from
+                    // the current routing devices.
+                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
+                    continue;
+                }
                 setOutputDevices(desc, newDevices, force, 0);
             }
             if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
@@ -339,7 +347,7 @@
                 // `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
                 // can be reopened to query dynamic profiles when all clients are inactive.
                 if (areAllActiveTracksRerouted(desc)) {
-                    outputsToReopen.push_back(mOutputs.keyAt(i));
+                    outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), activeMediaDevices);
                 } else {
                     desc->mPendingReopenToQueryProfiles = true;
                 }
@@ -349,11 +357,7 @@
                 desc->mPendingReopenToQueryProfiles = false;
             }
         }
-        for (const auto& output : outputsToReopen) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
-            closeOutput(output);
-            openOutputWithProfileAndDevice(desc->mProfile, activeMediaDevices);
-        }
+        reopenOutputsWithDevices(outputsToReopenWithDevices);
 
         if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
             cleanUpForDevice(device);
@@ -383,12 +387,12 @@
 
             // Before checking inputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the inputs...)
-            broadcastDeviceConnectionState(device, state);
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
 
             if (checkInputsForDevice(device, state) != NO_ERROR) {
                 mAvailableInputDevices.remove(device);
 
-                broadcastDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+                broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
 
                 mHwModules.cleanUpForDevice(device);
 
@@ -406,13 +410,17 @@
 
             ALOGV("%s() disconnecting input device %s", __func__, device->toString().c_str());
 
-            // Set Disconnect to HALs
-            broadcastDeviceConnectionState(device, state);
+            // Notify the HAL to prepare to disconnect device
+            broadcastDeviceConnectionState(
+                    device, media::DeviceConnectedState::PREPARE_TO_DISCONNECT);
 
             mAvailableInputDevices.remove(device);
 
             checkInputsForDevice(device, state);
 
+            // Set Disconnect to HALs
+            broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
+
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
         } break;
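Both the output and input branches above now use the same three-step handshake with the HAL: CONNECTED before probing a newly attached device, PREPARE_TO_DISCONNECT before tearing one down, and DISCONNECTED once the ports have been checked (or immediately when the initial probe fails). A standalone model of that ordering; the enum and callbacks are stand-ins for media::DeviceConnectedState and the client interface.

#include <functional>
#include <string>

enum class DeviceConnectedState { CONNECTED, PREPARE_TO_DISCONNECT, DISCONNECTED };

using Broadcast = std::function<void(const std::string& device, DeviceConnectedState)>;

// Connection: announce first so the HAL can expose dynamic parameters, then probe the
// ports; roll back with DISCONNECTED if the device cannot actually be used.
bool connectDevice(const std::string& device, const Broadcast& broadcast,
                   const std::function<bool()>& checkPortsForDevice) {
    broadcast(device, DeviceConnectedState::CONNECTED);
    if (!checkPortsForDevice()) {
        broadcast(device, DeviceConnectedState::DISCONNECTED);
        return false;
    }
    return true;
}

// Disconnection: warn the HAL first, release the ports, then confirm the removal.
void disconnectDevice(const std::string& device, const Broadcast& broadcast,
                      const std::function<void()>& releasePortsForDevice) {
    broadcast(device, DeviceConnectedState::PREPARE_TO_DISCONNECT);
    releasePortsForDevice();
    broadcast(device, DeviceConnectedState::DISCONNECTED);
}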
@@ -450,7 +458,7 @@
 
 status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
                                                const char* device_name,
-                                               media::AudioPort* aidlPort) {
+                                               media::AudioPortFw* aidlPort) {
     DeviceDescriptorBase devDescr(device, device_address);
     devDescr.setName(device_name);
     return devDescr.writeToParcelable(aidlPort);
@@ -602,7 +610,10 @@
         audioDeviceSet = getAudioDeviceOutAllA2dpSet();
         break;
     case AUDIO_DEVICE_OUT_BLE_HEADSET:
-        audioDeviceSet = getAudioDeviceOutAllBleSet();
+        audioDeviceSet = getAudioDeviceOutLeAudioUnicastSet();
+        break;
+    case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+        audioDeviceSet = getAudioDeviceOutLeAudioBroadcastSet();
         break;
     default:
         ALOGE("%s() device type 0x%08x not supported", __func__, device);
@@ -667,7 +678,10 @@
 
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
-    ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
+    if (txSourceDevice == nullptr) {
+        ALOGE("%s() selected input device not available", __func__);
+        return INVALID_OPERATION;
+    }
 
     ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
@@ -788,7 +802,8 @@
     ALOGV("%s between source %s and sink %s", __func__,
             srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
-    const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
     struct audio_port_config source = {};
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -819,7 +834,7 @@
     if (isStateInCall(oldState)) {
         ALOGV("setPhoneState() in call state management: new state is %d", state);
         // force reevaluating accessibility routing when call stops
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+        invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
     }
 
     /**
@@ -877,23 +892,32 @@
         }
     }
 
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     // reevaluate routing on all outputs in case tracks have been started during the call
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
+            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                // If the output is using preferred mixer attributes, it needs to be reopened
+                // with the default configuration when the newly selected devices differ from
+                // the current routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                continue;
+            }
             setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
 
     checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
 
     if (isStateInCall(state)) {
         ALOGV("setPhoneState() in call state management: new state is %d", state);
         // force reevaluating accessibility routing when call starts
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+        invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
     }
 
     // Flag that ringtone volume must be limited to music volume until we exit MODE_RINGTONE
@@ -926,8 +950,7 @@
 
     // force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
     if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
-        mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
-        mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+        invalidateStreams({AUDIO_STREAM_SYSTEM, AUDIO_STREAM_ENFORCED_AUDIBLE});
     }
 
     //FIXME: workaround for truncated touch sounds
@@ -1033,7 +1056,7 @@
 
              // when searching for direct outputs, if several profiles are compatible, give priority
              // to one with offload capability
-             if (profile != 0 && 
+             if (profile != 0 &&
                  ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
                 continue;
              }
@@ -1054,12 +1077,12 @@
             if (curProfile->getFlags() != AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 continue;
             }
-            // reject profiles not corresponding to a device currently available
-            DeviceVector supportedDevices = curProfile->getSupportedDevices();
-            if (!mAvailableOutputDevices.containsAtLeastOne(supportedDevices)) {
-                continue;
-            }
             if (!devices.empty()) {
+                // reject profiles not corresponding to a device currently available
+                DeviceVector supportedDevices = curProfile->getSupportedDevices();
+                if (!mAvailableOutputDevices.containsAtLeastOne(supportedDevices)) {
+                    continue;
+                }
                 if (supportedDevices.getDevicesFromDeviceTypeAddrVec(devices).size()
                         != devices.size()) {
                     continue;
@@ -1128,13 +1151,14 @@
         const audio_attributes_t *attr,
         audio_stream_type_t *stream,
         uid_t uid,
-        const audio_config_t *config,
+        audio_config_t *config,
         audio_output_flags_t *flags,
         audio_port_handle_t *selectedDeviceId,
         bool *isRequestedDeviceForExclusiveUse,
         std::vector<sp<AudioPolicyMix>> *secondaryMixes,
         output_type_t *outputType,
-        bool *isSpatialized)
+        bool *isSpatialized,
+        bool *isBitPerfect)
 {
     DeviceVector outputDevices;
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -1157,23 +1181,27 @@
     ALOGV("%s() attributes=%s stream=%s session %d selectedDeviceId %d", __func__,
           toString(*resultAttr).c_str(), toString(*stream).c_str(), session, requestedPortId);
 
+    bool usePrimaryOutputFromPolicyMixes = false;
+
     // The primary output is the explicit routing (e.g. setPreferredDevice) if specified;
     //       otherwise, fall back to the dynamic policies, and if none match, query the engine.
     // Secondary outputs are always found by dynamic policies as the engine does not support them.
     sp<AudioPolicyMix> primaryMix;
-    status = mPolicyMixes.getOutputForAttr(*resultAttr, uid, *flags, primaryMix, secondaryMixes);
+    const audio_config_base_t clientConfig = {.sample_rate = config->sample_rate,
+        .channel_mask = config->channel_mask,
+        .format = config->format,
+    };
+    status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, session, *flags,
+                                           mAvailableOutputDevices, requestedDevice, primaryMix,
+                                           secondaryMixes, usePrimaryOutputFromPolicyMixes);
     if (status != OK) {
         return status;
     }
 
-    // Explicit routing is higher priority then any dynamic policy primary output
-    bool usePrimaryOutputFromPolicyMixes = requestedDevice == nullptr && primaryMix != nullptr;
-
     // FIXME: in case of RENDER policy, the output capabilities should be checked
-    if ((usePrimaryOutputFromPolicyMixes
-            || (secondaryMixes != nullptr && !secondaryMixes->empty()))
-        && !audio_is_linear_pcm(config->format)) {
-        ALOGD("%s: rejecting request as dynamic audio policy only support pcm", __func__);
+    if ((secondaryMixes != nullptr && !secondaryMixes->empty())
+            && !audio_is_linear_pcm(config->format)) {
+        ALOGD("%s: rejecting request as secondary mixes only support pcm", __func__);
         return BAD_VALUE;
     }
     if (usePrimaryOutputFromPolicyMixes) {
@@ -1182,19 +1210,27 @@
                                                   primaryMix->mDeviceAddress,
                                                   AUDIO_FORMAT_DEFAULT);
         sp<SwAudioOutputDescriptor> policyDesc = primaryMix->getOutput();
-        if (deviceDesc != nullptr
-                && (policyDesc == nullptr || (policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT))) {
+        bool tryDirectForFlags = policyDesc == nullptr ||
+                (policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT);
+        // If a direct output can be opened to deliver the track's multi-channel content
+        // without being downmixed by the primary output, use that direct output, bypassing
+        // the primary mix if possible; otherwise fall through to the primary mix.
+        bool tryDirectForChannelMask = policyDesc != nullptr
+                    && (audio_channel_count_from_out_mask(policyDesc->getConfig().channel_mask) <
+                        audio_channel_count_from_out_mask(config->channel_mask));
+        if (deviceDesc != nullptr && (tryDirectForFlags || tryDirectForChannelMask)) {
             audio_io_handle_t newOutput;
             status = openDirectOutput(
                     *stream, session, config,
                     (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
                     DeviceVector(deviceDesc), &newOutput);
-            if (status != NO_ERROR) {
-                policyDesc = nullptr;
-            } else {
+            if (status == NO_ERROR) {
                 policyDesc = mOutputs.valueFor(newOutput);
                 primaryMix->setOutput(policyDesc);
-            }
+            } else if (tryDirectForFlags) {
+                policyDesc = nullptr;
+            } // otherwise use primary if available.
         }
         if (policyDesc != nullptr) {
             policyDesc->mPolicyMix = primaryMix;
@@ -1253,16 +1289,48 @@
         }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
+        sp<PreferredMixerAttributesInfo> info = nullptr;
+        if (outputDevices.size() == 1) {
+            info = getPreferredMixerAttributesInfo(
+                    outputDevices.itemAt(0)->getId(),
+                    mEngine->getProductStrategyForAttributes(*resultAttr));
+            // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
+            // and it is currently active.
+            if (info != nullptr && info->getUid() != uid &&
+                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
+                        info->getActiveClientCount() == 0)) {
+                info = nullptr;
+            }
+        }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
-                flags, isSpatialized, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+                flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+        // The playback is bit-perfect when the requesting client owns the preferred mixer
+        // and the requested configuration matches the preferred mixer configuration.
+        *isBitPerfect = (info != nullptr
+                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->getUid() == uid
+                && *output != AUDIO_IO_HANDLE_NONE
+                // When a bit-perfect output is selected for the preferred mixer attributes
+                // owner, only the config match needs to be checked.
+                && mOutputs.valueFor(*output)->isConfigurationMatched(
+                        clientConfig, AUDIO_OUTPUT_FLAG_NONE));
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
+        AudioProfileVector profiles;
+        status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
+        if (ret == NO_ERROR && !profiles.empty()) {
+            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+                    : *profiles[0]->getChannels().begin();
+            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+                    : *profiles[0]->getSampleRates().begin();
+            config->format = profiles[0]->getFormat();
+        }
         return INVALID_OPERATION;
     }
 
     *selectedDeviceId = getFirstDeviceId(outputDevices);
     for (auto &outputDevice : outputDevices) {
-        if (outputDevice->getId() == getConfig().getDefaultOutputDevice()->getId()) {
+        if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
             *selectedDeviceId = outputDevice->getId();
             break;
         }
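
The bit-perfect decision above requires four conditions to hold at once: a preferred-mixer entry exists and carries the bit-perfect flag, the calling uid owns it, an output was actually found, and the client configuration matches the output. A minimal sketch of that predicate, with MixerInfo and kFlagBitPerfect as hypothetical stand-ins for PreferredMixerAttributesInfo and AUDIO_OUTPUT_FLAG_BIT_PERFECT:

    #include <cstdint>
    #include <optional>

    struct MixerInfo {
        uint32_t flags;        // bit mask of output flags on the preferred mixer
        int uid;               // owner of the preferred mixer attributes
        bool configMatches;    // client config equals the output's current config
    };
    constexpr uint32_t kFlagBitPerfect = 1u << 0;  // placeholder bit

    static bool isBitPerfectRequest(const std::optional<MixerInfo>& info,
                                    int callerUid, bool outputFound) {
        return info.has_value()
                && (info->flags & kFlagBitPerfect) != 0
                && info->uid == callerUid
                && outputFound
                && info->configMatches;
    }
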
@@ -1284,13 +1352,14 @@
                                               audio_session_t session,
                                               audio_stream_type_t *stream,
                                               const AttributionSourceState& attributionSource,
-                                              const audio_config_t *config,
+                                              audio_config_t *config,
                                               audio_output_flags_t *flags,
                                               audio_port_handle_t *selectedDeviceId,
                                               audio_port_handle_t *portId,
                                               std::vector<audio_io_handle_t> *secondaryOutputs,
                                               output_type_t *outputType,
-                                              bool *isSpatialized)
+                                              bool *isSpatialized,
+                                              bool *isBitPerfect)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1312,7 +1381,8 @@
 
     status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
             config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized);
+            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
+            isBitPerfect);
     if (status != NO_ERROR) {
         return status;
     }
@@ -1456,6 +1526,7 @@
         const audio_config_t *config,
         audio_output_flags_t *flags,
         bool *isSpatialized,
+        sp<PreferredMixerAttributesInfo> prefMixerConfigInfo,
         bool forceMutingHaptic)
 {
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -1521,6 +1592,10 @@
     if ((*flags & (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) != 0) {
         return AUDIO_IO_HANDLE_NONE;
     }
+    // A request for Tuner cannot fall back to a mixed output
+    if ((directConfig.offload_info.content_id || directConfig.offload_info.sync_id)) {
+        return AUDIO_IO_HANDLE_NONE;
+    }
 
     // ignoring channel mask due to downmix capability in mixer
 
@@ -1531,11 +1606,36 @@
         // get which output is suitable for the specified stream. The actual
         // routing change will happen when startOutput() will be called
         SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
-
-        // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
-        *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
-        output = selectOutput(
-                outputs, *flags, config->format, channelMask, config->sample_rate, session);
+        if (prefMixerConfigInfo != nullptr) {
+            for (audio_io_handle_t outputHandle : outputs) {
+                sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputHandle);
+                if (outputDesc->mProfile == prefMixerConfigInfo->getProfile()) {
+                    output = outputHandle;
+                    break;
+                }
+            }
+            if (output == AUDIO_IO_HANDLE_NONE) {
+                // No output open with the preferred profile. Open a new one.
+                audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                config.channel_mask = prefMixerConfigInfo->getConfigBase().channel_mask;
+                config.sample_rate = prefMixerConfigInfo->getConfigBase().sample_rate;
+                config.format = prefMixerConfigInfo->getConfigBase().format;
+                sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+                        prefMixerConfigInfo->getProfile(), devices, nullptr /*mixerConfig*/,
+                        &config, prefMixerConfigInfo->getFlags());
+                if (preferredOutput == nullptr) {
+                    ALOGE("%s failed to open output with preferred mixer config", __func__);
+                } else {
+                    output = preferredOutput->mIoHandle;
+                }
+            }
+        } else {
+            // at this stage we should ignore the DIRECT flag as no direct output could be
+            // found earlier
+            *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            output = selectOutput(
+                    outputs, *flags, config->format, channelMask, config->sample_rate, session);
+        }
     }
     ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
             "sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -1628,10 +1728,28 @@
         const AudioProfileVector &sourceProfiles, const AudioProfileVector &sinkProfiles,
         audio_port_config *sourceConfig, audio_port_config *sinkConfig) const
 {
+    // Compressed formats for MSD module, ordered from most preferred to least preferred.
+    static const std::vector<audio_format_t> formatsOrder = {{
+            AUDIO_FORMAT_IEC60958, AUDIO_FORMAT_MAT_2_1, AUDIO_FORMAT_MAT_2_0, AUDIO_FORMAT_E_AC3,
+            AUDIO_FORMAT_AC3, AUDIO_FORMAT_PCM_16_BIT }};
+    static const std::vector<audio_channel_mask_t> channelMasksOrder = [](){
+        // Channel position masks for MSD module, 3D > 2D > 1D ordering (most preferred to least
+        // preferred).
+        std::vector<audio_channel_mask_t> masks = {{
+            AUDIO_CHANNEL_OUT_3POINT1POINT2, AUDIO_CHANNEL_OUT_3POINT0POINT2,
+            AUDIO_CHANNEL_OUT_2POINT1POINT2, AUDIO_CHANNEL_OUT_2POINT0POINT2,
+            AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_STEREO }};
+        // insert index masks (higher channel counts are most preferred) ahead of the position masks
+        for (int i = 1; i <= AUDIO_CHANNEL_COUNT_MAX; i++) {
+            masks.insert(
+                    masks.begin(), audio_channel_mask_for_index_assignment_from_count(i));
+        }
+        return masks;
+    }();
+
     struct audio_config_base bestSinkConfig;
-    status_t result = findBestMatchingOutputConfig(sourceProfiles, sinkProfiles,
-            msdCompressedFormatsOrder, msdSurroundChannelMasksOrder,
-            true /*preferHigherSamplingRates*/, bestSinkConfig);
+    status_t result = findBestMatchingOutputConfig(sourceProfiles, sinkProfiles, formatsOrder,
+            channelMasksOrder, true /*preferHigherSamplingRates*/, bestSinkConfig);
     if (result != NO_ERROR) {
         ALOGD("%s() no matching config found for sink, hwAvSync: %d",
                 __func__, hwAvSync);
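
The MSD ordering above is a "most preferred first" list built once: the position masks go from 3D down to stereo, and the index masks are then prepended so that higher channel counts win. A self-contained sketch of the same construction pattern, with plain integers and a hypothetical indexMaskForCount() standing in for the real mask types and helpers:

    #include <vector>

    // Placeholder: an index mask with 'count' channels has the low 'count' bits set.
    static int indexMaskForCount(int count) { return (1 << count) - 1; }

    static const std::vector<int> kPreferredMasks = [] {
        std::vector<int> masks = {0x3F, 0x03};  // position masks, e.g. 5.1 then stereo
        for (int i = 1; i <= 8; ++i) {
            // Prepending means the highest channel count ends up first in the list.
            masks.insert(masks.begin(), indexMaskForCount(i));
        }
        return masks;
    }();
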
@@ -1653,7 +1771,10 @@
     }
     sourceConfig->sample_rate = bestSinkConfig.sample_rate;
     // Specify exact channel mask to prevent guessing by bit count in PatchPanel.
-    sourceConfig->channel_mask = audio_channel_mask_out_to_in(bestSinkConfig.channel_mask);
+    sourceConfig->channel_mask =
+            audio_channel_mask_get_representation(bestSinkConfig.channel_mask)
+            == AUDIO_CHANNEL_REPRESENTATION_INDEX ?
+            bestSinkConfig.channel_mask : audio_channel_mask_out_to_in(bestSinkConfig.channel_mask);
     sourceConfig->format = bestSinkConfig.format;
     // Copy input stream directly without any processing (e.g. resampling).
     sourceConfig->flags.input = static_cast<audio_input_flags_t>(
@@ -1794,7 +1915,8 @@
 }
 
 bool AudioPolicyManager::msdHasPatchesToAllDevices(const AudioDeviceTypeAddrVector& devices) {
-    DeviceVector devicesToCheck = mOutputDevicesAll.getDevicesFromDeviceTypeAddrVec(devices);
+    DeviceVector devicesToCheck =
+            mConfig->getOutputDevices().getDevicesFromDeviceTypeAddrVec(devices);
     AudioPatchCollection msdPatches = getMsdOutputPatches();
     for (size_t i = 0; i < msdPatches.size(); i++) {
         const auto& patch = msdPatches[i];
@@ -1868,7 +1990,8 @@
     //    (see b/200293124, the incorrect selection of AUDIO_OUTPUT_FLAG_VOIP_RX).
     // 3: the output supporting the exact channel mask
     // 4: the output with a higher channel count than requested
-    // 5: the output with a higher sampling rate than requested
+    // 5: the output with the highest sampling rate if the requested sample rate is
+    //    greater than the default sampling rate
     // 6: the output with the highest number of requested performance flags
     // 7: the output with the bit depth the closest to the requested one
     // 8: the primary output
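
The numbered criteria above describe an ordered, tie-breaking selection. One way to realize such a scheme (not the actual selectOutput() code, just an illustration of the technique) is to score each output per criterion and pick the lexicographically greatest score vector:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Each inner vector holds one output's scores, criterion 0 being the most
    // significant. Assumes at least one candidate.
    static size_t pickBest(const std::vector<std::vector<int>>& scores) {
        auto best = std::max_element(scores.begin(), scores.end(),
                [](const std::vector<int>& a, const std::vector<int>& b) {
                    return std::lexicographical_compare(a.begin(), a.end(),
                                                        b.begin(), b.end());
                });
        return static_cast<size_t>(std::distance(scores.begin(), best));
    }
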
@@ -1928,8 +2051,7 @@
         }
 
         // sampling rate match
-        if (samplingRate > SAMPLE_RATE_HZ_DEFAULT &&
-                samplingRate <= outputDesc->getSamplingRate()) {
+        if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
             currentMatchCriteria[4] = outputDesc->getSamplingRate();
         }
 
@@ -1987,8 +2109,51 @@
 
     if (status != NO_ERROR) {
         outputDesc->stop();
+        if (status == DEAD_OBJECT) {
+            sp<SwAudioOutputDescriptor> desc =
+                    reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+            if (desc == nullptr) {
+                // This is not common, it may indicate something wrong with the HAL.
+                ALOGE("%s unable to open output with default config", __func__);
+                return status;
+            }
+            desc->mUsePreferredMixerAttributes = true;
+        }
         return status;
     }
+
+    // If the client is the first one active on the preferred mixer parameters, reopen the output
+    // if the current mixer parameters don't match the preferred ones.
+    if (outputDesc->devices().size() == 1) {
+        sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+                outputDesc->devices()[0]->getId(), client->strategy());
+        if (info != nullptr && info->getUid() == client->uid()) {
+            if (info->getActiveClientCount() == 0 && !outputDesc->isConfigurationMatched(
+                    info->getConfigBase(), info->getFlags())) {
+                stopSource(outputDesc, client);
+                outputDesc->stop();
+                audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+                config.channel_mask = info->getConfigBase().channel_mask;
+                config.sample_rate = info->getConfigBase().sample_rate;
+                config.format = info->getConfigBase().format;
+                sp<SwAudioOutputDescriptor> desc =
+                        reopenOutput(outputDesc, &config, info->getFlags(), __func__);
+                if (desc == nullptr) {
+                    return BAD_VALUE;
+                }
+                desc->mUsePreferredMixerAttributes = true;
+                // Intentionally return an error so that the client resends the request for
+                // creating and starting.
+                return DEAD_OBJECT;
+            }
+            info->increaseActiveClient();
+        }
+    }
+
+    if (client->hasPreferredDevice()) {
+        // playback activity with a preferred device that impacts routing occurred; inform upper layers
+        mpClientInterface->onRoutingUpdated();
+    }
     if (delayMs != 0) {
         usleep(delayMs * 1000);
     }
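
The DEAD_OBJECT return above is deliberate: once the output has been reopened with the preferred mixer configuration, the client is expected to recreate and restart its track. A hypothetical client-side sketch of that retry contract (Status and startWithRetry() are illustrative, not real framework APIs):

    #include <functional>

    enum Status { OK_STATUS = 0, DEAD_OBJECT_STATUS = -32 };

    static Status startWithRetry(const std::function<Status()>& createAndStart,
                                 int maxRetries = 1) {
        Status status = createAndStart();
        while (status == DEAD_OBJECT_STATUS && maxRetries-- > 0) {
            // Re-create the track so it attaches to the reopened output.
            status = createAndStart();
        }
        return status;
    }
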
@@ -2071,8 +2236,7 @@
     outputDesc->setClientActive(client, true);
 
     if (client->hasPreferredDevice(true)) {
-        if (outputDesc->clientsList(true /*activeOnly*/).size() == 1 &&
-                client->isPreferredDeviceForExclusiveUse()) {
+        if (outputDesc->sameExclusivePreferredDevicesCount() > 0) {
             // Preferred device may be exclusive, use only if no other active clients on this output
             devices = DeviceVector(
                         mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId()));
@@ -2137,8 +2301,14 @@
             }
         }
 
+        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+            // If the output is open with preferred mixer attributes but the routed device
+            // changed while calling this function, return DEAD_OBJECT to indicate that routing
+            // changed.
+            return DEAD_OBJECT;
+        }
         const uint32_t muteWaitMs =
-                setOutputDevices(outputDesc, devices, force, 0, NULL, requiresMuteCheck);
+                setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
 
         // apply volume rules for current stream and device if necessary
         auto &curves = getVolumeCurves(client->attributes());
@@ -2158,7 +2328,7 @@
 
         // force reevaluating accessibility routing when ringtone or alarm starts
         if (followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_ALARM))) {
-            mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+            invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
         }
 
         if (waitMs > muteWaitMs) {
@@ -2201,6 +2371,7 @@
     bool isUnicastActive = isLeUnicastActive();
 
     if (wasUnicastActive != isUnicastActive) {
+        std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
         //reroute all outputs routed to LE broadcast if LE unicast activity changed on any output
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2213,6 +2384,13 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
+                if (desc->mUsePreferredMixerAttributes && force) {
+                    // If the device is using preferred mixer attributes, the output needs to be
+                    // reopened with the default configuration when the newly selected devices
+                    // differ from the current routing devices.
+                    outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                    continue;
+                }
                 setOutputDevices(desc, newDevices, force, delayMs);
                 // re-apply device specific volume if not done by setOutputDevice()
                 if (!force) {
@@ -2220,6 +2398,7 @@
                 }
             }
         }
+        reopenOutputsWithDevices(outputsToReopen);
     }
 }
 
@@ -2234,6 +2413,11 @@
     }
     sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
 
+    if (client->hasPreferredDevice(true)) {
+        // playback activity with a preferred device that impacts routing occurred; inform upper layers
+        mpClientInterface->onRoutingUpdated();
+    }
+
     ALOGV("stopOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
@@ -2241,6 +2425,19 @@
 
     if (status == NO_ERROR ) {
         outputDesc->stop();
+    } else {
+        return status;
+    }
+
+    if (outputDesc->devices().size() == 1) {
+        sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+                outputDesc->devices()[0]->getId(), client->strategy());
+        if (info != nullptr && info->getUid() == client->uid()) {
+            info->decreaseActiveClient();
+            if (info->getActiveClientCount() == 0) {
+                reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+            }
+        }
     }
     return status;
 }
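
startOutput() and stopOutput() above bracket an active-client count on the preferred-mixer info, and the output is restored to its default configuration once the last client stops. A toy model of that lifecycle (the class and method names are illustrative only):

    #include <functional>

    class ActiveClientCounter {
    public:
        void onStart() { ++mActive; }

        void onStop(const std::function<void()>& reopenWithDefaultConfig) {
            if (mActive > 0 && --mActive == 0) {
                // Last client gone: drop the preferred configuration.
                reopenWithDefaultConfig();
            }
        }

        int activeCount() const { return mActive; }

    private:
        int mActive = 0;
    };
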
@@ -2271,7 +2468,8 @@
             }
         }
         bool forceDeviceUpdate = false;
-        if (client->hasPreferredDevice(true)) {
+        if (client->hasPreferredDevice(true) &&
+                outputDesc->sameExclusivePreferredDevicesCount() < 2) {
             checkStrategyRoute(client->strategy(), AUDIO_IO_HANDLE_NONE);
             forceDeviceUpdate = true;
         }
@@ -2300,6 +2498,7 @@
 
             // force restoring the device selection on other active outputs if it differs from the
             // one being selected for this output
+            std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
             uint32_t delayMs = outputDesc->latency()*2;
             for (size_t i = 0; i < mOutputs.size(); i++) {
                 sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2310,6 +2509,13 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
+                    if (desc->mUsePreferredMixerAttributes && force) {
+                        // If the device is using preferred mixer attributes, the output needs to
+                        // be reopened with the default configuration when the newly selected
+                        // devices differ from the current routing devices.
+                        outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
+                        continue;
+                    }
                     setOutputDevices(desc, newDevices2, force, delayMs);
 
                     // re-apply device specific volume if not done by setOutputDevice()
@@ -2318,6 +2524,7 @@
                     }
                 }
             }
+            reopenOutputsWithDevices(outputsToReopen);
             // update the outputs if stopping one with a stream that can affect notification routing
             handleNotificationRoutingForStream(stream);
         }
@@ -2396,7 +2603,7 @@
                                              audio_unique_id_t riid,
                                              audio_session_t session,
                                              const AttributionSourceState& attributionSource,
-                                             const audio_config_base_t *config,
+                                             audio_config_base_t *config,
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
                                              input_type_t *inputType,
@@ -2479,7 +2686,7 @@
     *inputType = API_INPUT_INVALID;
 
     if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
-            strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
+            extractAddressFromAudioAttributes(attributes).has_value()) {
         status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
         if (status != NO_ERROR) {
             ALOGW("%s could not find input mix for attr %s",
@@ -2507,7 +2714,7 @@
         } else {
             // Prevent from storing invalid requested device id in clients
             requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
-            device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+            device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
             ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
                 __FUNCTION__, device->type());
         }
@@ -2537,6 +2744,16 @@
     *input = getInputForDevice(device, session, attributes, config, flags, policyMix);
     if (*input == AUDIO_IO_HANDLE_NONE) {
         status = INVALID_OPERATION;
+        AudioProfileVector profiles;
+        status_t ret = getProfilesForDevices(
+                DeviceVector(device), profiles, flags, true /*isInput*/);
+        if (ret == NO_ERROR && !profiles.empty()) {
+            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+                    : *profiles[0]->getChannels().begin();
+            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+                    : *profiles[0]->getSampleRates().begin();
+            config->format = profiles[0]->getFormat();
+        }
         goto error;
     }
 
@@ -2568,7 +2785,7 @@
 audio_io_handle_t AudioPolicyManager::getInputForDevice(const sp<DeviceDescriptor> &device,
                                                         audio_session_t session,
                                                         const audio_attributes_t &attributes,
-                                                        const audio_config_base_t *config,
+                                                        audio_config_base_t *config,
                                                         audio_input_flags_t flags,
                                                         const sp<AudioPolicyMix> &policyMix)
 {
@@ -2595,31 +2812,19 @@
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_ULTRASOUND);
     }
 
-    // find a compatible input profile (not necessarily identical in parameters)
-    sp<IOProfile> profile;
     // sampling rate and flags may be updated by getInputProfile
     uint32_t profileSamplingRate = (config->sample_rate == 0) ?
             SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
-    audio_format_t profileFormat;
+    audio_format_t profileFormat = config->format;
     audio_channel_mask_t profileChannelMask = config->channel_mask;
     audio_input_flags_t profileFlags = flags;
-    for (;;) {
-        profileFormat = config->format; // reset each time through loop, in case it is updated
-        profile = getInputProfile(device, profileSamplingRate, profileFormat, profileChannelMask,
-                                  profileFlags);
-        if (profile != 0) {
-            break; // success
-        } else if (profileFlags & AUDIO_INPUT_FLAG_RAW) {
-            profileFlags = (audio_input_flags_t) (profileFlags & ~AUDIO_INPUT_FLAG_RAW); // retry
-        } else if (profileFlags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(config->format)) {
-            profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
-        } else { // fail
-            ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
-                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
-                  config->sample_rate, config->format, config->channel_mask, flags);
-            return input;
-        }
+    // find a compatible input profile (not necessarily identical in parameters)
+    sp<IOProfile> profile = getInputProfile(
+            device, profileSamplingRate, profileFormat, profileChannelMask, profileFlags);
+    if (profile == nullptr) {
+        return input;
     }
+
     // Pick input sampling rate if not specified by client
     uint32_t samplingRate = config->sample_rate;
     if (samplingRate == 0) {
@@ -2916,7 +3121,8 @@
             bool close = false;
             for (const auto& client : input->clientsList()) {
                 sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
+                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+                                                         client->session());
                 if (!input->supportedDevices().contains(device)) {
                     close = true;
                     break;
@@ -2996,6 +3202,10 @@
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
+    // AUDIO_STREAM_BLUETOOTH_SCO is only used for volume control so we remap
+    // to AUDIO_STREAM_VOICE_CALL to match with relevant playback activity
+    VolumeSource activityVs = (vs == toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false)) ?
+            toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
     product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
 
     status = setVolumeCurveIndex(index, device, curves);
@@ -3034,7 +3244,8 @@
         if (curDevices.erase(AUDIO_DEVICE_OUT_SPEAKER_SAFE)) {
             curDevices.insert(AUDIO_DEVICE_OUT_SPEAKER);
         }
-        if (!(desc->isActive(vs) || isInCall())) {
+
+        if (!(desc->isActive(activityVs) || isInCallOrScreening())) {
             continue;
         }
         if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME &&
@@ -3068,7 +3279,7 @@
                 bool isPreempted = false;
                 bool isHigherPriority = productStrategy < strategy;
                 for (const auto &client : activeClients) {
-                    if (isHigherPriority && (client->volumeSource() != vs)) {
+                    if (isHigherPriority && (client->volumeSource() != activityVs)) {
                         ALOGV("%s: Strategy=%d (\nrequester:\n"
                               " group %d, volumeGroup=%d attributes=%s)\n"
                               " higher priority source active:\n"
@@ -3081,7 +3292,7 @@
                         break;
                     }
                     // However, continue for loop to ensure no higher prio clients running on output
-                    if (client->volumeSource() == vs) {
+                    if (client->volumeSource() == activityVs) {
                         applyVolume = true;
                     }
                 }
@@ -3141,7 +3352,7 @@
     // stream by the engine.
     DeviceTypeSet deviceTypes = {device};
     if (device == AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
-        DeviceTypeSet deviceTypes = mEngine->getOutputDevicesForAttributes(
+        deviceTypes = mEngine->getOutputDevicesForAttributes(
                 attr, nullptr, true /*fromCache*/).types();
     }
     return getVolumeIndex(getVolumeCurves(attr), index, deviceTypes);
@@ -3151,7 +3362,7 @@
                                             int &index,
                                             const DeviceTypeSet& deviceTypes) const
 {
-    if (isSingleDeviceType(deviceTypes, audio_is_output_device)) {
+    if (!isSingleDeviceType(deviceTypes, audio_is_output_device)) {
         return BAD_VALUE;
     }
     index = curves.getVolumeIndex(deviceTypes);
@@ -3556,11 +3767,12 @@
 bool  AudioPolicyManager::areAllDevicesSupported(
         const AudioDeviceTypeAddrVector& devices,
         std::function<bool(audio_devices_t)> predicate,
-        const char *context) {
+        const char *context,
+        bool matchAddress) {
     for (size_t i = 0; i < devices.size(); i++) {
         sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
                 devices[i].mType, devices[i].getAddress(), String8(),
-                AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
+                AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, matchAddress);
         if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
             ALOGE("%s: device type %#x address %s not supported or not match predicate",
                     context, devices[i].mType, devices[i].getAddress());
@@ -3644,6 +3856,7 @@
         // Only apply special touch sound delay once
         delayMs = 0;
     }
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
@@ -3653,6 +3866,13 @@
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
             bool forceRouting = !newDevices.isEmpty();
+            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+                // If the device is using preferred mixer attributes, the output needs to be
+                // reopened with the default configuration when the newly selected devices differ
+                // from the current routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+                continue;
+            }
             waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
                                       true /*requiresMuteCheck*/,
                                       !forceRouting /*requiresVolumeCheck*/);
@@ -3663,6 +3883,7 @@
             applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
     checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
 }
 
@@ -3683,14 +3904,49 @@
     }
 }
 
-status_t AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
-                                                          device_role_t role)
+status_t
+AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                 device_role_t role,
+                                                 const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(
+            devices, audio_is_output_device, __func__, /*matchAddress*/false)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR) {
+        ALOGW("Engine could not remove devices %s for strategy %d role %d",
+                dumpAudioDeviceTypeAddrVector(devices).c_str(), strategy, role);
+        return status;
+    }
+
+    checkForDeviceAndOutputChanges();
+
+    bool forceVolumeReeval = false;
+    // TODO(b/263479999): workaround for truncated touch sounds
+    // to be removed when the problem is handled by system UI
+    uint32_t delayMs = 0;
+    if (strategy == mCommunnicationStrategy) {
+        forceVolumeReeval = true;
+        delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+        updateInputRouting();
+    }
+    updateCallAndOutputRouting(forceVolumeReeval, delayMs);
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::clearDevicesRoleForStrategy(product_strategy_t strategy,
+                                                         device_role_t role)
 {
     ALOGV("%s() strategy=%d role=%d", __func__, strategy, role);
 
-    status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role);
+    status_t status = mEngine->clearDevicesRoleForStrategy(strategy, role);
     if (status != NO_ERROR) {
-        ALOGV("Engine could not remove preferred device for strategy %d status %d",
+        ALOGW_IF(status != NAME_NOT_FOUND,
+                "Engine could not remove device role for strategy %d status %d",
                 strategy, status);
         return status;
     }
@@ -3756,16 +4012,18 @@
     ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
             dumpAudioDeviceTypeAddrVector(devices).c_str());
 
-    if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+    if (!areAllDevicesSupported(
+            devices, audio_call_is_input_device, __func__, /*matchAddress*/false)) {
         return BAD_VALUE;
     }
 
     status_t status = mEngine->removeDevicesRoleForCapturePreset(
             audioSource, role, devices);
-    ALOGW_IF(status != NO_ERROR,
+    ALOGW_IF(status != NO_ERROR && status != NAME_NOT_FOUND,
             "Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
-
-    updateInputRouting();
+    if (status == NO_ERROR) {
+        updateInputRouting();
+    }
     return status;
 }
 
@@ -3774,10 +4032,11 @@
     ALOGV("%s() audioSource=%d role=%d", __func__, audioSource, role);
 
     status_t status = mEngine->clearDevicesRoleForCapturePreset(audioSource, role);
-    ALOGW_IF(status != NO_ERROR,
+    ALOGW_IF(status != NO_ERROR && status != NAME_NOT_FOUND,
             "Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
-
-    updateInputRouting();
+    if (status == NO_ERROR) {
+        updateInputRouting();
+    }
     return status;
 }
 
@@ -3848,13 +4107,13 @@
     dst->appendFormat(" TTS output %savailable\n", mTtsOutputAvailable ? "" : "not ");
     dst->appendFormat(" Master mono: %s\n", mMasterMono ? "on" : "off");
     dst->appendFormat(" Communication Strategy id: %d\n", mCommunnicationStrategy);
-    dst->appendFormat(" Config source: %s\n", mConfig.getSource().c_str()); // getConfig not const
+    dst->appendFormat(" Config source: %s\n", mConfig->getSource().c_str());
 
     dst->append("\n");
     mAvailableOutputDevices.dump(dst, String8("Available output"), 1);
     dst->append("\n");
     mAvailableInputDevices.dump(dst, String8("Available input"), 1);
-    mHwModulesAll.dump(dst);
+    mHwModules.dump(dst);
     mOutputs.dump(dst);
     mInputs.dump(dst);
     mEffects.dump(dst, 1);
@@ -3867,6 +4126,15 @@
         dst->appendFormat("   - uid=%d flag_mask=%#x\n", policy.first, policy.second);
     }
 
+    dst->appendFormat(" Preferred mixer audio configuration:\n");
+    for (const auto it : mPreferredMixerAttrInfos) {
+        dst->appendFormat("   - device port id: %d\n", it.first);
+        for (const auto preferredMixerInfoIt : it.second) {
+            dst->appendFormat("     - strategy: %d; ", preferredMixerInfoIt.first);
+            preferredMixerInfoIt.second->dump(dst);
+        }
+    }
+
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
 }
@@ -4081,8 +4349,11 @@
 
 status_t AudioPolicyManager::getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                 AudioProfileVector& audioProfilesVector) {
-    AudioDeviceTypeAddrVector devices;
-    status_t status = getDevicesForAttributes(*attr, &devices, false /* forVolume */);
+    if (mEffects.isNonOffloadableEffectEnabled()) {
+        return OK;
+    }
+    DeviceVector devices;
+    status_t status = getDevicesForAttributes(*attr, devices, false /* forVolume */);
     if (status != OK) {
         return status;
     }
@@ -4090,43 +4361,187 @@
     if (devices.empty()) {
         return OK; // no output devices for the attributes
     }
+    return getProfilesForDevices(devices, audioProfilesVector,
+                                 AUDIO_OUTPUT_FLAG_DIRECT /*flags*/, false /*isInput*/);
+}
 
+status_t AudioPolicyManager::getSupportedMixerAttributes(
+        audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> &mixerAttrs) {
+    ALOGV("%s, portId=%d", __func__, portId);
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s the requested device(type=%#x) is not usb device", __func__,
+              deviceDescriptor->type());
+        return BAD_VALUE;
+    }
     for (const auto& hwModule : mHwModules) {
-        // the MSD module checks for different conditions
-        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->supportsDevice(deviceDescriptor)) {
+                curProfile->toSupportedMixerAttributes(&mixerAttrs);
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
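
getSupportedMixerAttributes() above simply walks every HW module's output profiles and keeps the ones that support the requested device. A simplified sketch of that filtering, with Module/Profile as illustrative structs rather than the real HwModule/IOProfile classes:

    #include <string>
    #include <vector>

    struct Profile { std::vector<std::string> supportedDevices; std::string name; };
    struct Module  { std::vector<Profile> outputProfiles; };

    static std::vector<std::string> profilesForDevice(const std::vector<Module>& modules,
                                                      const std::string& device) {
        std::vector<std::string> result;
        for (const auto& module : modules) {
            for (const auto& profile : module.outputProfiles) {
                for (const auto& dev : profile.supportedDevices) {
                    if (dev == device) {
                        result.push_back(profile.name);  // profile can drive this device
                        break;
                    }
                }
            }
        }
        return result;
    }
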
+status_t AudioPolicyManager::setPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        uid_t uid,
+        const audio_mixer_attributes_t *mixerAttributes) {
+    ALOGV("%s, attr=%s, mixerAttributes={format=%#x, channelMask=%#x, samplingRate=%u, "
+          "mixerBehavior=%d}, uid=%d, portId=%u",
+          __func__, toString(*attr).c_str(), mixerAttributes->config.format,
+          mixerAttributes->config.channel_mask, mixerAttributes->config.sample_rate,
+          mixerAttributes->mixer_behavior, uid, portId);
+    if (attr->usage != AUDIO_USAGE_MEDIA) {
+        ALOGE("%s failed, only media is allowed, the given usage is %d", __func__, attr->usage);
+        return BAD_VALUE;
+    }
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s(%d), type=%d, is not a usb output device",
+              __func__, portId, deviceDescriptor->type());
+        return BAD_VALUE;
+    }
+
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+    audio_flags_to_audio_output_flags(attr->flags, &flags);
+    flags = (audio_output_flags_t) (flags |
+            audio_output_flags_from_mixer_behavior(mixerAttributes->mixer_behavior));
+    sp<IOProfile> profile = nullptr;
+    DeviceVector devices(deviceDescriptor);
+    for (const auto& hwModule : mHwModules) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->hasDynamicAudioProfile()
+                    && curProfile->isCompatibleProfile(devices,
+                                                       mixerAttributes->config.sample_rate,
+                                                       nullptr /*updatedSamplingRate*/,
+                                                       mixerAttributes->config.format,
+                                                       nullptr /*updatedFormat*/,
+                                                       mixerAttributes->config.channel_mask,
+                                                       nullptr /*updatedChannelMask*/,
+                                                       flags,
+                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                profile = curProfile;
+                break;
+            }
+        }
+    }
+    if (profile == nullptr) {
+        ALOGE("%s, there is no compatible profile found", __func__);
+        return BAD_VALUE;
+    }
+
+    sp<PreferredMixerAttributesInfo> mixerAttrInfo =
+            sp<PreferredMixerAttributesInfo>::make(
+                    uid, portId, profile, flags, *mixerAttributes);
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    mPreferredMixerAttrInfos[portId][strategy] = mixerAttrInfo;
+
+    // If 1) any client from the preferred mixer configuration owner is currently active and
+    // matches the strategy, and 2) the current output is on the preferred device but its mixer
+    // configuration doesn't match the preferred one, reopen the output with the preferred mixer
+    // configuration.
+    std::vector<audio_io_handle_t> outputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        const auto output = mOutputs.valueAt(i);
+        if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
+            if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
+                output->mUsePreferredMixerAttributes = true;
+            } else {
+                for (const auto &client: output->getActiveClients()) {
+                    if (client->uid() == uid && client->strategy() == strategy) {
+                        client->setIsInvalid();
+                        outputsToReopen.push_back(output->mIoHandle);
+                    }
+                }
+            }
+        }
+    }
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mixerAttributes->config.sample_rate;
+    config.channel_mask = mixerAttributes->config.channel_mask;
+    config.format = mixerAttributes->config.format;
+    for (const auto output : outputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc =
+                reopenOutput(mOutputs.valueFor(output), &config, flags, __func__);
+        if (desc == nullptr) {
+            ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        for (const auto& outputProfile : hwModule->getOutputProfiles()) {
-            if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                continue;
-            }
-            // allow only profiles that support all the available and routed devices
-            if (outputProfile->getSupportedDevices().getDevicesFromDeviceTypeAddrVec(devices).size()
-                    != devices.size()) {
-                continue;
-            }
-            audioProfilesVector.addAllValidProfiles(
-                    outputProfile->asAudioPort()->getAudioProfiles());
-        }
+        desc->mUsePreferredMixerAttributes = true;
     }
 
-    // add the direct profiles from MSD if present and has audio patches to all the output(s)
-    const auto& msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    if (msdModule != nullptr) {
-        if (msdHasPatchesToAllDevices(devices)) {
-            ALOGV("%s: MSD audio patches set to all output devices.", __func__);
-            for (const auto& outputProfile : msdModule->getOutputProfiles()) {
-                if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                    continue;
-                }
-                audioProfilesVector.addAllValidProfiles(
-                        outputProfile->asAudioPort()->getAudioProfiles());
-            }
-        } else {
-            ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
-        }
+    return NO_ERROR;
+}
+
+sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
+        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+    auto it = mPreferredMixerAttrInfos.find(devicePortId);
+    if (it == mPreferredMixerAttrInfos.end()) {
+        return nullptr;
+    }
+    auto mixerAttrInfoIt = it->second.find(strategy);
+    if (mixerAttrInfoIt == it->second.end()) {
+        return nullptr;
+    }
+    return mixerAttrInfoIt->second;
+}
+
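
getPreferredMixerAttributesInfo() above is a two-level lookup: device port id first, then product strategy, with a null result when either key is missing. A minimal sketch of the same shape using simplified stand-in types:

    #include <map>
    #include <memory>

    struct MixerAttrInfo { int uid = 0; };

    using InfoByStrategy = std::map<int, std::shared_ptr<MixerAttrInfo>>;
    using InfoByPort     = std::map<int, InfoByStrategy>;

    static std::shared_ptr<MixerAttrInfo> lookup(const InfoByPort& table,
                                                 int portId, int strategy) {
        auto portIt = table.find(portId);
        if (portIt == table.end()) return nullptr;          // no entry for this device port
        auto strategyIt = portIt->second.find(strategy);
        if (strategyIt == portIt->second.end()) return nullptr;  // no entry for this strategy
        return strategyIt->second;
    }
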
+status_t AudioPolicyManager::getPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        audio_mixer_attributes_t* mixerAttributes) {
+    sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+            portId, mEngine->getProductStrategyForAttributes(*attr));
+    if (info == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    *mixerAttributes = info->getMixerAttributes();
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::clearPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                           audio_port_handle_t portId,
+                                                           uid_t uid) {
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    const auto preferredMixerAttrInfo = getPreferredMixerAttributesInfo(portId, strategy);
+    if (preferredMixerAttrInfo == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    if (preferredMixerAttrInfo->getUid() != uid) {
+        ALOGE("%s, requested uid=%d, owned uid=%d",
+              __func__, uid, preferredMixerAttrInfo->getUid());
+        return PERMISSION_DENIED;
+    }
+    mPreferredMixerAttrInfos[portId].erase(strategy);
+    if (mPreferredMixerAttrInfos[portId].empty()) {
+        mPreferredMixerAttrInfos.erase(portId);
     }
 
+    // Reconfig existing output
+    std::vector<audio_io_handle_t> potentialOutputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (mOutputs.valueAt(i)->mProfile == preferredMixerAttrInfo->getProfile()) {
+            potentialOutputsToReopen.push_back(mOutputs.keyAt(i));
+        }
+    }
+    for (const auto output : potentialOutputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        if (desc->isConfigurationMatched(preferredMixerAttrInfo->getConfigBase(),
+                                         preferredMixerAttrInfo->getFlags())) {
+            reopenOutput(desc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
+    }
     return NO_ERROR;
 }
 
@@ -4194,11 +4609,34 @@
             *num_ports += numOutputs;
         }
     }
+
     *generation = curAudioPortGeneration();
     ALOGV("listAudioPorts() got %zu ports needed %d", portsWritten, *num_ports);
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::listDeclaredDevicePorts(media::AudioPortRole role,
+        std::vector<media::AudioPortFw>* _aidl_return) {
+    auto pushPort = [&](const sp<DeviceDescriptor>& dev) -> status_t {
+        audio_port_v7 port;
+        dev->toAudioPort(&port);
+        auto aidlPort = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
+        _aidl_return->push_back(std::move(aidlPort));
+        return OK;
+    };
+
+    for (const auto& module : mHwModules) {
+        for (const auto& dev : module->getDeclaredDevices()) {
+            if (role == media::AudioPortRole::NONE ||
+                    ((role == media::AudioPortRole::SOURCE)
+                            == audio_is_input_device(dev->type()))) {
+                RETURN_STATUS_IF_ERROR(pushPort(dev));
+            }
+        }
+    }
+    return OK;
+}
+
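
The role filter in listDeclaredDevicePorts() above reports a declared device when no role is requested, or when the requested role matches the device direction (SOURCE ports are backed by input devices, SINK ports by output devices). A small sketch of just that condition:

    enum class PortRole { NONE, SOURCE, SINK };

    // Report the port if no role was requested, or if the requested role matches
    // the direction of the underlying device.
    static bool matchesRole(PortRole requested, bool isInputDevice) {
        return requested == PortRole::NONE ||
               ((requested == PortRole::SOURCE) == isInputDevice);
    }
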
 status_t AudioPolicyManager::getAudioPort(struct audio_port_v7 *port)
 {
     if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
@@ -4519,7 +4957,7 @@
                 // In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
                 // in config XML to reach the sink so that it can be declared as available.
                 audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-                sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+                sp<SwAudioOutputDescriptor> outputDesc;
                 if (!sourceDesc->isInternal()) {
                     // take care of dynamic routing for SwOutput selection,
                     audio_attributes_t attributes = sourceDesc->attributes();
@@ -4527,17 +4965,22 @@
                     audio_attributes_t resultAttr;
                     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
                     config.sample_rate = sourceDesc->config().sample_rate;
-                    config.channel_mask = sourceDesc->config().channel_mask;
+                    audio_channel_mask_t sourceMask = sourceDesc->config().channel_mask;
+                    config.channel_mask =
+                            (audio_channel_mask_get_representation(sourceMask)
+                                == AUDIO_CHANNEL_REPRESENTATION_INDEX) ? sourceMask
+                                    : audio_channel_mask_in_to_out(sourceMask);
                     config.format = sourceDesc->config().format;
                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
                     bool isRequestedDeviceForExclusiveUse = false;
                     output_type_t outputType;
                     bool isSpatialized;
+                    bool isBitPerfect;
                     getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
                                         &stream, sourceDesc->uid(), &config, &flags,
                                         &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-                                        nullptr, &outputType, &isSpatialized);
+                                        nullptr, &outputType, &isSpatialized, &isBitPerfect);
                     if (output == AUDIO_IO_HANDLE_NONE) {
                         ALOGV("%s no output for device %s",
                               __FUNCTION__, sinkDevice->toString().c_str());
@@ -4548,7 +4991,8 @@
                         ALOGE("%s output is duplicated", __func__);
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    bool closeOutput = outputDesc->mDirectOpenCount != 0;
+                    sourceDesc->setSwOutput(outputDesc, closeOutput);
                 } else {
                     // Same for "raw patches" aka created from createAudioPatch API
                     SortedVector<audio_io_handle_t> outputs =
@@ -4567,7 +5011,7 @@
                               __func__, sinkDevice->toString().c_str());
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    sourceDesc->setSwOutput(outputDesc, /* closeOutput= */ false);
                 }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
@@ -4589,7 +5033,8 @@
                         audio_port_config srcMixPortConfig = {};
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
-                        srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+                        srcMixPortConfig.ext.mix.usecase.stream =
+                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -4696,17 +5141,29 @@
                 // releaseOutput has already called closeOutput in case of direct output
                 return NO_ERROR;
             }
-            if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
-                resetOutputDevice(outputDesc);
-            } else {
-                // Reuse patch handle if still valid / do not force rerouting if still routed
-                patchHandle = outputDesc->getPatchHandle();
-                setOutputDevices(outputDesc,
-                                 getNewOutputDevices(outputDesc, true /*fromCache*/),
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
-                                 0,
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
-            }
+            patchHandle = outputDesc->getPatchHandle();
+            // When a Sw bridge is released, the mixer used by this bridge releases its patch
+            // on the AudioFlinger side, hence the mixer audio patch must be recreated. Reuse the
+            // patch handle so that AudioFlinger removes the initial mixer patch and updates the
+            // HAL patch handle (prevents leaks).
+            // While using a HwBridge, force reconsidering the device only if not reusing an
+            // existing output and there is no more activity on the output (will force it to close).
+            bool force = sourceDesc->useSwBridge() ||
+                    (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+            // The APM pattern is to always keep outputs opened / patches realized for reachable
+            // devices. The device update may resolve to NONE (empty); coupled with force, this
+            // releases the patch. Reconsider the device only in the following cases:
+            //      1 / active output
+            //      2 / inactive output previously hosting a HwBridge
+            //      3 / inactive output previously hosting a SwBridge that can be closed.
+            bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
+                    sourceDesc->canCloseOutput();
+            setOutputDevices(outputDesc,
+                             updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
+                                            outputDesc->devices(),
+                             force,
+                             0,
+                             patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
         } else {
             return BAD_VALUE;
         }
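
The force/updateDevice derivation above can be read as two small predicates over the bridge and output state. The sketch below restates them with plain flags in place of the descriptor queries, purely to make the decision explicit; the struct and function names are illustrative, not part of the framework.

    struct BridgeReleaseState {
        bool usesSwBridge;      // sourceDesc->useSwBridge()
        bool canCloseOutput;    // sourceDesc->canCloseOutput()
        bool outputActive;      // outputDesc->isActive()
    };

    // Force rerouting for a SW bridge, or for a HW bridge whose output can be
    // closed and has no remaining activity.
    static bool forceRerouting(const BridgeReleaseState& s) {
        return s.usesSwBridge || (s.canCloseOutput && !s.outputActive);
    }

    // Reconsider the device for an active output, a HW bridge, or a SW bridge
    // whose output can be closed.
    static bool reconsiderDevice(const BridgeReleaseState& s) {
        return s.outputActive || !s.usesSwBridge || s.canCloseOutput;
    }
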
@@ -4815,6 +5272,7 @@
     auto attributes = mEngine->getAllAttributesForProductStrategy(ps).front();
     DeviceVector devices = mEngine->getOutputDevicesForAttributes(attributes, nullptr, false);
     SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
+    std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
     for (size_t j = 0; j < mOutputs.size(); j++) {
         if (mOutputs.keyAt(j) == ouptutToSkip) {
             continue;
@@ -4827,14 +5285,20 @@
         // invalidate all tracks in this strategy to force re connection.
         // Otherwise select new device on the output mix.
         if (outputs.indexOf(mOutputs.keyAt(j)) < 0) {
-            for (auto stream : mEngine->getStreamTypesForProductStrategy(ps)) {
-                mpClientInterface->invalidateStream(stream);
-            }
+            invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
-            setOutputDevices(
-                        outputDesc, getNewOutputDevices(outputDesc, false /*fromCache*/), false);
+            DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
+            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+                // If the device is using preferred mixer attributes, the output needs to be
+                // reopened with the default configuration when the newly selected devices differ
+                // from the current routing devices.
+                outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
+                continue;
+            }
+            setOutputDevices(outputDesc, newDevices, false);
         }
     }
+    reopenOutputsWithDevices(outputsToReopen);
 }
 
 void AudioPolicyManager::clearSessionRoutes(uid_t uid)
@@ -4901,7 +5365,11 @@
     *session = (audio_session_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
     *ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_INPUT);
     audio_attributes_t attr = { .source = AUDIO_SOURCE_HOTWORD };
-    *device = mEngine->getInputDeviceForAttributes(attr)->type();
+    sp<DeviceDescriptor> deviceDesc = mEngine->getInputDeviceForAttributes(attr);
+    if (deviceDesc == nullptr) {
+        return INVALID_OPERATION;
+    }
+    *device = deviceDesc->type();
 
     return mSoundTriggerSessions.acquireSession(*session, *ioHandle);
 }
@@ -5064,10 +5532,10 @@
     size_t formatsWritten = 0;
     size_t formatsMax = *numSurroundFormats;
 
-    *numSurroundFormats = mConfig.getSurroundFormats().size();
+    *numSurroundFormats = mConfig->getSurroundFormats().size();
     audio_policy_forced_cfg_t forceUse = mEngine->getForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND);
-    for (const auto& format: mConfig.getSurroundFormats()) {
+    for (const auto& format: mConfig->getSurroundFormats()) {
         if (formatsWritten < formatsMax) {
             surroundFormats[formatsWritten] = format.first;
             bool formatEnabled = true;
@@ -5120,10 +5588,10 @@
         formatset.insert(encodedFormats.begin(), encodedFormats.end());
         // Filter the formats which are supported by the vendor hardware.
         for (auto it = formatset.begin(); it != formatset.end(); ++it) {
-            if (mConfig.getSurroundFormats().count(*it) != 0) {
+            if (mConfig->getSurroundFormats().count(*it) != 0) {
                 formats.insert(*it);
             } else {
-                for (const auto& pair : mConfig.getSurroundFormats()) {
+                for (const auto& pair : mConfig->getSurroundFormats()) {
                     if (pair.second.count(*it) != 0) {
                         formats.insert(pair.first);
                         break;
@@ -5144,8 +5612,8 @@
 status_t AudioPolicyManager::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled)
 {
     ALOGV("%s() format 0x%X enabled %d", __func__, audioFormat, enabled);
-    const auto& formatIter = mConfig.getSurroundFormats().find(audioFormat);
-    if (formatIter == mConfig.getSurroundFormats().end()) {
+    const auto& formatIter = mConfig->getSurroundFormats().find(audioFormat);
+    if (formatIter == mConfig->getSurroundFormats().end()) {
         ALOGW("%s() format 0x%X is not a known surround format", __func__, audioFormat);
         return BAD_VALUE;
     }
@@ -5283,9 +5751,24 @@
     return false;
 }
 
+bool AudioPolicyManager::isHotwordStreamSupported(bool lookbackAudio)
+{
+    const auto mask = AUDIO_INPUT_FLAG_HOTWORD_TAP |
+        (lookbackAudio ? AUDIO_INPUT_FLAG_HW_LOOKBACK : 0);
+    for (const auto& hwModule : mHwModules) {
+        const InputProfileCollection &inputProfiles = hwModule->getInputProfiles();
+        for (const auto &inputProfile : inputProfiles) {
+            if ((inputProfile->getFlags() & mask) == mask) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
 bool AudioPolicyManager::isCallScreenModeSupported()
 {
-    return getConfig().isCallScreenModeSupported();
+    return mConfig->isCallScreenModeSupported();
 }
 
 
@@ -5353,10 +5836,6 @@
         }
     }
 
-    // The caller can have the devices criteria ignored by passing and empty vector, and
-    // getSpatializerOutputProfile() will ignore the devices when looking for a match.
-    // Otherwise an output profile supporting a spatializer effect that can be routed
-    // to the specified devices must exist.
     sp<IOProfile> profile =
             getSpatializerOutputProfile(config, devices);
     if (profile == nullptr) {
@@ -5393,9 +5872,7 @@
         }
     }
 
-    for (audio_stream_type_t stream : streamsToInvalidate) {
-        mpClientInterface->invalidateStream(stream);
-    }
+    invalidateStreams(StreamTypeVector(streamsToInvalidate.begin(), streamsToInvalidate.end()));
 }
 
 
@@ -5524,26 +6001,16 @@
     return mAudioPortGeneration++;
 }
 
-static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
-    if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
-            !audioPolicyXmlConfigFile.empty()) {
-        status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
-        if (ret == NO_ERROR) {
-            config.setSource(audioPolicyXmlConfigFile);
-        }
-        return ret;
-    }
-    return BAD_VALUE;
-}
-
-AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
-                                       bool /*forTesting*/)
+AudioPolicyManager::AudioPolicyManager(const sp<const AudioPolicyConfig>& config,
+                                       EngineInstance&& engine,
+                                       AudioPolicyClientInterface *clientInterface)
     :
     mUidCached(AID_AUDIOSERVER), // no need to call getuid(), there's only one of us running.
+    mConfig(config),
+    mEngine(std::move(engine)),
     mpClientInterface(clientInterface),
     mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
     mA2dpSuspended(false),
-    mConfig(mHwModulesAll, mOutputDevicesAll, mInputDevicesAll, mDefaultOutputDevice),
     mAudioPortGeneration(1),
     mBeaconMuteRefCount(0),
     mBeaconPlayingRefCount(0),
@@ -5554,32 +6021,9 @@
 {
 }
 
-AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface)
-        : AudioPolicyManager(clientInterface, false /*forTesting*/)
-{
-    loadConfig();
-}
-
-void AudioPolicyManager::loadConfig() {
-    if (deserializeAudioPolicyXmlConfig(getConfig()) != NO_ERROR) {
-        ALOGE("could not load audio policy configuration file, setting defaults");
-        getConfig().setDefault();
-    }
-}
-
 status_t AudioPolicyManager::initialize() {
-    {
-        auto engLib = EngineLibrary::load(
-                        "libaudiopolicyengine" + getConfig().getEngineLibraryNameSuffix() + ".so");
-        if (!engLib) {
-            ALOGE("%s: Failed to load the engine library", __FUNCTION__);
-            return NO_INIT;
-        }
-        mEngine = engLib->createEngine();
-        if (mEngine == nullptr) {
-            ALOGE("%s: Failed to instantiate the APM engine", __FUNCTION__);
-            return NO_INIT;
-        }
+    if (mEngine == nullptr) {
+        return NO_INIT;
     }
     mEngine->setObserver(this);
     status_t status = mEngine->initCheck();
@@ -5588,31 +6032,24 @@
         return status;
     }
 
-    mEngine->updateDeviceSelectionCache();
+    // The actual device selection cache will be updated when calling `updateDevicesAndOutputs`
+    // at the end of this function.
+    mEngine->initializeDeviceSelectionCache();
     mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
         mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
 
-    // after parsing the config, mOutputDevicesAll and mInputDevicesAll contain all known devices;
+    // after parsing the config, mConfig contains all known devices;
     // open all output streams needed to access attached devices
     onNewAudioModulesAvailableInt(nullptr /*newDevices*/);
 
     // make sure default device is reachable
-    if (mDefaultOutputDevice == 0 || !mAvailableOutputDevices.contains(mDefaultOutputDevice)) {
-        ALOGE_IF(mDefaultOutputDevice != 0, "Default device %s is unreachable",
-                 mDefaultOutputDevice->toString().c_str());
+    if (const auto defaultOutputDevice = mConfig->getDefaultOutputDevice();
+            defaultOutputDevice == nullptr ||
+            !mAvailableOutputDevices.contains(defaultOutputDevice)) {
+        ALOGE_IF(defaultOutputDevice != nullptr, "Default device %s is unreachable",
+                 defaultOutputDevice->toString().c_str());
         status = NO_INIT;
     }
-    // If microphones address is empty, set it according to device type
-    for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
-        if (mAvailableInputDevices[i]->address().empty()) {
-            if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
-                mAvailableInputDevices[i]->setAddress(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
-            } else if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
-                mAvailableInputDevices[i]->setAddress(AUDIO_BACK_MICROPHONE_ADDRESS);
-            }
-        }
-    }
-
     ALOGW_IF(mPrimaryOutput == nullptr, "The policy configuration does not declare a primary output");
 
     // Silence ALOGV statements
@@ -5635,8 +6072,8 @@
    mOutputs.clear();
    mInputs.clear();
    mHwModules.clear();
-   mHwModulesAll.clear();
    mManualSurroundFormats.clear();
+   mConfig.clear();
 }
 
 status_t AudioPolicyManager::initCheck()
@@ -5658,14 +6095,18 @@
 
 void AudioPolicyManager::onNewAudioModulesAvailableInt(DeviceVector *newDevices)
 {
-    for (const auto& hwModule : mHwModulesAll) {
+    for (const auto& hwModule : mConfig->getHwModules()) {
         if (std::find(mHwModules.begin(), mHwModules.end(), hwModule) != mHwModules.end()) {
             continue;
         }
-        hwModule->setHandle(mpClientInterface->loadHwModule(hwModule->getName()));
         if (hwModule->getHandle() == AUDIO_MODULE_HANDLE_NONE) {
-            ALOGW("could not open HW module %s", hwModule->getName());
-            continue;
+            if (audio_module_handle_t handle = mpClientInterface->loadHwModule(hwModule->getName());
+                    handle != AUDIO_MODULE_HANDLE_NONE) {
+                hwModule->setHandle(handle);
+            } else {
+                ALOGW("could not load HW module %s", hwModule->getName());
+                continue;
+            }
         }
         mHwModules.push_back(hwModule);
         // open all output streams needed to access attached devices.
@@ -5687,10 +6128,10 @@
             }
 
             const DeviceVector &supportedDevices = outProfile->getSupportedDevices();
-            DeviceVector availProfileDevices = supportedDevices.filter(mOutputDevicesAll);
+            DeviceVector availProfileDevices = supportedDevices.filter(mConfig->getOutputDevices());
             sp<DeviceDescriptor> supportedDevice = 0;
-            if (supportedDevices.contains(mDefaultOutputDevice)) {
-                supportedDevice = mDefaultOutputDevice;
+            if (supportedDevices.contains(mConfig->getDefaultOutputDevice())) {
+                supportedDevice = mConfig->getDefaultOutputDevice();
             } else {
                 // choose first device present in profile's SupportedDevices also part of
                 // mAvailableOutputDevices.
@@ -5699,7 +6140,7 @@
                 }
                 supportedDevice = availProfileDevices.itemAt(0);
             }
-            if (!mOutputDevicesAll.contains(supportedDevice)) {
+            if (!mConfig->getOutputDevices().contains(supportedDevice)) {
                 continue;
             }
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
@@ -5754,7 +6195,7 @@
             // chose first device present in profile's SupportedDevices also part of
             // available input devices
             const DeviceVector &supportedDevices = inProfile->getSupportedDevices();
-            DeviceVector availProfileDevices = supportedDevices.filter(mInputDevicesAll);
+            DeviceVector availProfileDevices = supportedDevices.filter(mConfig->getInputDevices());
             if (availProfileDevices.isEmpty()) {
                 ALOGV("%s: Input device list is empty! for profile %s",
                     __func__, inProfile->getTagName().c_str());
@@ -6005,6 +6446,14 @@
     }
 
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+        // first call getAudioPort to get the supported attributes from the HAL
+        struct audio_port_v7 port = {};
+        device->toAudioPort(&port);
+        status_t status = mpClientInterface->getAudioPort(&port);
+        if (status == NO_ERROR) {
+            device->importAudioPort(port);
+        }
+
         // look for input profiles that can be routed to this device
         SortedVector< sp<IOProfile> > profiles;
         for (const auto& hwModule : mHwModules) {
@@ -6056,11 +6505,7 @@
 
             desc = new AudioInputDescriptor(profile, mpClientInterface);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status_t status = desc->open(nullptr,
-                                         device,
-                                         AUDIO_SOURCE_MIC,
-                                         AUDIO_INPUT_FLAG_NONE,
-                                         &input);
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -6317,10 +6762,11 @@
     SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
 
     uint32_t maxLatency = 0;
-    bool invalidate = false;
+    bool unneededUsePrimaryOutputFromPolicyMixes = false;
+    std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
-    for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+    for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
         if (desc->isDuplicated()) {
             continue;
@@ -6330,22 +6776,23 @@
                 continue;
             }
             sp<AudioPolicyMix> primaryMix;
-            status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
-                    client->flags(), primaryMix, nullptr);
+            status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
+                    client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
+                    nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
+                    unneededUsePrimaryOutputFromPolicyMixes);
             if (status != OK) {
                 continue;
             }
             if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
-                invalidate = true;
-                if (desc->isStrategyActive(psId)) {
+                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
                     maxLatency = desc->latency();
                 }
-                break;
+                invalidatedOutputs.push_back(desc);
             }
         }
     }
 
-    if (srcOutputs != dstOutputs || invalidate) {
+    if (srcOutputs != dstOutputs || !invalidatedOutputs.empty()) {
         // get maximum latency of all source outputs to determine the minimum mute time guaranteeing
         // audio from invalidated tracks will be rendered when unmuting
         for (audio_io_handle_t srcOut : srcOutputs) {
@@ -6356,8 +6803,7 @@
                 maxLatency = desc->latency();
             }
 
-            if (invalidate) continue;
-
+            bool invalidate = false;
             for (auto client : desc->clientsList(false /*activeOnly*/)) {
                 if (desc->isDuplicated() || !desc->mProfile->isDirectOutput()) {
                     // a client on a non direct outputs has necessarily a linear PCM format
@@ -6385,21 +6831,14 @@
                     }
                 }
             }
-        }
-
-        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
-              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
-              std::to_string(srcOutputs[0]).c_str(),
-              std::to_string(dstOutputs[0]).c_str());
-        // mute strategy while moving tracks from one output to another
-        for (audio_io_handle_t srcOut : srcOutputs) {
-            sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-            if (desc == nullptr) continue;
-
-            if (desc->isStrategyActive(psId)) {
-                setStrategyMute(psId, true, desc);
-                setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
-                                newDevices.types());
+            // mute strategy while moving tracks from one output to another
+            if (invalidate) {
+                invalidatedOutputs.push_back(desc);
+                if (desc->isStrategyActive(psId)) {
+                    setStrategyMute(psId, true, desc);
+                    setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
+                                    newDevices.types());
+                }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
             if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
@@ -6407,19 +6846,19 @@
             }
         }
 
+        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
+              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
+              std::to_string(srcOutputs[0]).c_str(),
+              std::to_string(dstOutputs[0]).c_str());
+
         // Move effects associated to this stream from previous output to new output
         if (followsSameRouting(attr, attributes_initializer(AUDIO_USAGE_MEDIA))) {
             selectOutputForMusicEffects();
         }
         // Move tracks associated to this stream (and linked) from previous output to new output
-        if (invalidate) {
-            for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
-                mpClientInterface->invalidateStream(stream);
-            }
-            for (audio_io_handle_t srcOut : srcOutputs) {
-                sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-                if (desc == nullptr) continue;
-
+        if (!invalidatedOutputs.empty()) {
+            invalidateStreams(mEngine->getStreamTypesForProductStrategy(psId));
+            for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
                 desc->setTracksInvalidatedStatusByStrategy(psId);
             }
         }
@@ -6436,15 +6875,18 @@
 }
 
 void AudioPolicyManager::checkSecondaryOutputs() {
-    std::set<audio_stream_type_t> streamsToInvalidate;
+    PortHandleVector clientsToInvalidate;
     TrackSecondaryOutputsMap trackSecondaryOutputs;
+    bool unneededUsePrimaryOutputFromPolicyMixes = false;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& outputDescriptor = mOutputs[i];
         for (const sp<TrackClientDescriptor>& client : outputDescriptor->getClientIterable()) {
             sp<AudioPolicyMix> primaryMix;
             std::vector<sp<AudioPolicyMix>> secondaryMixes;
-            status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
-                    client->flags(), primaryMix, &secondaryMixes);
+            status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
+                    client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
+                    nullptr /* requestedDevice */, primaryMix, &secondaryMixes,
+                    unneededUsePrimaryOutputFromPolicyMixes);
             std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
             for (auto &secondaryMix : secondaryMixes) {
                 sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
@@ -6454,8 +6896,11 @@
                 }
             }
 
-            if (status != OK) {
-                streamsToInvalidate.insert(client->stream());
+            if (status != OK &&
+                (client->flags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == AUDIO_OUTPUT_FLAG_NONE) {
+                // When querying the secondary outputs fails, only invalidate clients that are
+                // not MMAP, since MMAP streams do not support secondary outputs.
+                clientsToInvalidate.push_back(client->portId());
             } else if (!std::equal(
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
@@ -6463,7 +6908,7 @@
                 if (!audio_is_linear_pcm(client->config().format)) {
                     // If the format is not PCM, the tracks should be invalidated to get correct
                     // behavior when the secondary output is changed.
-                    streamsToInvalidate.insert(client->stream());
+                    clientsToInvalidate.push_back(client->portId());
                 } else {
                     std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
                     std::vector<audio_io_handle_t> secondaryOutputIds;
@@ -6480,9 +6925,9 @@
     if (!trackSecondaryOutputs.empty()) {
         mpClientInterface->updateSecondaryOutputs(trackSecondaryOutputs);
     }
-    for (audio_stream_type_t stream : streamsToInvalidate) {
-        ALOGD("%s Invalidate stream %d due to fail getting output for attr", __func__, stream);
-        mpClientInterface->invalidateStream(stream);
+    if (!clientsToInvalidate.empty()) {
+        ALOGD("%s Invalidate clients due to failure getting output for attr", __func__);
+        mpClientInterface->invalidateTracks(clientsToInvalidate);
     }
 }
 
@@ -6652,20 +7097,23 @@
     // a null sp<>, causing the patch on the input stream to be released.
     audio_attributes_t attributes;
     uid_t uid;
+    audio_session_t session;
     sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
     if (topClient != nullptr) {
         attributes = topClient->attributes();
         uid = topClient->uid();
+        session = topClient->session();
     } else {
         attributes = { .source = AUDIO_SOURCE_DEFAULT };
         uid = 0;
+        session = AUDIO_SESSION_NONE;
     }
 
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
         attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     }
     if (attributes.source != AUDIO_SOURCE_DEFAULT) {
-        device = mEngine->getInputDeviceForAttributes(attributes, uid);
+        device = mEngine->getInputDeviceForAttributes(attributes, uid, session);
     }
 
     return device;
@@ -6676,70 +7124,15 @@
     return (stream1 == stream2);
 }
 
-// TODO - consider MSD routes b/214971780
 status_t AudioPolicyManager::getDevicesForAttributes(
         const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices, bool forVolume) {
     if (devices == nullptr) {
         return BAD_VALUE;
     }
 
-    // Devices are determined in the following precedence:
-    //
-    // 1) Devices associated with a dynamic policy matching the attributes.  This is often
-    //    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
-    //
-    // If no such dynamic policy then
-    // 2) Devices containing an active client using setPreferredDevice
-    //    with same strategy as the attributes.
-    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
-    //
-    // If no corresponding active client with setPreferredDevice then
-    // 3) Devices associated with the strategy determined by the attributes
-    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
-    //
-    // See related getOutputForAttrInt().
-
-    // check dynamic policies but only for primary descriptors (secondary not used for audible
-    // audio routing, only used for duplication for playback capture)
-    sp<AudioPolicyMix> policyMix;
-    status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
-            AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr /* secondaryMixes */);
-    if (status != OK) {
-        return status;
-    }
-
     DeviceVector curDevices;
-    if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
-            // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
-            // as they are unaffected by device/stream volume
-            // (per SwAudioOutputDescriptor::isFixedVolume()).
-            (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
-            ) {
-        sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
-                policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
-        curDevices.add(deviceDesc);
-    } else {
-        // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
-        // which selects setPreferredDevice if active.  This means forVolume call
-        // will take an active setPreferredDevice, if such exists.
-
-        curDevices = mEngine->getOutputDevicesForAttributes(
-                attr, nullptr /* preferredDevice */, false /* fromCache */);
-    }
-
-    if (forVolume) {
-        // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
-        // for single volume control in AudioService (such relationship should exist if
-        // SPEAKER_SAFE is present).
-        //
-        // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
-        DeviceVector speakerSafeDevices =
-                curDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
-        if (!speakerSafeDevices.isEmpty()) {
-            curDevices.merge(
-                    mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
-            curDevices.remove(speakerSafeDevices);
-        }
+    if (status_t status = getDevicesForAttributes(attr, curDevices, forVolume); status != OK) {
+        return status;
     }
     for (const auto& device : curDevices) {
         devices->push_back(device->getDeviceTypeAddr());
@@ -6924,6 +7317,7 @@
                                               audio_patch_handle_t *patchHandle,
                                               bool requiresMuteCheck, bool requiresVolumeCheck)
 {
+    // TODO(b/262404095): Consider whether the output needs to be reopened.
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
 
@@ -7094,51 +7488,71 @@
 {
     // Choose an input profile based on the requested capture parameters: select the first available
     // profile supporting all requested parameters.
+    // The flags can be ignored if they do not contain a must-match flag.
     //
     // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
     // the best matching profile, not the first one.
 
-    sp<IOProfile> firstInexact;
-    uint32_t updatedSamplingRate = 0;
-    audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
-    audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
-    for (const auto& hwModule : mHwModules) {
-        for (const auto& profile : hwModule->getInputProfiles()) {
-            // profile->log();
-            //updatedFormat = format;
-            if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
-                                             &samplingRate  /*updatedSamplingRate*/,
-                                             format,
-                                             &format,       /*updatedFormat*/
-                                             channelMask,
-                                             &channelMask   /*updatedChannelMask*/,
-                                             // FIXME ugly cast
-                                             (audio_output_flags_t) flags,
-                                             true /*exactMatchRequiredForInputFlags*/)) {
-                return profile;
-            }
-            if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
-                                             samplingRate,
-                                             &updatedSamplingRate,
-                                             format,
-                                             &updatedFormat,
-                                             channelMask,
-                                             &updatedChannelMask,
-                                             // FIXME ugly cast
-                                             (audio_output_flags_t) flags,
-                                             false /*exactMatchRequiredForInputFlags*/)) {
-                firstInexact = profile;
-            }
+    using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
+    const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
+                         AUDIO_INPUT_FLAG_HOTWORD_TAP | AUDIO_INPUT_FLAG_HW_LOOKBACK;
 
+    const underlying_input_flag_t oriFlags = flags;
+
+    for (;;) {
+        sp<IOProfile> firstInexact = nullptr;
+        uint32_t updatedSamplingRate = 0;
+        audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
+        audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
+        for (const auto& hwModule : mHwModules) {
+            for (const auto& profile : hwModule->getInputProfiles()) {
+                // profile->log();
+                //updatedFormat = format;
+                if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
+                                                 &samplingRate  /*updatedSamplingRate*/,
+                                                 format,
+                                                 &format,       /*updatedFormat*/
+                                                 channelMask,
+                                                 &channelMask   /*updatedChannelMask*/,
+                                                 // FIXME ugly cast
+                                                 (audio_output_flags_t) flags,
+                                                 true /*exactMatchRequiredForInputFlags*/)) {
+                    return profile;
+                }
+                if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
+                                                 samplingRate,
+                                                 &updatedSamplingRate,
+                                                 format,
+                                                 &updatedFormat,
+                                                 channelMask,
+                                                 &updatedChannelMask,
+                                                 // FIXME ugly cast
+                                                 (audio_output_flags_t) flags,
+                                                 false /*exactMatchRequiredForInputFlags*/)) {
+                    firstInexact = profile;
+                }
+            }
+        }
+
+        if (firstInexact != nullptr) {
+            samplingRate = updatedSamplingRate;
+            format = updatedFormat;
+            channelMask = updatedChannelMask;
+            return firstInexact;
+        } else if (flags & AUDIO_INPUT_FLAG_RAW) {
+            flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
+        } else if ((flags & mustMatchFlag) == AUDIO_INPUT_FLAG_NONE &&
+                flags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(format)) {
+            flags = AUDIO_INPUT_FLAG_NONE;
+        } else { // fail
+            ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
+                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
+                  samplingRate, format, channelMask, oriFlags);
+            break;
         }
     }
-    if (firstInexact != nullptr) {
-        samplingRate = updatedSamplingRate;
-        format = updatedFormat;
-        channelMask = updatedChannelMask;
-        return firstInexact;
-    }
-    return NULL;
+
+    return nullptr;
 }
 
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
@@ -7312,7 +7726,8 @@
     // if sco and call follow same curves, bypass forceUseForComm
     if ((callVolSrc != btScoVolSrc) &&
             ((isVoiceVolSrc && isScoRequested) ||
-             (isBtScoVolSrc && !(isScoRequested || isHAUsed)))) {
+             (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
+            !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
         ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
              volumeSource, isScoRequested ? " " : " not ");
         // Do not return an error here as AudioService will always set both voice call
@@ -7480,14 +7895,18 @@
     return is_state_in_call(state);
 }
 
-bool AudioPolicyManager::isCallAudioAccessible()
-{
+bool AudioPolicyManager::isCallAudioAccessible() const {
     audio_mode_t mode = mEngine->getPhoneState();
     return (mode == AUDIO_MODE_IN_CALL)
             || (mode == AUDIO_MODE_CALL_SCREEN)
             || (mode == AUDIO_MODE_CALL_REDIRECT);
 }
 
+bool AudioPolicyManager::isInCallOrScreening() const {
+    audio_mode_t mode = mEngine->getPhoneState();
+    return isStateInCall(mode) || mode == AUDIO_MODE_CALL_SCREEN;
+}
+
 void AudioPolicyManager::cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc)
 {
     for (ssize_t i = (ssize_t)mAudioSources.size() - 1; i >= 0; i--)  {
@@ -7536,7 +7955,7 @@
     std::unordered_set<audio_format_t> enforcedSurround(
             devDesc->encodedFormats().begin(), devDesc->encodedFormats().end());
     std::unordered_set<audio_format_t> allSurround;  // A flat set of all known surround formats
-    for (const auto& pair : mConfig.getSurroundFormats()) {
+    for (const auto& pair : mConfig->getSurroundFormats()) {
         allSurround.insert(pair.first);
         for (const auto& subformat : pair.second) allSurround.insert(subformat);
     }
@@ -7633,12 +8052,17 @@
                 ioHandle, String8(AudioParameter::keyStreamSupportedFormats));
         ALOGV("%s: supported formats %d, %s", __FUNCTION__, ioHandle, reply.string());
         AudioParameter repliedParameters(reply);
+        FormatVector formats;
         if (repliedParameters.get(
-                String8(AudioParameter::keyStreamSupportedFormats), reply) != NO_ERROR) {
-            ALOGE("%s: failed to retrieve format, bailing out", __FUNCTION__);
+                String8(AudioParameter::keyStreamSupportedFormats), reply) == NO_ERROR) {
+            formats = formatsFromString(reply.string());
+        } else if (devDesc->hasValidAudioProfile()) {
+            ALOGD("%s: using the device profiles", __func__);
+            formats = devDesc->getAudioProfiles().getSupportedFormats();
+        } else {
+            ALOGE("%s: failed to retrieve format, bailing out", __func__);
             return;
         }
-        FormatVector formats = formatsFromString(reply.string());
         mReportedFormatsMap[devDesc] = formats;
         if (device == AUDIO_DEVICE_OUT_HDMI
                 || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
@@ -7648,7 +8072,7 @@
     }
 
     for (audio_format_t format : profiles.getSupportedFormats()) {
-        ChannelMaskSet channelMasks;
+        std::optional<ChannelMaskSet> channelMasks;
         SampleRateSet samplingRates;
         AudioParameter requestedParameters;
         requestedParameters.addInt(String8(AudioParameter::keyFormat), format);
@@ -7663,6 +8087,8 @@
             if (repliedParameters.get(
                     String8(AudioParameter::keyStreamSupportedSamplingRates), reply) == NO_ERROR) {
                 samplingRates = samplingRatesFromString(reply.string());
+            } else {
+                samplingRates = devDesc->getAudioProfiles().getSampleRatesFor(format);
             }
         }
         if (profiles.hasDynamicChannelsFor(format)) {
@@ -7674,14 +8100,17 @@
             if (repliedParameters.get(
                     String8(AudioParameter::keyStreamSupportedChannels), reply) == NO_ERROR) {
                 channelMasks = channelMasksFromString(reply.string());
-                if (device == AUDIO_DEVICE_OUT_HDMI
-                        || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD)) {
-                    modifySurroundChannelMasks(&channelMasks);
-                }
+            } else {
+                channelMasks = devDesc->getAudioProfiles().getChannelMasksFor(format);
+            }
+            if (channelMasks.has_value() && (device == AUDIO_DEVICE_OUT_HDMI
+                    || isDeviceOfModule(devDesc, AUDIO_HARDWARE_MODULE_ID_MSD))) {
+                modifySurroundChannelMasks(&channelMasks.value());
             }
         }
         addDynamicAudioProfileAndSort(
-                profiles, new AudioProfile(format, channelMasks, samplingRates));
+                profiles, new AudioProfile(
+                        format, channelMasks.value_or(ChannelMaskSet()), samplingRates));
     }
 }
 
@@ -7780,19 +8209,23 @@
 
 sp<SwAudioOutputDescriptor> AudioPolicyManager::openOutputWithProfileAndDevice(
         const sp<IOProfile>& profile, const DeviceVector& devices,
-        const audio_config_base_t *mixerConfig)
+        const audio_config_base_t *mixerConfig, const audio_config_t *halConfig,
+        audio_output_flags_t flags)
 {
     for (const auto& device : devices) {
         // TODO: This should be checking if the profile supports the device combo.
         if (!profile->supportsDevice(device)) {
+            ALOGE("%s profile(%s) doesn't support device %#x", __func__, profile->getName().c_str(),
+                  device->type());
             return nullptr;
         }
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    status_t status = desc->open(nullptr /* halConfig */, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+    status_t status = desc->open(halConfig, mixerConfig, devices,
+            AUDIO_STREAM_DEFAULT, flags, &output);
     if (status != NO_ERROR) {
+        ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
     }
 
@@ -7810,7 +8243,9 @@
         ALOGW("%s() missing param", __func__);
         desc->close();
         return nullptr;
-    } else if (profile->hasDynamicAudioProfile()) {
+    } else if (profile->hasDynamicAudioProfile() && halConfig == nullptr) {
+        // Reopen the output with the best audio profile picked by APM when the profile supports
+        // dynamic audio profiles and the HAL config is not specified.
         desc->close();
         output = AUDIO_IO_HANDLE_NONE;
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -7820,8 +8255,7 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices,
-                            AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
         if (status != NO_ERROR) {
             return nullptr;
         }
@@ -7829,6 +8263,9 @@
 
     addOutput(output, desc);
 
+    sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
+            AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
+
     if (audio_is_remote_submix_device(deviceType) && address != "0") {
         sp<AudioPolicyMix> policyMix;
         if (mPolicyMixes.getAudioPolicyMix(deviceType, address, policyMix) == NO_ERROR) {
@@ -7839,13 +8276,13 @@
                     address.string());
         }
 
-    } else if (hasPrimaryOutput() && profile->getModule()
-                != mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)
+    } else if (hasPrimaryOutput() && speaker != nullptr
+            && mPrimaryOutput->supportsDevice(speaker) && !desc->supportsDevice(speaker)
             && ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0)) {
         // no duplicated output for:
         // - direct outputs
         // - outputs used by dynamic policy mixes
-        // - outputs opened on the primary HW module
+        // - outputs that support SPEAKER while the primary output does not.
         audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
 
         //TODO: configure audio effect output stage here
@@ -7873,4 +8310,153 @@
     return desc;
 }
 
+status_t AudioPolicyManager::getDevicesForAttributes(
+        const audio_attributes_t &attr, DeviceVector &devices, bool forVolume) {
+    // Devices are determined in the following precedence:
+    //
+    // 1) Devices associated with a dynamic policy matching the attributes.  This is often
+    //    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
+    //
+    // If no such dynamic policy then
+    // 2) Devices containing an active client using setPreferredDevice
+    //    with same strategy as the attributes.
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // If no corresponding active client with setPreferredDevice then
+    // 3) Devices associated with the strategy determined by the attributes
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // See related getOutputForAttrInt().
+
+    // check dynamic policies but only for primary descriptors (secondary not used for audible
+    // audio routing, only used for duplication for playback capture)
+    sp<AudioPolicyMix> policyMix;
+    bool unneededUsePrimaryOutputFromPolicyMixes = false;
+    status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
+            0 /*uid unknown here*/, AUDIO_SESSION_NONE, AUDIO_OUTPUT_FLAG_NONE,
+            mAvailableOutputDevices, nullptr /* requestedDevice */, policyMix,
+            nullptr /* secondaryMixes */, unneededUsePrimaryOutputFromPolicyMixes);
+    if (status != OK) {
+        return status;
+    }
+
+    if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
+            // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
+            // as they are unaffected by device/stream volume
+            // (per SwAudioOutputDescriptor::isFixedVolume()).
+            (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
+            ) {
+        sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
+                policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+        devices.add(deviceDesc);
+    } else {
+        // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
+        // which selects setPreferredDevice if active.  This means forVolume call
+        // will take an active setPreferredDevice, if such exists.
+
+        devices = mEngine->getOutputDevicesForAttributes(
+                attr, nullptr /* preferredDevice */, false /* fromCache */);
+    }
+
+    if (forVolume) {
+        // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
+        // for single volume control in AudioService (such relationship should exist if
+        // SPEAKER_SAFE is present).
+        //
+        // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
+        DeviceVector speakerSafeDevices =
+                devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
+        if (!speakerSafeDevices.isEmpty()) {
+            devices.merge(mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
+            devices.remove(speakerSafeDevices);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getProfilesForDevices(const DeviceVector& devices,
+                                                   AudioProfileVector& audioProfiles,
+                                                   uint32_t flags,
+                                                   bool isInput) {
+    for (const auto& hwModule : mHwModules) {
+        // the MSD module checks for different conditions
+        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+            continue;
+        }
+        IOProfileCollection ioProfiles = isInput ? hwModule->getInputProfiles()
+                                                 : hwModule->getOutputProfiles();
+        for (const auto& profile : ioProfiles) {
+            if (!profile->areAllDevicesSupported(devices) ||
+                    !profile->isCompatibleProfileForFlags(
+                            flags, false /*exactMatchRequiredForInputFlags*/)) {
+                continue;
+            }
+            audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+        }
+    }
+
+    if (!isInput) {
+        // add the direct profiles from MSD if it is present and has audio patches to all the output(s)
+        const auto &msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+        if (msdModule != nullptr) {
+            if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+                ALOGV("%s: MSD audio patches set to all output devices.", __func__);
+                for (const auto &profile: msdModule->getOutputProfiles()) {
+                    if (!profile->asAudioPort()->isDirectOutput()) {
+                        continue;
+                    }
+                    audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+                }
+            } else {
+                ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+sp<SwAudioOutputDescriptor> AudioPolicyManager::reopenOutput(sp<SwAudioOutputDescriptor> outputDesc,
+                                                             const audio_config_t *config,
+                                                             audio_output_flags_t flags,
+                                                             const char* caller) {
+    closeOutput(outputDesc->mIoHandle);
+    sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+            outputDesc->mProfile, outputDesc->devices(), nullptr /*mixerConfig*/, config, flags);
+    if (preferredOutput == nullptr) {
+        ALOGE("%s failed to reopen output device=%d, caller=%s",
+              __func__, outputDesc->devices()[0]->getId(), caller);
+    }
+    return preferredOutput;
+}
+
+void AudioPolicyManager::reopenOutputsWithDevices(
+        const std::map<audio_io_handle_t, DeviceVector> &outputsToReopen) {
+    for (const auto& [output, devices] : outputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        closeOutput(output);
+        openOutputWithProfileAndDevice(desc->mProfile, devices);
+    }
+}
+
+PortHandleVector AudioPolicyManager::getClientsForStream(
+        audio_stream_type_t streamType) const {
+    PortHandleVector clients;
+    for (size_t i = 0; i < mOutputs.size(); ++i) {
+        PortHandleVector clientsForStream = mOutputs.valueAt(i)->getClientsForStream(streamType);
+        clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+    }
+    return clients;
+}
+
+void AudioPolicyManager::invalidateStreams(StreamTypeVector streams) const {
+    PortHandleVector clients;
+    for (auto stream : streams) {
+        PortHandleVector clientsForStream = getClientsForStream(stream);
+        clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+    }
+    mpClientInterface->invalidateTracks(clients);
+}
+
 } // namespace android
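
The retry loop added to getInputProfile() above relaxes the requested input flags in a fixed order
before giving up: after a full scan finds no compatible profile, AUDIO_INPUT_FLAG_RAW is dropped
first; if that still fails and none of the must-match flags (MMAP_NOIRQ, HOTWORD_TAP, HW_LOOKBACK)
are set, all remaining flags are cleared for linear PCM formats; otherwise the lookup fails. Below
is a minimal standalone sketch of that relaxation order, assuming the flag definitions from
system/audio.h (including the HOTWORD_TAP and HW_LOOKBACK flags referenced in this patch);
relaxInputFlags() and its isLinearPcm parameter are illustrative names, not part of the patch:

    #include <system/audio.h>

    // Sketch only: mirrors the flag-relaxation order used by getInputProfile() in the
    // hunk above; this helper is hypothetical and not part of the patch itself.
    static audio_input_flags_t relaxInputFlags(audio_input_flags_t flags, bool isLinearPcm) {
        constexpr uint32_t kMustMatch = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
                AUDIO_INPUT_FLAG_HOTWORD_TAP | AUDIO_INPUT_FLAG_HW_LOOKBACK;
        if (flags & AUDIO_INPUT_FLAG_RAW) {
            // First relaxation: retry the lookup without RAW.
            return (audio_input_flags_t)(flags & ~AUDIO_INPUT_FLAG_RAW);
        }
        if ((flags & kMustMatch) == 0 && flags != AUDIO_INPUT_FLAG_NONE && isLinearPcm) {
            // Second relaxation: drop all remaining optional flags.
            return AUDIO_INPUT_FLAG_NONE;
        }
        return flags;  // nothing left to relax; the caller logs a warning and gives up
    }
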
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index db0ee15..88bafef 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -35,6 +35,7 @@
 #include <media/PatchBuilder.h>
 #include "AudioPolicyInterface.h"
 
+#include <android/media/DeviceConnectedState.h>
 #include <android/media/audio/common/AudioPort.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioPolicyConfig.h>
@@ -47,6 +48,7 @@
 #include <AudioOutputDescriptor.h>
 #include <AudioPolicyMix.h>
 #include <EffectDescriptor.h>
+#include <PreferredMixerAttributesInfo.h>
 #include <SoundTriggerSession.h>
 #include "EngineLibrary.h"
 #include "TypeConverter.h"
@@ -92,7 +94,9 @@
 {
 
 public:
-        explicit AudioPolicyManager(AudioPolicyClientInterface *clientInterface);
+        AudioPolicyManager(const sp<const AudioPolicyConfig>& config,
+                           EngineInstance&& engine,
+                           AudioPolicyClientInterface *clientInterface);
         virtual ~AudioPolicyManager();
 
         // AudioPolicyInterface
@@ -117,13 +121,14 @@
                                   audio_session_t session,
                                   audio_stream_type_t *stream,
                                   const AttributionSourceState& attributionSource,
-                                  const audio_config_t *config,
+                                  audio_config_t *config,
                                   audio_output_flags_t *flags,
                                   audio_port_handle_t *selectedDeviceId,
                                   audio_port_handle_t *portId,
                                   std::vector<audio_io_handle_t> *secondaryOutputs,
                                   output_type_t *outputType,
-                                  bool *isSpatialized) override;
+                                  bool *isSpatialized,
+                                  bool *isBitPerfect) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
@@ -132,7 +137,7 @@
                                          audio_unique_id_t riid,
                                          audio_session_t session,
                                          const AttributionSourceState& attributionSource,
-                                         const audio_config_base_t *config,
+                                         audio_config_base_t *config,
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
                                          input_type_t *inputType,
@@ -261,6 +266,8 @@
                                         unsigned int *num_ports,
                                         struct audio_port_v7 *ports,
                                         unsigned int *generation);
+                status_t listDeclaredDevicePorts(media::AudioPortRole role,
+                                                 std::vector<media::AudioPortFw>* result) override;
         virtual status_t getAudioPort(struct audio_port_v7 *port);
         virtual status_t createAudioPatch(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle,
@@ -297,8 +304,11 @@
                                                    const AudioDeviceTypeAddrVector &devices);
 
         virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
-                                                      device_role_t role);
+                                                      device_role_t role,
+                                                      const AudioDeviceTypeAddrVector &devices);
 
+        virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
+                                                     device_role_t role);
 
         virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
                                                       device_role_t role,
@@ -350,17 +360,18 @@
 
         virtual bool isUltrasoundSupported();
 
+        bool isHotwordStreamSupported(bool lookbackAudio) override;
+
         virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
         {
             return mEngine->listAudioProductStrategies(strategies);
         }
 
         virtual status_t getProductStrategyFromAudioAttributes(
-                const AudioAttributes &aa, product_strategy_t &productStrategy,
+                const audio_attributes_t &aa, product_strategy_t &productStrategy,
                 bool fallbackOnDefault)
         {
-            productStrategy = mEngine->getProductStrategyForAttributes(
-                    aa.getAttributes(), fallbackOnDefault);
+            productStrategy = mEngine->getProductStrategyForAttributes(aa, fallbackOnDefault);
             return (fallbackOnDefault && productStrategy == PRODUCT_STRATEGY_NONE) ?
                     BAD_VALUE : NO_ERROR;
         }
@@ -371,10 +382,9 @@
         }
 
         virtual status_t getVolumeGroupFromAudioAttributes(
-                const AudioAttributes &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
+                const audio_attributes_t &aa, volume_group_t &volumeGroup, bool fallbackOnDefault)
         {
-            volumeGroup = mEngine->getVolumeGroupForAttributes(
-                        aa.getAttributes(), fallbackOnDefault);
+            volumeGroup = mEngine->getVolumeGroupForAttributes(aa, fallbackOnDefault);
             return (fallbackOnDefault && volumeGroup == VOLUME_GROUP_NONE) ?
                     BAD_VALUE : NO_ERROR;
         }
@@ -397,6 +407,21 @@
         virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
                                                          AudioProfileVector& audioProfiles);
 
+        status_t getSupportedMixerAttributes(
+                audio_port_handle_t portId,
+                std::vector<audio_mixer_attributes_t>& mixerAttrs) override;
+        status_t setPreferredMixerAttributes(
+                const audio_attributes_t* attr,
+                audio_port_handle_t portId,
+                uid_t uid,
+                const audio_mixer_attributes_t* mixerAttributes) override;
+        status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+                                             audio_port_handle_t portId,
+                                             audio_mixer_attributes_t* mixerAttributes) override;
+        status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+                                               audio_port_handle_t portId,
+                                               uid_t uid) override;
+
         bool isCallScreenModeSupported() override;
 
         void onNewAudioModulesAvailable() override;
@@ -404,19 +429,7 @@
         status_t initialize();
 
 protected:
-        // A constructor that allows more fine-grained control over initialization process,
-        // used in automatic tests.
-        AudioPolicyManager(AudioPolicyClientInterface *clientInterface, bool forTesting);
-
-        // These methods should be used when finer control over APM initialization
-        // is needed, e.g. in tests. Must be used in conjunction with the constructor
-        // that only performs fields initialization. The public constructor comprises
-        // these steps in the following sequence:
-        //   - field initializing constructor;
-        //   - loadConfig;
-        //   - initialize.
-        AudioPolicyConfig& getConfig() { return mConfig; }
-        void loadConfig();
+        const AudioPolicyConfig& getConfig() const { return *(mConfig.get()); }
 
         // From AudioPolicyManagerObserver
         virtual const AudioPatchCollection &getAudioPatches() const
@@ -450,7 +463,7 @@
         }
         virtual const sp<DeviceDescriptor> &getDefaultOutputDevice() const
         {
-            return mDefaultOutputDevice;
+            return mConfig->getDefaultOutputDevice();
         }
 
         std::vector<volume_group_t> getVolumeGroups() const
@@ -602,7 +615,9 @@
         // true if given state represents a device in a telephony or VoIP call
         virtual bool isStateInCall(int state) const;
         // true if playback to call TX or capture from call RX is possible
-        bool isCallAudioAccessible();
+        bool isCallAudioAccessible() const;
+        // true if device is in a telephony or VoIP call or call screening is active
+        bool isInCallOrScreening() const;
 
         // when a device is connected, checks if an open output can be routed
         // to this device. If none is open, tries to open one of the available outputs.
@@ -639,6 +654,10 @@
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
         }
 
+        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+        }
+
         void connectTelephonyRxAudioSource();
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
@@ -903,6 +922,8 @@
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
         const uid_t mUidCached;                         // AID_AUDIOSERVER
+        sp<const AudioPolicyConfig> mConfig;
+        EngineInstance mEngine;                         // Audio Policy Engine instance
         AudioPolicyClientInterface *mpClientInterface;  // audio policy client interface
         sp<SwAudioOutputDescriptor> mPrimaryOutput;     // primary output descriptor
         // list of descriptors for outputs currently opened
@@ -915,8 +936,6 @@
         SwAudioOutputCollection mPreviousOutputs;
         AudioInputCollection mInputs;     // list of input descriptors
 
-        DeviceVector  mOutputDevicesAll; // all output devices from the config
-        DeviceVector  mInputDevicesAll;  // all input devices from the config
         DeviceVector  mAvailableOutputDevices; // all available output devices
         DeviceVector  mAvailableInputDevices;  // all available input devices
 
@@ -926,11 +945,7 @@
         bool    mA2dpSuspended;  // true if A2DP output is suspended
 
         EffectDescriptorCollection mEffects;  // list of registered audio effects
-        sp<DeviceDescriptor> mDefaultOutputDevice; // output device selected by default at boot time
         HwModuleCollection mHwModules; // contains modules that have been loaded successfully
-        HwModuleCollection mHwModulesAll; // contains all modules declared in the config
-
-        AudioPolicyConfig mConfig;
 
         std::atomic<uint32_t> mAudioPortGeneration;
 
@@ -961,9 +976,6 @@
 
         uint32_t nextAudioPortGeneration();
 
-        // Audio Policy Engine Interface.
-        EngineInstance mEngine;
-
         // Surround formats that are enabled manually. Taken into account when
         // "encoded surround" is forced into "manual" mode.
         std::unordered_set<audio_format_t> mManualSurroundFormats;
@@ -981,6 +993,10 @@
         sp<SourceClientDescriptor> mCallRxSourceClient;
         sp<SourceClientDescriptor> mCallTxSourceClient;
 
+        std::map<audio_port_handle_t,
+                 std::map<product_strategy_t,
+                          sp<PreferredMixerAttributesInfo>>> mPreferredMixerAttrInfos;
+
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
         DeviceVector getMsdAudioOutDevices() const;
@@ -1010,7 +1026,7 @@
 
         // Called by setDeviceConnectionState()
         status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
-                                   const char* device_name, media::AudioPort* aidPort);
+                                   const char* device_name, media::AudioPortFw* aidPort);
         bool isMsdPatch(const audio_patch_handle_t &handle) const;
 
 private:
@@ -1028,13 +1044,16 @@
         void updateAudioProfiles(const sp<DeviceDescriptor>& devDesc, audio_io_handle_t ioHandle,
                 AudioProfileVector &profiles);
 
+        // Notify the policy client to prepare for disconnecting an external device.
+        void prepareToDisconnectExternalDevice(const sp<DeviceDescriptor> &device);
+
         // Notify the policy client of any change of device state with AUDIO_IO_HANDLE_NONE,
         // so that the client interprets it as global to audio hardware interfaces.
         // It can give a chance to HAL implementer to retrieve dynamic capabilities associated
         // to this device for example.
         // TODO avoid opening stream to retrieve capabilities of a profile.
         void broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                            audio_policy_dev_state_t state);
+                                            media::DeviceConnectedState state);
 
         // updates device caching and output for streams that can influence the
         //    routing of notifications
@@ -1052,13 +1071,14 @@
                 const audio_attributes_t *attr,
                 audio_stream_type_t *stream,
                 uid_t uid,
-                const audio_config_t *config,
+                audio_config_t *config,
                 audio_output_flags_t *flags,
                 audio_port_handle_t *selectedDeviceId,
                 bool *isRequestedDeviceForExclusiveUse,
                 std::vector<sp<AudioPolicyMix>> *secondaryMixes,
                 output_type_t *outputType,
-                bool *isSpatialized);
+                bool *isSpatialized,
+                bool *isBitPerfect);
         // internal method to return the output handle for the given device and format
         audio_io_handle_t getOutputForDevices(
                 const DeviceVector &devices,
@@ -1067,6 +1087,7 @@
                 const audio_config_t *config,
                 audio_output_flags_t *flags,
                 bool *isSpatialized,
+                sp<PreferredMixerAttributesInfo> prefMixerAttrInfo = nullptr,
                 bool forceMutingHaptic = false);
 
         // Internal method checking if a direct output can be opened matching the requested
@@ -1099,6 +1120,18 @@
                                       const audio_config_t *config,
                                       const AudioDeviceTypeAddrVector &devices) const;
 
+
+        /**
+         * @brief Gets an IOProfile for a spatializer output that best matches the
+         * provided arguments.
+         * The caller can have the device criteria ignored by passing an empty vector, in which
+         * case getSpatializerOutputProfile() will ignore the devices when looking for a match.
+         * Otherwise, an output profile supporting a spatializer effect that can be routed
+         * to the specified devices must exist.
+         * @param config audio configuration describing the audio format, channels, sample rate...
+         * @param devices the sink audio devices selected for playback
+         * @return an IOProfile that can be used to open a spatializer output.
+         */
         sp<IOProfile> getSpatializerOutputProfile(const audio_config_t *config,
                                                   const AudioDeviceTypeAddrVector &devices) const;
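// Illustrative sketch, not part of this patch: the empty-device convention
// documented above, as it might be exercised from inside AudioPolicyManager.
// The configuration values are assumptions chosen only for illustration.
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
// An empty device vector drops the routing criterion and matches on
// spatializer support alone.
sp<IOProfile> spatializerProfile = getSpatializerOutputProfile(&config, {} /* devices */);
if (spatializerProfile == nullptr) {
    // No declared output profile supports a spatializer effect for this config.
}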
 
@@ -1121,7 +1154,9 @@
          * @param session requester session id
          * @param uid requester uid
          * @param attributes requester audio attributes (e.g. input source and tags matter)
-         * @param config requester audio configuration (e.g. sample rate, format, channel mask).
+         * @param config requested audio configuration (e.g. sample rate, format, channel mask);
+         *               updated in place if the requested configuration is not supported but
+         *               another one is
          * @param flags requester input flags
          * @param policyMix may be null, policy rules to be followed by the requester
          * @return input io handle aka unique input identifier selected for this device.
@@ -1129,7 +1164,7 @@
         audio_io_handle_t getInputForDevice(const sp<DeviceDescriptor> &device,
                 audio_session_t session,
                 const audio_attributes_t &attributes,
-                const audio_config_base_t *config,
+                audio_config_base_t *config,
                 audio_input_flags_t flags,
                 const sp<AudioPolicyMix> &policyMix);
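// Illustrative sketch, not part of this patch: the config is now passed as a
// mutable pointer so the manager can rewrite it when the requested configuration
// is not supported but another one is. The values below and the surrounding
// locals (device, session, attributes, policyMix) are assumptions for illustration.
audio_config_base_t requestedConfig = {
    .sample_rate = 192000,
    .channel_mask = AUDIO_CHANNEL_IN_STEREO,
    .format = AUDIO_FORMAT_PCM_32_BIT,
};
audio_io_handle_t input = getInputForDevice(device, session, attributes,
        &requestedConfig, AUDIO_INPUT_FLAG_NONE, policyMix);
// On return, requestedConfig may hold the configuration that was actually selected.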
 
@@ -1203,7 +1238,8 @@
         bool areAllDevicesSupported(
                 const AudioDeviceTypeAddrVector& devices,
                 std::function<bool(audio_devices_t)> predicate,
-                const char* context);
+                const char* context,
+                bool matchAddress = true);
 
         bool isScoRequestedForComm() const;
 
@@ -1220,11 +1256,15 @@
          * @param[in] profile IOProfile to use as template
          * @param[in] devices initial route to apply to this output stream
          * @param[in] mixerConfig if not null, use this to configure the mixer
+         * @param[in] halConfig if not null, use this to configure the HAL
+         * @param[in] flags the flags to be used to open the output
          * @return an output descriptor for the newly opened stream or null in case of error.
          */
         sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(
                 const sp<IOProfile>& profile, const DeviceVector& devices,
-                const audio_config_base_t *mixerConfig = nullptr);
+                const audio_config_base_t *mixerConfig = nullptr,
+                const audio_config_t *halConfig = nullptr,
+                audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE);
 
         bool isOffloadPossible(const audio_offload_info_t& offloadInfo,
                                bool durationIgnored = false);
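// Illustrative sketch, not part of this patch: how the two new parameters might be
// used when an output has to be (re)opened with a specific HAL configuration.
// `profile` and `devices` are assumed to be valid locals, and the sample rate,
// format and flags are arbitrary example values.
audio_config_t halConfig = AUDIO_CONFIG_INITIALIZER;
halConfig.sample_rate = 48000;
halConfig.format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
sp<SwAudioOutputDescriptor> outputDesc = openOutputWithProfileAndDevice(
        profile, devices, nullptr /* mixerConfig */, &halConfig,
        AUDIO_OUTPUT_FLAG_DIRECT);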
@@ -1248,6 +1288,30 @@
 
         // Filters only the relevant flags for getProfileForOutput
         audio_output_flags_t getRelevantFlags (audio_output_flags_t flags, bool directOnly);
+
+        status_t getDevicesForAttributes(const audio_attributes_t &attr,
+                                         DeviceVector &devices,
+                                         bool forVolume);
+
+        status_t getProfilesForDevices(const DeviceVector& devices,
+                                       AudioProfileVector& audioProfiles,
+                                       uint32_t flags,
+                                       bool isInput);
+
+        sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
+                audio_port_handle_t devicePortId, product_strategy_t strategy);
+
+        sp<SwAudioOutputDescriptor> reopenOutput(
+                sp<SwAudioOutputDescriptor> outputDesc,
+                const audio_config_t *config,
+                audio_output_flags_t flags,
+                const char* caller);
+
+        void reopenOutputsWithDevices(
+                const std::map<audio_io_handle_t, DeviceVector>& outputsToReopen);
+
+        PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+        void invalidateStreams(StreamTypeVector streams) const;
 };
 
 };
diff --git a/services/audiopolicy/managerdefault/EngineLibrary.cpp b/services/audiopolicy/managerdefault/EngineLibrary.cpp
index ef699aa..939fbc5 100644
--- a/services/audiopolicy/managerdefault/EngineLibrary.cpp
+++ b/services/audiopolicy/managerdefault/EngineLibrary.cpp
@@ -23,9 +23,27 @@
 
 namespace android {
 
-// static
-std::shared_ptr<EngineLibrary> EngineLibrary::load(std::string libraryPath)
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const std::string& configXmlFilePath)
 {
+    auto engLib = EngineLibrary::load(librarySuffix);
+    if (!engLib) {
+        ALOGE("%s: Failed to load the engine library, suffix \"%s\"",
+                __func__, librarySuffix.c_str());
+        return nullptr;
+    }
+    auto engine = engLib->createEngineUsingXmlConfig(configXmlFilePath);
+    if (engine == nullptr) {
+        ALOGE("%s: Failed to instantiate the APM engine", __func__);
+        return nullptr;
+    }
+    return engine;
+}
+
+// static
+std::shared_ptr<EngineLibrary> EngineLibrary::load(const std::string& librarySuffix)
+{
+    std::string libraryPath = "libaudiopolicyengine" + librarySuffix + ".so";
     std::shared_ptr<EngineLibrary> engLib(new EngineLibrary());
     return engLib->init(std::move(libraryPath)) ? engLib : nullptr;
 }
@@ -35,6 +53,20 @@
     close();
 }
 
+EngineInstance EngineLibrary::createEngineUsingXmlConfig(const std::string& xmlFilePath) {
+    auto instance = createEngine();
+    if (instance != nullptr) {
+        if (status_t status = instance->loadFromXmlConfigWithFallback(xmlFilePath);
+                status == OK) {
+            return instance;
+        } else {
+            ALOGE("%s: loading of the engine config with XML configuration file \"%s\" failed: %d",
+                    __func__, xmlFilePath.empty() ? "default" : xmlFilePath.c_str(), status);
+        }
+    }
+    return nullptr;
+}
+
 bool EngineLibrary::init(std::string libraryPath)
 {
     mLibraryHandle = dlopen(libraryPath.c_str(), 0);
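// Illustrative sketch, not part of this patch: the engine is now requested by
// suffix, which EngineLibrary::load() expands to "libaudiopolicyengine<suffix>.so".
// The suffix normally comes from the policy configuration, as the
// AudioPolicyService change later in this patch does.
auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
EngineInstance engine = loadApmEngineLibraryAndCreateEngine(
        config->getEngineLibraryNameSuffix());  // default engine XML config path
if (engine == nullptr) {
    // Either dlopen() of the engine library or loading its XML configuration
    // failed; the helpers above have already logged the reason.
}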
diff --git a/services/audiopolicy/managerdefault/EngineLibrary.h b/services/audiopolicy/managerdefault/EngineLibrary.h
index f143916..dc138a1 100644
--- a/services/audiopolicy/managerdefault/EngineLibrary.h
+++ b/services/audiopolicy/managerdefault/EngineLibrary.h
@@ -26,9 +26,12 @@
 
 using EngineInstance = std::unique_ptr<EngineInterface, std::function<void (EngineInterface*)>>;
 
+EngineInstance loadApmEngineLibraryAndCreateEngine(const std::string& librarySuffix,
+        const std::string& configXmlFilePath = "");
+
 class EngineLibrary : public std::enable_shared_from_this<EngineLibrary> {
 public:
-    static std::shared_ptr<EngineLibrary> load(std::string libraryPath);
+    static std::shared_ptr<EngineLibrary> load(const std::string& librarySuffix);
     ~EngineLibrary();
 
     EngineLibrary(const EngineLibrary&) = delete;
@@ -36,11 +39,12 @@
     EngineLibrary& operator=(const EngineLibrary&) = delete;
     EngineLibrary& operator=(EngineLibrary&&) = delete;
 
-    EngineInstance createEngine();
+    EngineInstance createEngineUsingXmlConfig(const std::string& xmlFilePath);
 
 private:
     EngineLibrary() = default;
     bool init(std::string libraryPath);
+    EngineInstance createEngine();
     void close();
 
     void *mLibraryHandle = nullptr;
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index cdad9a6..2f677da 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -10,6 +10,10 @@
 cc_library_shared {
     name: "libaudiopolicyservice",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     srcs: [
         "AudioPolicyClientImpl.cpp",
         "AudioPolicyEffects.cpp",
@@ -32,6 +36,7 @@
         "libaudiohal",
         "libaudiopolicy",
         "libaudiopolicymanagerdefault",
+        "libaudiousecasevalidation",
         "libaudioutils",
         "libbinder",
         "libcutils",
@@ -47,9 +52,9 @@
         "libsensor",
         "libsensorprivacy",
         "libshmemcompat",
-        "libutils",
         "libstagefright_foundation",
-        "android.media.audio.common.types-V1-cpp",
+        "libutils",
+        "libxml2",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -82,6 +87,7 @@
 
     export_shared_lib_headers: [
         "libactivitymanager_aidl",
+        "libaudiousecasevalidation",
         "libheadtracking",
         "libheadtracking-binding",
         "libsensorprivacy",
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index c766a15..13e682b 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -27,6 +27,18 @@
 
 /* implementation of the client interface from the policy manager */
 
+status_t AudioPolicyService::AudioPolicyClient::getAudioPolicyConfig(
+        media::AudioPolicyConfig *config)
+{
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+
+    return af->getAudioPolicyConfig(config);
+}
+
 audio_module_handle_t AudioPolicyService::AudioPolicyClient::loadHwModule(const char *name)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
@@ -69,6 +81,12 @@
         *halConfig = VALUE_OR_RETURN_STATUS(
                 aidl2legacy_AudioConfig_audio_config_t(response.config, false /*isInput*/));
         *latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
+
+        audio_config_base_t config = {.sample_rate = halConfig->sample_rate,
+            .channel_mask = halConfig->channel_mask,
+            .format = halConfig->format,
+        };
+        mAudioPolicyService->registerOutput(*output, config, flags);
     }
     return status;
 }
@@ -91,7 +109,7 @@
     if (af == 0) {
         return PERMISSION_DENIED;
     }
-
+    mAudioPolicyService->unregisterOutput(output);
     return af->closeOutput(output);
 }
 
@@ -168,16 +186,6 @@
                                                delay_ms);
 }
 
-status_t AudioPolicyService::AudioPolicyClient::invalidateStream(audio_stream_type_t stream)
-{
-    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
-    if (af == 0) {
-        return PERMISSION_DENIED;
-    }
-
-    return af->invalidateStream(stream);
-}
-
 void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
                    const String8& keyValuePairs,
                    int delay_ms)
@@ -313,14 +321,23 @@
 }
 
 status_t AudioPolicyService::AudioPolicyClient::setDeviceConnectedState(
-        const struct audio_port_v7 *port, bool connected) {
+        const struct audio_port_v7 *port, media::DeviceConnectedState state) {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == nullptr) {
         ALOGW("%s: could not get AudioFlinger", __func__);
         return PERMISSION_DENIED;
     }
-    return af->setDeviceConnectedState(port, connected);
+    return af->setDeviceConnectedState(port, state);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::invalidateTracks(
+        const std::vector<audio_port_handle_t>& portIds) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == 0) {
+        return PERMISSION_DENIED;
+    }
+
+    return af->invalidateTracks(portIds);
+}
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 70fdfcb..70a1785 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -41,26 +41,25 @@
 // AudioPolicyEffects Implementation
 // ----------------------------------------------------------------------------
 
-AudioPolicyEffects::AudioPolicyEffects()
-{
-    status_t loadResult = loadAudioEffectXmlConfig();
+AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+    // load xml config with effectsFactoryHal
+    status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
     if (loadResult == NO_ERROR) {
-        mDefaultDeviceEffectFuture = std::async(
-                    std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+        mDefaultDeviceEffectFuture =
+                std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
     } else if (loadResult < 0) {
-        ALOGW("Failed to load XML effect configuration, fallback to .conf");
+        ALOGW("Failed to query effect configuration, falling back to .conf");
         // load automatic audio effect modules
         if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfig(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
+            loadAudioEffectConfigLegacy(AUDIO_EFFECT_VENDOR_CONFIG_FILE);
         } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) {
-            loadAudioEffectConfig(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
+            loadAudioEffectConfigLegacy(AUDIO_EFFECT_DEFAULT_CONFIG_FILE);
         }
     } else if (loadResult > 0) {
         ALOGE("Effect config is partially invalid, skipped %d elements", loadResult);
     }
 }
 
-
 AudioPolicyEffects::~AudioPolicyEffects()
 {
     size_t i = 0;
@@ -127,7 +126,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(NULL, &effect->mUuid, -1, 0, 0, audioSession, input);
+            fx->set(nullptr /*type */, &effect->mUuid, -1 /* priority */, nullptr /* callback */,
+                    audioSession, input);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGW("addInputEffects(): failed to create Fx %s on source %d",
@@ -279,7 +279,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(NULL, &effect->mUuid, 0, 0, 0, audioSession, output);
+            fx->set(nullptr /* type */, &effect->mUuid, 0 /* priority */, nullptr /* callback */,
+                    audioSession, output);
             status_t status = fx->initCheck();
             if (status != NO_ERROR && status != ALREADY_EXISTS) {
                 ALOGE("addOutputSessionEffects(): failed to create Fx  %s on session %d",
@@ -905,30 +906,35 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectXmlConfig() {
-    auto result = effectsConfig::parse();
-    if (result.parsedConfig == nullptr) {
-        return -ENOENT;
+status_t AudioPolicyEffects::loadAudioEffectConfig(
+        const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
+    if (!effectsFactoryHal) {
+        ALOGE("%s Null EffectsFactoryHalInterface", __func__);
+        return UNEXPECTED_NULL;
+    }
+
+    const auto skippedElements = VALUE_OR_RETURN_STATUS(effectsFactoryHal->getSkippedElements());
+    const auto processings = effectsFactoryHal->getProcessings();
+    if (!processings) {
+        ALOGE("%s Null processings with %zu skipped elements", __func__, skippedElements);
+        return UNEXPECTED_NULL;
     }
 
     auto loadProcessingChain = [](auto& processingChain, auto& streams) {
         for (auto& stream : processingChain) {
             auto effectDescs = std::make_unique<EffectDescVector>();
             for (auto& effect : stream.effects) {
-                effectDescs->mEffects.add(
-                        new EffectDesc{effect.get().name.c_str(), effect.get().uuid});
+                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
             }
             streams.add(stream.type, effectDescs.release());
         }
     };
 
-    auto loadDeviceProcessingChain = [](auto &processingChain, auto& devicesEffects) {
+    auto loadDeviceProcessingChain = [](auto& processingChain, auto& devicesEffects) {
         for (auto& deviceProcess : processingChain) {
-
             auto effectDescs = std::make_unique<EffectDescVector>();
             for (auto& effect : deviceProcess.effects) {
-                effectDescs->mEffects.add(
-                        new EffectDesc{effect.get().name.c_str(), effect.get().uuid});
+                effectDescs->mEffects.add(new EffectDesc{effect->name.c_str(), effect->uuid});
             }
             auto deviceEffects = std::make_unique<DeviceEffects>(
                         std::move(effectDescs), deviceProcess.type, deviceProcess.address);
@@ -936,17 +942,18 @@
         }
     };
 
-    loadProcessingChain(result.parsedConfig->preprocess, mInputSources);
-    loadProcessingChain(result.parsedConfig->postprocess, mOutputStreams);
+    loadProcessingChain(processings->preprocess, mInputSources);
+    loadProcessingChain(processings->postprocess, mOutputStreams);
+
     {
         Mutex::Autolock _l(mLock);
-        loadDeviceProcessingChain(result.parsedConfig->deviceprocess, mDeviceEffects);
+        loadDeviceProcessingChain(processings->deviceprocess, mDeviceEffects);
     }
-    // Casting from ssize_t to status_t is probably safe, there should not be more than 2^31 errors
-    return result.nbSkippedElement;
+
+    return skippedElements;
 }
 
-status_t AudioPolicyEffects::loadAudioEffectConfig(const char *path)
+status_t AudioPolicyEffects::loadAudioEffectConfigLegacy(const char *path)
 {
     cnode *root;
     char *data;
@@ -984,8 +991,8 @@
             attributionSource.packageName = "android";
             attributionSource.token = sp<BBinder>::make();
             sp<AudioEffect> fx = new AudioEffect(attributionSource);
-            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
-                    nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
+            fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0 /* priority */, nullptr /* callback */,
+                    AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
                     AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
                                         deviceEffects->getDeviceAddress()});
             status_t status = fx->initCheck();
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 13d5d0c..9f65a96 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -20,22 +20,33 @@
 #include <stdlib.h>
 #include <stdio.h>
 #include <string.h>
+#include <future>
+
+#include <android-base/thread_annotations.h>
 #include <cutils/misc.h>
 #include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <system/audio.h>
 #include <utils/Vector.h>
 #include <utils/SortedVector.h>
-#include <android-base/thread_annotations.h>
-
-#include <future>
 
 namespace android {
 
 // ----------------------------------------------------------------------------
 
-// AudioPolicyEffects class
-// This class will manage all effects attached to input and output streams in
-// AudioPolicyService as configured in audio_effects.conf.
+/**
+ * AudioPolicyEffects class.
+ *
+ * This class manages all effects attached to input and output streams in AudioPolicyService.
+ * The effect configurations can be queried in several ways:
+ *
+ * With the HIDL HAL, the configuration file `audio_effects.xml` will be loaded by libAudioHal. If
+ * this file does not exist, the AudioPolicyEffects class will fall back to loading the
+ * configuration from `/vendor/etc/audio_effects.conf` (AUDIO_EFFECT_VENDOR_CONFIG_FILE). If this
+ * file also does not exist, the configuration will be loaded from `/system/etc/audio_effects.conf`.
+ *
+ * With the AIDL HAL, the configuration will be queried via `IFactory::queryProcessing()`.
+ */
 class AudioPolicyEffects : public RefBase
 {
 
@@ -44,7 +55,7 @@
     // The constructor will parse audio_effects.conf
     // First it will look whether vendor specific file exists,
     // otherwise it will parse the system default file.
-	         AudioPolicyEffects();
+    explicit AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
     virtual ~AudioPolicyEffects();
 
     // NOTE: methods on AudioPolicyEffects should never be called with the AudioPolicyService
@@ -218,7 +229,6 @@
 
     };
 
-
     static const char * const kInputSourceNames[AUDIO_SOURCE_CNT -1];
     static audio_source_t inputSourceNameToEnum(const char *name);
 
@@ -226,8 +236,8 @@
     audio_stream_type_t streamNameToEnum(const char *name);
 
     // Parse audio_effects.conf
-    status_t loadAudioEffectConfig(const char *path); // TODO: add legacy in the name
-    status_t loadAudioEffectXmlConfig(); // TODO: remove "Xml" in the name
+    status_t loadAudioEffectConfigLegacy(const char *path);
+    status_t loadAudioEffectConfig(const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
 
     // Load all effects descriptors in configuration file
     status_t loadEffects(cnode *root, Vector <EffectDesc *>& effects);
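// Illustrative sketch, not part of this patch: construction now follows the
// AudioPolicyService change later in this patch, where the effects factory HAL is
// created once and handed to AudioPolicyEffects, which then resolves the processing
// configuration in the order described in the class comment above.
const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);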
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index df49bba..2e7b3ff 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -301,7 +301,8 @@
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
 
-    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
+    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT
+          && stream != AUDIO_STREAM_ASSISTANT && stream != AUDIO_STREAM_CALL_ASSISTANT) {
         *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(AUDIO_IO_HANDLE_NONE));
         return Status::ok();
@@ -352,31 +353,20 @@
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
-    // TODO b/182392553: refactor or remove
-    AttributionSourceState adjAttributionSource = attributionSource;
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
-        int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
-            legacy2aidl_uid_t_int32_t(callingUid));
-        ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
-                "%s uid %d tried to pass itself off as %d", __func__,
-                callingUidAidl, attributionSource.uid);
-        adjAttributionSource.uid = callingUidAidl;
-    }
     if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
-        aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+        aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+            && !bypassInterruptionPolicyAllowed(attributionSource)) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -384,14 +374,16 @@
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized = false;
+    bool isBitPerfect = false;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
-                                                            adjAttributionSource,
+                                                            attributionSource,
                                                             &config,
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
                                                             &outputType,
-                                                            &isSpatialized);
+                                                            &isSpatialized,
+                                                            &isBitPerfect);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -401,20 +393,20 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(adjAttributionSource)) {
+                && !callAudioInterceptionAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+            } else if (!modifyPhoneStateAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
+            if (!modifyAudioRoutingAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
@@ -426,8 +418,11 @@
     }
 
     if (result == NO_ERROR) {
+        attr = VALUE_OR_RETURN_BINDER_STATUS(
+                mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+
         sp<AudioPlaybackClient> client =
-                new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+                new AudioPlaybackClient(attr, output, attributionSource, session,
                     portId, selectedDeviceId, stream, isSpatialized);
         mAudioPlaybackClients.add(portId, client);
 
@@ -443,6 +438,16 @@
                 convertContainer<std::vector<int32_t>>(secondaryOutputs,
                                                        legacy2aidl_audio_io_handle_t_int32_t));
         _aidl_return->isSpatialized = isSpatialized;
+        _aidl_return->isBitPerfect = isBitPerfect;
+        _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
+    } else {
+        _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
+        _aidl_return->configBase.channelMask = VALUE_OR_RETURN_BINDER_STATUS(
+                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                        config.channel_mask, false /*isInput*/));
+        _aidl_return->configBase.sampleRate = config.sample_rate;
     }
     return binderStatusFromStatusT(result);
 }
@@ -471,14 +476,14 @@
     }
     ALOGV("startOutput()");
     sp<AudioPlaybackClient> client;
-    sp<AudioPolicyEffects>audioPolicyEffects;
+    sp<AudioPolicyEffects> audioPolicyEffects;
 
     getPlaybackClientAndEffects(portId, client, audioPolicyEffects, __func__);
 
     if (audioPolicyEffects != 0) {
         // create audio processors according to stream
-        status_t status = audioPolicyEffects->addOutputSessionEffects(
-            client->io, client->stream, client->session);
+        status_t status = audioPolicyEffects->addOutputSessionEffects(client->io, client->stream,
+                                                                      client->session);
         if (status != NO_ERROR && status != ALREADY_EXISTS) {
             ALOGW("Failed to add effects on session %d", client->session);
         }
@@ -487,6 +492,10 @@
     AutoCallerClear acc;
     status_t status = mAudioPolicyManager->startOutput(portId);
     if (status == NO_ERROR) {
+        //TODO b/257922898: decide if/how we need to handle attributes update when playback starts
+        // or during playback
+        (void)mUsecaseValidator->startClient(client->io, client->portId, client->attributionSource,
+                client->attributes, nullptr /* callback */);
         client->active = true;
         onUpdateActiveSpatializerTracks_l();
     }
@@ -527,6 +536,7 @@
     if (status == NO_ERROR) {
         client->active = false;
         onUpdateActiveSpatializerTracks_l();
+        mUsecaseValidator->stopClient(client->io, client->portId);
     }
     return status;
 }
@@ -613,33 +623,8 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    // Make sure attribution source represents the current caller
-    AttributionSourceState adjAttributionSource = attributionSource;
-    // TODO b/182392553: refactor or remove
-    bool updatePid = (attributionSource.pid == -1);
-    const uid_t callingUid =IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
-            attributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
-                currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
-                callingUid));
-        updatePid = true;
-    }
-
-    if (updatePid) {
-        const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
-            IPCThreadState::self()->getCallingPid()));
-        ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
-        adjAttributionSource.pid = callingPid;
-    }
-
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            adjAttributionSource)));
+            attributionSource)));
 
     // check calling permissions.
     // Capturing from the following sources does not require permission RECORD_AUDIO
@@ -650,17 +635,17 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(adjAttributionSource, inputSource)
+    if (!(recordingAllowed(attributionSource, inputSource)
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
         ALOGE("%s permission denied: recording not allowed for %s",
-                __func__, adjAttributionSource.toString().c_str());
+                __func__, attributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -674,26 +659,28 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(adjAttributionSource)) {
+        && !captureTunerAudioInputAllowed(attributionSource)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
+    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
+    if (((flags & (AUDIO_INPUT_FLAG_HW_HOTWORD |
+                        AUDIO_INPUT_FLAG_HOTWORD_TAP |
+                        AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0)
             && !canCaptureHotword) {
         ALOGE("%s: permission denied: hotword mode not allowed"
-              " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+              " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -708,7 +695,7 @@
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
             status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
-                                                          adjAttributionSource, &config,
+                                                          attributionSource, &config,
                                                           flags, &selectedDeviceId,
                                                           &inputType, &portId);
 
@@ -737,7 +724,7 @@
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+                if (!(modifyAudioRoutingAllowed(attributionSource)
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
@@ -755,12 +742,15 @@
             if (status == PERMISSION_DENIED) {
                 AutoCallerClear acc;
                 mAudioPolicyManager->releaseInput(portId);
+            } else {
+                _aidl_return->config = VALUE_OR_RETURN_BINDER_STATUS(
+                        legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
             }
             return binderStatusFromStatusT(status);
         }
 
         sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
-                                                             selectedDeviceId, adjAttributionSource,
+                                                             selectedDeviceId, attributionSource,
                                                              canCaptureOutput, canCaptureHotword,
                                                              mOutputCommandThread);
         mAudioRecordClients.add(portId, client);
@@ -828,8 +818,29 @@
 
     Mutex::Autolock _l(mLock);
 
+    ALOGW_IF(client->silenced, "startInput on silenced input for port %d, uid %d. Unsilencing.",
+            portIdAidl,
+            client->attributionSource.uid);
+
+    if (client->active) {
+        ALOGE("Client should never be active before startInput. Uid %d port %d",
+                client->attributionSource.uid, portId);
+        finishRecording(client->attributionSource, client->attributes.source);
+        return binderStatusFromStatusT(INVALID_OPERATION);
+    }
+
+    // Force the possibly silenced client to be unsilenced since we just called
+    // startRecording (i.e. we have assumed it is unsilenced).
+    // At this point in time, the client is inactive, so no calls to appops are sent in
+    // setAppState_l.
+    // This ensures existing clients have the same behavior as new clients (starting unsilenced).
+    // TODO(b/282076713)
+    setAppState_l(client, APP_STATE_TOP);
+
     client->active = true;
     client->startTimeNs = systemTime();
+    // This call updates the silenced state, and since we are active, appropriately notifies appops
+    // if we silence the track.
     updateUidStates_l();
 
     status_t status;
@@ -1166,12 +1177,12 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesEx& attrAidl,
+Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesInternal& attrAidl,
                                                    bool forVolume,
                                                    std::vector<AudioDevice>* _aidl_return)
 {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(attrAidl));
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
     AudioDeviceTypeAddrVector devices;
 
     if (mAudioPolicyManager == NULL) {
@@ -1180,8 +1191,7 @@
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->getDevicesForAttributes(
-                    aa.getAttributes(), &devices, forVolume)));
+            mAudioPolicyManager->getDevicesForAttributes(aa, &devices, forVolume)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<std::vector<AudioDevice>>(devices,
                                                        legacy2aidl_AudioDeviceTypeAddress));
@@ -1521,7 +1531,7 @@
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
-                                          std::vector<media::AudioPort>* portsAidl,
+                                          std::vector<media::AudioPortFw>* portsAidl,
                                           int32_t* _aidl_return) {
     audio_port_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPortRole_audio_port_role_t(roleAidl));
@@ -1546,14 +1556,25 @@
     numPortsReq = std::min(numPortsReq, num_ports);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
-                         legacy2aidl_audio_port_v7_AudioPort)));
+                         legacy2aidl_audio_port_v7_AudioPortFw)));
     count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_ports));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
     return Status::ok();
 }
 
+Status AudioPolicyService::listDeclaredDevicePorts(media::AudioPortRole role,
+                                                    std::vector<media::AudioPortFw>* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(mAudioPolicyManager->listDeclaredDevicePorts(
+                    role, _aidl_return));
+}
+
 Status AudioPolicyService::getAudioPort(int portId,
-                                        media::AudioPort* _aidl_return) {
+                                        media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
@@ -1561,14 +1582,15 @@
     }
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
-    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPort(port));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
     return Status::ok();
 }
 
-Status AudioPolicyService::createAudioPatch(const media::AudioPatch& patchAidl, int32_t handleAidl,
+Status AudioPolicyService::createAudioPatch(const media::AudioPatchFw& patchAidl,
+                                            int32_t handleAidl,
                                             int32_t* _aidl_return) {
     audio_patch patch = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioPatch_audio_patch(patchAidl));
+            aidl2legacy_AudioPatchFw_audio_patch(patchAidl));
     audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_port_handle_t(handleAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
@@ -1606,7 +1628,7 @@
 }
 
 Status AudioPolicyService::listAudioPatches(Int* count,
-                                            std::vector<media::AudioPatch>* patchesAidl,
+                                            std::vector<media::AudioPatchFw>* patchesAidl,
                                             int32_t* _aidl_return) {
     unsigned int num_patches = VALUE_OR_RETURN_BINDER_STATUS(
             convertIntegral<unsigned int>(count->value));
@@ -1627,16 +1649,16 @@
     numPatchesReq = std::min(numPatchesReq, num_patches);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(patches.get(), patches.get() + numPatchesReq,
-                         std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatch)));
+                         std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
     count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_patches));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
     return Status::ok();
 }
 
-Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfig& configAidl)
+Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfigFw& configAidl)
 {
     audio_port_config config = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioPortConfig_audio_port_config(configAidl));
+            aidl2legacy_AudioPortConfigFw_audio_port_config(configAidl));
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
 
@@ -1806,11 +1828,11 @@
     return binderStatusFromStatusT(mAudioPolicyManager->removeUserIdDeviceAffinities(userId));
 }
 
-Status AudioPolicyService::startAudioSource(const media::AudioPortConfig& sourceAidl,
+Status AudioPolicyService::startAudioSource(const media::AudioPortConfigFw& sourceAidl,
                                             const media::AudioAttributesInternal& attributesAidl,
                                             int32_t* _aidl_return) {
     audio_port_config source = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioPortConfig_audio_port_config(sourceAidl));
+            aidl2legacy_AudioPortConfigFw_audio_port_config(sourceAidl));
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
     audio_port_handle_t portId;
@@ -2056,6 +2078,17 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::isHotwordStreamSupported(bool lookbackAudio, bool* _aidl_return)
+{
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    *_aidl_return = mAudioPolicyManager->isHotwordStreamSupported(lookbackAudio);
+    return Status::ok();
+}
+
 Status AudioPolicyService::listAudioProductStrategies(
         std::vector<media::AudioProductStrategy>* _aidl_return) {
     AudioProductStrategyVector strategies;
@@ -2074,9 +2107,10 @@
 }
 
 Status AudioPolicyService::getProductStrategyFromAudioAttributes(
-        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+        const media::AudioAttributesInternal& aaAidl,
+        bool fallbackOnDefault, int32_t* _aidl_return) {
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
     product_strategy_t productStrategy;
 
     if (mAudioPolicyManager == NULL) {
@@ -2107,9 +2141,10 @@
 }
 
 Status AudioPolicyService::getVolumeGroupFromAudioAttributes(
-        const media::AudioAttributesEx& aaAidl, bool fallbackOnDefault, int32_t* _aidl_return) {
-    AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioAttributesEx_AudioAttributes(aaAidl));
+        const media::AudioAttributesInternal& aaAidl,
+        bool fallbackOnDefault, int32_t* _aidl_return) {
+    audio_attributes_t aa = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(aaAidl));
     volume_group_t volumeGroup;
 
     if (mAudioPolicyManager == NULL) {
@@ -2165,8 +2200,31 @@
     return binderStatusFromStatusT(status);
 }
 
-Status AudioPolicyService::removeDevicesRoleForStrategy(int32_t strategyAidl,
-                                                        media::DeviceRole roleAidl) {
+Status AudioPolicyService::removeDevicesRoleForStrategy(
+        int32_t strategyAidl,
+        media::DeviceRole roleAidl,
+        const std::vector<AudioDevice>& devicesAidl) {
+    product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+    device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_DeviceRole_device_role_t(roleAidl));
+    AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+                                                        aidl2legacy_AudioDeviceTypeAddress));
+
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    status_t status = mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role, devices);
+    if (status == NO_ERROR) {
+       onCheckSpatializer_l();
+    }
+    return binderStatusFromStatusT(status);
+}
+
+Status AudioPolicyService::clearDevicesRoleForStrategy(int32_t strategyAidl,
+                                                           media::DeviceRole roleAidl) {
      product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_product_strategy_t(strategyAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2175,7 +2233,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
     Mutex::Autolock _l(mLock);
-    status_t status = mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role);
+    status_t status = mAudioPolicyManager->clearDevicesRoleForStrategy(strategy, role);
     if (status == NO_ERROR) {
        onCheckSpatializer_l();
     }
@@ -2390,4 +2448,89 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::getSupportedMixerAttributes(
+        int32_t portIdAidl, std::vector<media::AudioMixerAttributesInternal>* _aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    std::vector<audio_mixer_attributes_t> mixerAttrs;
+    Mutex::Autolock _l(mLock);
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->getSupportedMixerAttributes(
+                    portId, mixerAttrs)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::AudioMixerAttributesInternal>>(
+                    mixerAttrs,
+                    legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal));
+    return Status::ok();
+}
+
+Status AudioPolicyService::setPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        int32_t uidAidl,
+        const media::AudioMixerAttributesInternal& mixerAttrAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_mixer_attributes_t mixerAttr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(mixerAttrAidl));
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setPreferredMixerAttributes(&attr, portId, uid, &mixerAttr));
+}
+
+Status AudioPolicyService::getPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        std::optional<media::AudioMixerAttributesInternal>* _aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    audio_mixer_attributes_t mixerAttr = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+    RETURN_IF_BINDER_ERROR(
+            binderStatusFromStatusT(mAudioPolicyManager->getPreferredMixerAttributes(
+                    &attr, portId, &mixerAttr)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(mixerAttr));
+    return Status::ok();
+}
+
+Status AudioPolicyService::clearPreferredMixerAttributes(
+        const media::AudioAttributesInternal& attrAidl,
+        int32_t portIdAidl,
+        int32_t uidAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    audio_attributes_t  attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+    Mutex::Autolock _l(mLock);
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index e7d945f..9367949 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -46,6 +46,7 @@
 
 #include <system/audio.h>
 #include <system/audio_policy.h>
+#include <AudioPolicyConfig.h>
 #include <AudioPolicyManager.h>
 
 namespace android {
@@ -137,6 +138,7 @@
 BINDER_METHOD_ENTRY(setCurrentImeUid) \
 BINDER_METHOD_ENTRY(isHapticPlaybackSupported) \
 BINDER_METHOD_ENTRY(isUltrasoundSupported) \
+BINDER_METHOD_ENTRY(isHotwordStreamSupported) \
 BINDER_METHOD_ENTRY(listAudioProductStrategies) \
 BINDER_METHOD_ENTRY(getProductStrategyFromAudioAttributes) \
 BINDER_METHOD_ENTRY(listAudioVolumeGroups) \
@@ -145,6 +147,7 @@
 BINDER_METHOD_ENTRY(isCallScreenModeSupported) \
 BINDER_METHOD_ENTRY(setDevicesRoleForStrategy) \
 BINDER_METHOD_ENTRY(removeDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(clearDevicesRoleForStrategy) \
 BINDER_METHOD_ENTRY(getDevicesForRoleAndStrategy) \
 BINDER_METHOD_ENTRY(setDevicesRoleForCapturePreset) \
 BINDER_METHOD_ENTRY(addDevicesRoleForCapturePreset) \
@@ -155,7 +158,11 @@
 BINDER_METHOD_ENTRY(getSpatializer) \
 BINDER_METHOD_ENTRY(canBeSpatialized) \
 BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
-BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes)  \
+BINDER_METHOD_ENTRY(getSupportedMixerAttributes) \
+BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
 
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
@@ -179,7 +186,10 @@
 
 static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
 {
-    AudioPolicyManager *apm = new AudioPolicyManager(clientInterface);
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();  // This can't fail.
+    AudioPolicyManager *apm = new AudioPolicyManager(
+            config, loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+            clientInterface);
     status_t status = apm->initialize();
     if (status != NO_ERROR) {
         delete apm;
@@ -201,7 +211,9 @@
       mPhoneState(AUDIO_MODE_INVALID),
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
-      mDestroyAudioPolicyManager(destroyAudioPolicyManager) {
+      mDestroyAudioPolicyManager(destroyAudioPolicyManager),
+      mUsecaseValidator(media::createUsecaseValidator()) {
+      setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 void AudioPolicyService::loadAudioPolicyManager()
@@ -251,7 +263,8 @@
     }
 
     // load audio processing modules
-    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects();
+    const sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
+    sp<AudioPolicyEffects> audioPolicyEffects = new AudioPolicyEffects(effectsFactoryHal);
     sp<UidPolicy> uidPolicy = new UidPolicy(this);
     sp<SensorPrivacyPolicy> sensorPrivacyPolicy = new SensorPrivacyPolicy(this);
     {
@@ -270,7 +283,12 @@
         AudioDeviceTypeAddrVector devices;
         bool hasSpatializer = mAudioPolicyManager->canBeSpatialized(&attr, nullptr, devices);
         if (hasSpatializer) {
-            mSpatializer = Spatializer::create(this);
+            mSpatializer = Spatializer::create(this, effectsFactoryHal);
+        }
+        if (mSpatializer == nullptr) {
+            // No spatializer created; report why: NO_INIT means failure, OK means it was intended.
+            const status_t createStatus = hasSpatializer ? NO_INIT : OK;
+            Spatializer::sendEmptyCreateSpatializerMetricWithStatus(createStatus);
         }
     }
     AudioSystem::audioPolicyReady();
@@ -516,9 +534,8 @@
 
 void AudioPolicyService::doOnCheckSpatializer()
 {
-    Mutex::Autolock _l(mLock);
-
-    ALOGI("%s mSpatializer %p level %d", __func__, mSpatializer.get(), (int)mSpatializer->getLevel());
+    ALOGV("%s mSpatializer %p level %d",
+        __func__, mSpatializer.get(), (int)mSpatializer->getLevel());
 
     if (mSpatializer != nullptr) {
         // Note: mSpatializer != nullptr =>  mAudioPolicyManager != nullptr
@@ -527,6 +544,8 @@
             audio_io_handle_t newOutput;
             const audio_attributes_t attr = attributes_initializer(AUDIO_USAGE_MEDIA);
             audio_config_base_t config = mSpatializer->getAudioInConfig();
+
+            Mutex::Autolock _l(mLock);
             status_t status =
                     mAudioPolicyManager->getSpatializerOutput(&config, &attr, &newOutput);
             ALOGV("%s currentOutput %d newOutput %d channel_mask %#x",
@@ -538,21 +557,19 @@
             mLock.unlock();
             // It is OK to call detachOutput() even if no output is currently attached.
             mSpatializer->detachOutput();
-            if (status != NO_ERROR || newOutput == AUDIO_IO_HANDLE_NONE) {
-                mLock.lock();
-                return;
+            if (status == NO_ERROR && newOutput != AUDIO_IO_HANDLE_NONE) {
+                status = mSpatializer->attachOutput(newOutput, numActiveTracks);
             }
-            status = mSpatializer->attachOutput(newOutput, numActiveTracks);
             mLock.lock();
             if (status != NO_ERROR) {
                 mAudioPolicyManager->releaseSpatializerOutput(newOutput);
             }
         } else if (mSpatializer->getLevel() == media::SpatializationLevel::NONE
                                && mSpatializer->getOutput() != AUDIO_IO_HANDLE_NONE) {
-            mLock.unlock();
             audio_io_handle_t output = mSpatializer->detachOutput();
-            mLock.lock();
+
             if (output != AUDIO_IO_HANDLE_NONE) {
+                Mutex::Autolock _l(mLock);
                 mAudioPolicyManager->releaseSpatializerOutput(output);
             }
         }
@@ -581,19 +598,16 @@
 
 void AudioPolicyService::doOnUpdateActiveSpatializerTracks()
 {
-    sp<Spatializer> spatializer;
+    if (mSpatializer == nullptr) {
+        return;
+    }
+    audio_io_handle_t output = mSpatializer->getOutput();
     size_t activeClients;
     {
         Mutex::Autolock _l(mLock);
-        if (mSpatializer == nullptr) {
-            return;
-        }
-        spatializer = mSpatializer;
-        activeClients = countActiveClientsOnOutput_l(mSpatializer->getOutput());
+        activeClients = countActiveClientsOnOutput_l(output);
     }
-    if (spatializer != nullptr) {
-        spatializer->updateActiveTracks(activeClients);
-    }
+    mSpatializer->updateActiveTracks(activeClients);
 }
 
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
@@ -1005,10 +1019,11 @@
         //     AND is on TOP or latest started
         //     AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+        bool allowSensitiveCapture =
+            !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
         bool allowCapture = !isAssistantOnTop
                 && (isTopOrLatestActive || isTopOrLatestSensitive)
-                && !(isSensitiveActive
-                    && !(isTopOrLatestSensitive || current->canCaptureOutput))
+                && allowSensitiveCapture
                 && canCaptureIfInCallOrCommunication(current);
 
         if (!current->hasOp()) {
@@ -1031,7 +1046,7 @@
                 if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                     allowCapture = true;
                 }
-            } else if (!(isSensitiveActive && !current->canCaptureOutput)
+            } else if (allowSensitiveCapture
                     && canCaptureIfInCallOrCommunication(current)) {
                 if (isTopOrLatestAssistant
                     && (source == AUDIO_SOURCE_VOICE_RECOGNITION
@@ -1052,7 +1067,7 @@
                 if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                     allowCapture = true;
                 }
-            } else if (!(isSensitiveActive && !current->canCaptureOutput)
+            } else if (allowSensitiveCapture
                         && canCaptureIfInCallOrCommunication(current)) {
                 if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
                 {
@@ -1067,7 +1082,7 @@
             //     OR
             //         Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
             if (!isAssistantOnTop
-                    && !(isSensitiveActive && !current->canCaptureOutput)
+                    && allowSensitiveCapture
                     && canCaptureIfInCallOrCommunication(current)) {
                 allowCapture = true;
             }
@@ -1215,6 +1230,14 @@
 
         dumpReleaseLock(mLock, locked);
 
+        if (mSpatializer != nullptr) {
+            std::string dumpString = mSpatializer->toString(1 /* level */);
+            write(fd, dumpString.c_str(), dumpString.size());
+        } else {
+            String8 spatializerPtr = String8::format("Spatializer not supported on this device\n");
+            write(fd, spatializerPtr.c_str(), spatializerPtr.size());
+        }
+
         {
             std::string timeCheckStats = getIAudioPolicyServiceStatistics().dump();
             dprintf(fd, "\nIAudioPolicyService binder call profile\n");
@@ -1297,11 +1320,13 @@
         case TRANSACTION_getVolumeGroupFromAudioAttributes:
         case TRANSACTION_acquireSoundTriggerSession:
         case TRANSACTION_releaseSoundTriggerSession:
+        case TRANSACTION_isHotwordStreamSupported:
         case TRANSACTION_setRttEnabled:
         case TRANSACTION_isCallScreenModeSupported:
         case TRANSACTION_setDevicesRoleForStrategy:
         case TRANSACTION_setSupportedSystemUsages:
         case TRANSACTION_removeDevicesRoleForStrategy:
+        case TRANSACTION_clearDevicesRoleForStrategy:
         case TRANSACTION_getDevicesForRoleAndStrategy:
         case TRANSACTION_getDevicesForAttributes:
         case TRANSACTION_setAllowedCapturePolicy:
@@ -1313,7 +1338,9 @@
         case TRANSACTION_removeDevicesRoleForCapturePreset:
         case TRANSACTION_clearDevicesRoleForCapturePreset:
         case TRANSACTION_getDevicesForRoleAndCapturePreset:
-        case TRANSACTION_getSpatializer: {
+        case TRANSACTION_getSpatializer:
+        case TRANSACTION_setPreferredMixerAttributes:
+        case TRANSACTION_clearPreferredMixerAttributes: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1338,7 +1365,9 @@
         } else {
             getIAudioPolicyServiceStatistics().event(code, elapsedMs);
         }
-    });
+    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
+    mediautils::TimeCheck::kDefaultSecondChanceDuration,
+    true /* crashOnTimeout */);
 
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
@@ -1518,6 +1547,16 @@
         "  help print this message\n");
 }
 
+status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
+                        const audio_config_base_t& config,
+                        const audio_output_flags_t flags) {
+    return mUsecaseValidator->registerStream(output, config, flags);
+}
+
+status_t AudioPolicyService::unregisterOutput(audio_io_handle_t output) {
+    return mUsecaseValidator->unregisterStream(output);
+}
+
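
The registerOutput()/unregisterOutput() helpers above simply forward to the new media::UsecaseValidator member via registerStream()/unregisterStream(). The toy MiniValidator below is only a sketch of that style of per-output bookkeeping and is not the real validator:

    // Illustrative only: a toy stream registry, not the real media::UsecaseValidator.
    #include <cstdint>
    #include <map>

    using audio_io_handle_t = int32_t;
    enum status_t : int32_t { NO_ERROR = 0, ALREADY_EXISTS = -17, NAME_NOT_FOUND = -2 };

    struct StreamInfo { uint32_t sampleRate; uint32_t channelMask; uint32_t flags; };

    class MiniValidator {
    public:
        status_t registerStream(audio_io_handle_t output, const StreamInfo& info) {
            // Reject duplicate registration of the same output handle.
            if (!mStreams.emplace(output, info).second) return ALREADY_EXISTS;
            return NO_ERROR;
        }
        status_t unregisterStream(audio_io_handle_t output) {
            return mStreams.erase(output) == 1 ? NO_ERROR : NAME_NOT_FOUND;
        }
    private:
        std::map<audio_io_handle_t, StreamInfo> mStreams;
    };

    int main() {
        MiniValidator v;
        return v.registerStream(1, {48000, 3, 0}) == NO_ERROR
                && v.unregisterStream(1) == NO_ERROR ? 0 : 1;
    }
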
 // -----------  AudioPolicyService::UidPolicy implementation ----------
 
 void AudioPolicyService::UidPolicy::registerSelf() {
@@ -1660,7 +1699,7 @@
     }
 }
 
-void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
 }
 
 void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
@@ -1721,6 +1760,7 @@
 }
 
 bool AudioPolicyService::UidPolicy::isA11yOnTop() {
+    Mutex::Autolock _l(mLock);
     for (const auto &uid : mCachedUids) {
         if (!isA11yUid(uid.first)) {
             continue;
@@ -1877,6 +1917,7 @@
     // since it controls the mic permission for legacy apps.
     mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
         mAttributionSource.packageName.value_or(""))),
+        AppOpsManager::WATCH_FOREGROUND_CHANGES,
         mOpCallback);
 }
 
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 3a08cf8..4710a8a 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -36,6 +36,7 @@
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
+#include <media/UsecaseValidator.h>
 #include <mediautils/ServiceUtilities.h>
 #include "AudioPolicyEffects.h"
 #include "CaptureStateNotifier.h"
@@ -69,7 +70,7 @@
     public IBinder::DeathRecipient,
     public SpatializerPolicyCallback
 {
-    friend class BinderService<AudioPolicyService>;
+    friend class sp<AudioPolicyService>;
 
 public:
     // for BinderService
@@ -134,7 +135,7 @@
                                                   int32_t* _aidl_return) override;
     binder::Status getStrategyForStream(AudioStreamType stream,
                                         int32_t* _aidl_return) override;
-    binder::Status getDevicesForAttributes(const media::AudioAttributesEx& attr,
+    binder::Status getDevicesForAttributes(const media::AudioAttributesInternal& attr,
                                            bool forVolume,
                                            std::vector<AudioDevice>* _aidl_return) override;
     binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
@@ -172,16 +173,18 @@
                                            const media::AudioAttributesInternal& attributes,
                                            bool* _aidl_return) override;
     binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
-                                  Int* count, std::vector<media::AudioPort>* ports,
+                                  Int* count, std::vector<media::AudioPortFw>* ports,
                                   int32_t* _aidl_return) override;
+    binder::Status listDeclaredDevicePorts(media::AudioPortRole role,
+                                           std::vector<media::AudioPortFw>* _aidl_return) override;
     binder::Status getAudioPort(int portId,
-                                media::AudioPort* _aidl_return) override;
-    binder::Status createAudioPatch(const media::AudioPatch& patch, int32_t handle,
+                                media::AudioPortFw* _aidl_return) override;
+    binder::Status createAudioPatch(const media::AudioPatchFw& patch, int32_t handle,
                                     int32_t* _aidl_return) override;
     binder::Status releaseAudioPatch(int32_t handle) override;
-    binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatch>* patches,
+    binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatchFw>* patches,
                                     int32_t* _aidl_return) override;
-    binder::Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+    binder::Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
     binder::Status registerClient(const sp<media::IAudioPolicyServiceClient>& client) override;
     binder::Status setAudioPortCallbacksEnabled(bool enabled) override;
     binder::Status setAudioVolumeGroupCallbacksEnabled(bool enabled) override;
@@ -197,7 +200,7 @@
             int32_t userId,
             const std::vector<AudioDevice>& devices) override;
     binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
-    binder::Status startAudioSource(const media::AudioPortConfig& source,
+    binder::Status startAudioSource(const media::AudioPortConfigFw& source,
                                     const media::AudioAttributesInternal& attributes,
                                     int32_t* _aidl_return) override;
     binder::Status stopAudioSource(int32_t portId) override;
@@ -222,14 +225,15 @@
     binder::Status setCurrentImeUid(int32_t uid) override;
     binder::Status isHapticPlaybackSupported(bool* _aidl_return) override;
     binder::Status isUltrasoundSupported(bool* _aidl_return) override;
+    binder::Status isHotwordStreamSupported(bool lookbackAudio, bool* _aidl_return) override;
     binder::Status listAudioProductStrategies(
             std::vector<media::AudioProductStrategy>* _aidl_return) override;
-    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesEx& aa,
+    binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesInternal& aa,
                                                          bool fallbackOnDefault,
                                                          int32_t* _aidl_return) override;
     binder::Status listAudioVolumeGroups(
             std::vector<media::AudioVolumeGroup>* _aidl_return) override;
-    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesEx& aa,
+    binder::Status getVolumeGroupFromAudioAttributes(const media::AudioAttributesInternal& aa,
                                                      bool fallbackOnDefault,
                                                      int32_t* _aidl_return) override;
     binder::Status setRttEnabled(bool enabled) override;
@@ -237,7 +241,12 @@
     binder::Status setDevicesRoleForStrategy(
             int32_t strategy, media::DeviceRole role,
             const std::vector<AudioDevice>& devices) override;
-    binder::Status removeDevicesRoleForStrategy(int32_t strategy, media::DeviceRole role) override;
+    binder::Status removeDevicesRoleForStrategy(
+            int32_t strategy, media::DeviceRole role,
+            const std::vector<AudioDevice>& devices) override;
+    binder::Status clearDevicesRoleForStrategy(
+            int32_t strategy,
+            media::DeviceRole role) override;
     binder::Status getDevicesForRoleAndStrategy(
             int32_t strategy, media::DeviceRole role,
             std::vector<AudioDevice>* _aidl_return) override;
@@ -277,6 +286,22 @@
     binder::Status getDirectProfilesForAttributes(const media::AudioAttributesInternal& attr,
                         std::vector<media::audio::common::AudioProfile>* _aidl_return) override;
 
+    binder::Status getSupportedMixerAttributes(
+            int32_t portId,
+            std::vector<media::AudioMixerAttributesInternal>* _aidl_return) override;
+    binder::Status setPreferredMixerAttributes(
+            const media::AudioAttributesInternal& attr,
+            int32_t portId,
+            int32_t uid,
+            const media::AudioMixerAttributesInternal& mixerAttr) override;
+    binder::Status getPreferredMixerAttributes(
+            const media::AudioAttributesInternal& attr,
+            int32_t portId,
+            std::optional<media::AudioMixerAttributesInternal>* _aidl_return) override;
+    binder::Status clearPreferredMixerAttributes(const media::AudioAttributesInternal& attr,
+                                                 int32_t portId,
+                                                 int32_t uid) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
     // IBinder::DeathRecipient
@@ -415,6 +440,11 @@
      */
     static bool isAppOpSource(audio_source_t source);
 
+    status_t registerOutput(audio_io_handle_t output,
+                            const audio_config_base_t& config,
+                            const audio_output_flags_t flags);
+    status_t unregisterOutput(audio_io_handle_t output);
+
     // If recording we need to make sure the UID is allowed to do that. If the UID is idle
     // then it cannot record and gets buffers with zeros - silence. As soon as the UID
     // transitions to an active state we will start reporting buffers with data. This approach
@@ -454,7 +484,7 @@
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
                 int32_t capability) override;
-        void onUidProcAdjChanged(uid_t uid) override;
+        void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
         void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
         void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -475,8 +505,8 @@
         Mutex mLock;
         ActivityManager mAm;
         bool mObserverRegistered = false;
-        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids;
-        std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids;
+        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mLock);
+        std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mLock);
         std::vector<uid_t> mAssistantUids;
         std::vector<uid_t> mActiveAssistantUids;
         std::vector<uid_t> mA11yUids;
@@ -705,6 +735,8 @@
         explicit AudioPolicyClient(AudioPolicyService *service) : mAudioPolicyService(service) {}
         virtual ~AudioPolicyClient() {}
 
+        virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig *config);
+
         //
         // Audio HW module functions
         //
@@ -760,9 +792,6 @@
         // for each output (destination device) it is attached to.
         virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
 
-        // invalidate a stream type, causing a reroute to an unspecified new output
-        virtual status_t invalidateStream(audio_stream_type_t stream);
-
         // function enabling the audio policy manager to send proprietary information directly to the audio hardware interface.
         virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
         // function enabling the audio policy manager to receive proprietary information directly from the audio hardware interface.
@@ -820,7 +849,9 @@
                 const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
 
         status_t setDeviceConnectedState(
-                const struct audio_port_v7 *port, bool connected) override;
+                const struct audio_port_v7 *port, media::DeviceConnectedState state) override;
+
+        status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
 
      private:
         AudioPolicyService *mAudioPolicyService;
@@ -887,7 +918,7 @@
 
         const audio_attributes_t attributes; // source, flags ...
         const audio_io_handle_t io;          // audio HAL stream IO handle
-        const AttributionSourceState& attributionSource; //client attributionsource
+        const AttributionSourceState attributionSource; // client attribution source
         const audio_session_t session;       // audio session ID
         const audio_port_handle_t portId;
         const audio_port_handle_t deviceId;  // selected input device port ID
@@ -1060,11 +1091,13 @@
 
     CaptureStateNotifier mCaptureStateNotifier;
 
+    // created in onFirstRef() and never cleared: does not need to be guarded by mLock
     sp<Spatializer> mSpatializer;
 
     void *mLibraryHandle = nullptr;
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
+    std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
 };
 
 } // namespace android
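
The header diff above adds GUARDED_BY(mLock) to the UidPolicy caches so clang's thread-safety analysis can flag unlocked access, which matches the new Mutex::Autolock in isA11yOnTop() earlier in this change. A small self-contained sketch of how such annotations behave, with the attribute macros defined locally (the real code pulls them from Android's thread-annotation headers); compile with clang and -Wthread-safety to see the warning:

    // Sketch of clang thread-safety annotations; other compilers may ignore the attributes.
    #include <mutex>

    #define CAPABILITY(x)  __attribute__((capability(x)))
    #define GUARDED_BY(x)  __attribute__((guarded_by(x)))
    #define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))

    class CAPABILITY("mutex") Lock {
    public:
        void lock() ACQUIRE();
        void unlock() RELEASE();
    private:
        std::mutex mMutex;
    };
    void Lock::lock() { mMutex.lock(); }
    void Lock::unlock() { mMutex.unlock(); }

    class UidCache {
    public:
        void setTop(int uid) {
            mLock.lock();
            mTopUid = uid;       // OK: mLock is held.
            mLock.unlock();
        }
        int topUnlocked() const { return mTopUid; }  // -Wthread-safety warns here.
    private:
        mutable Lock mLock;
        int mTopUid GUARDED_BY(mLock) = -1;
    };

    int main() {
        UidCache c;
        c.setTop(1000);
        return c.topUnlocked() == 1000 ? 0 : 1;
    }
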
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index c199a76..ee2e0eb 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -15,11 +15,13 @@
 ** limitations under the License.
 */
 
-
+#include <string>
 #define LOG_TAG "Spatializer"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
+#include <algorithm>
+#include <inttypes.h>
 #include <limits.h>
 #include <stdint.h>
 #include <sys/types.h>
@@ -28,11 +30,12 @@
 #include <audio_utils/fixedfft.h>
 #include <cutils/bitops.h>
 #include <hardware/sensors.h>
-#include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaMetricsItem.h>
+#include <media/QuaternionUtil.h>
 #include <media/ShmemCompat.h>
+#include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/Thread.h>
 
@@ -58,11 +61,13 @@
        if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
        std::move(_tmp.value()); })
 
-audio_channel_mask_t getMaxChannelMask(std::vector<audio_channel_mask_t> masks) {
+static audio_channel_mask_t getMaxChannelMask(
+        const std::vector<audio_channel_mask_t>& masks, size_t channelLimit = SIZE_MAX) {
     uint32_t maxCount = 0;
     audio_channel_mask_t maxMask = AUDIO_CHANNEL_NONE;
     for (auto mask : masks) {
         const size_t count = audio_channel_count_from_out_mask(mask);
+        if (count > channelLimit) continue;  // ignore masks greater than channelLimit
         if (count > maxCount) {
             maxMask = mask;
             maxCount = count;
@@ -71,6 +76,34 @@
     return maxMask;
 }
 
+static std::vector<float> recordFromTranslationRotationVector(
+        const std::vector<float>& trVector) {
+    auto headToStageOpt = Pose3f::fromVector(trVector);
+    if (!headToStageOpt) return {};
+
+    const auto stageToHead = headToStageOpt.value().inverse();
+    const auto stageToHeadTranslation = stageToHead.translation();
+    constexpr float RAD_TO_DEGREE = 180.f / M_PI;
+    std::vector<float> record{
+        stageToHeadTranslation[0], stageToHeadTranslation[1], stageToHeadTranslation[2],
+        0.f, 0.f, 0.f};
+    media::quaternionToAngles(stageToHead.rotation(), &record[3], &record[4], &record[5]);
+    record[3] *= RAD_TO_DEGREE;
+    record[4] *= RAD_TO_DEGREE;
+    record[5] *= RAD_TO_DEGREE;
+    return record;
+}
+
+template<typename T>
+static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
+    if constexpr (std::is_floating_point_v<T>) {
+        return value != value /* constexpr isnan */
+                ? low : std::clamp(value, low, high);
+    } else /* constexpr */ {
+        return std::clamp(value, low, high);
+    }
+}
+
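
safe_clamp() above only differs from std::clamp for floating-point NaN: std::clamp(NaN, lo, hi) returns NaN because both comparisons are false, whereas safe_clamp maps NaN to the low bound, which the clamped display-orientation and hinge-angle setters later in this file rely on. A small standalone usage sketch (the helper is copied here purely for illustration):

    // Standalone copy of the safe_clamp idea, for illustration only.
    #include <algorithm>
    #include <cmath>
    #include <cstdio>
    #include <type_traits>

    template <typename T>
    static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
        if constexpr (std::is_floating_point_v<T>) {
            return value != value /* constexpr isnan */ ? low : std::clamp(value, low, high);
        } else {
            return std::clamp(value, low, high);
        }
    }

    int main() {
        const float lo = 0.f, hi = 2.f * static_cast<float>(M_PI);
        std::printf("%f\n", safe_clamp(7.f, lo, hi));           // clamped to ~6.2832
        std::printf("%f\n", safe_clamp(std::nanf(""), lo, hi));  // NaN maps to 0.0, not NaN
        return 0;
    }
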
 // ---------------------------------------------------------------------------
 
 class Spatializer::EngineCallbackHandler : public AHandler {
@@ -84,6 +117,7 @@
         kWhatOnFramesProcessed,    // AudioEffect::EVENT_FRAMES_PROCESSED
         kWhatOnHeadToStagePose,    // SpatializerPoseController::Listener::onHeadToStagePose
         kWhatOnActualModeChange,   // SpatializerPoseController::Listener::onActualModeChange
+        kWhatOnLatencyModesChanged, // Spatializer::onSupportedLatencyModesChanged
     };
     static constexpr const char *kNumFramesKey = "numFrames";
     static constexpr const char *kModeKey = "mode";
@@ -93,15 +127,35 @@
     static constexpr const char *kRotation0Key = "rotation0";
     static constexpr const char *kRotation1Key = "rotation1";
     static constexpr const char *kRotation2Key = "rotation2";
+    static constexpr const char *kLatencyModesKey = "latencyModes";
+
+    class LatencyModes : public RefBase {
+    public:
+        LatencyModes(audio_io_handle_t output,
+                const std::vector<audio_latency_mode_t>& latencyModes)
+            : mOutput(output), mLatencyModes(latencyModes) {}
+        ~LatencyModes() = default;
+
+        audio_io_handle_t mOutput;
+        std::vector<audio_latency_mode_t> mLatencyModes;
+    };
 
     void onMessageReceived(const sp<AMessage> &msg) override {
+        // No ALooper method to get the tid so update
+        // Spatializer priority on the first message received.
+        std::call_once(mPrioritySetFlag, [](){
+            const pid_t pid = getpid();
+            const pid_t tid = gettid();
+            (void)requestSpatializerPriority(pid, tid);
+        });
+
+        sp<Spatializer> spatializer = mSpatializer.promote();
+        if (spatializer == nullptr) {
+            ALOGW("%s: Cannot promote spatializer", __func__);
+            return;
+        }
         switch (msg->what()) {
             case kWhatOnFramesProcessed: {
-                sp<Spatializer> spatializer = mSpatializer.promote();
-                if (spatializer == nullptr) {
-                    ALOGW("%s: Cannot promote spatializer", __func__);
-                    return;
-                }
                 int numFrames;
                 if (!msg->findInt32(kNumFramesKey, &numFrames)) {
                     ALOGE("%s: Cannot find num frames!", __func__);
@@ -112,11 +166,6 @@
                 }
                 } break;
             case kWhatOnHeadToStagePose: {
-                sp<Spatializer> spatializer = mSpatializer.promote();
-                if (spatializer == nullptr) {
-                    ALOGW("%s: Cannot promote spatializer", __func__);
-                    return;
-                }
                 std::vector<float> headToStage(sHeadPoseKeys.size());
                 for (size_t i = 0 ; i < sHeadPoseKeys.size(); i++) {
                     if (!msg->findFloat(sHeadPoseKeys[i], &headToStage[i])) {
@@ -127,24 +176,32 @@
                 spatializer->onHeadToStagePoseMsg(headToStage);
                 } break;
             case kWhatOnActualModeChange: {
-                sp<Spatializer> spatializer = mSpatializer.promote();
-                if (spatializer == nullptr) {
-                    ALOGW("%s: Cannot promote spatializer", __func__);
-                    return;
-                }
                 int mode;
-                if (!msg->findInt32(EngineCallbackHandler::kModeKey, &mode)) {
+                if (!msg->findInt32(kModeKey, &mode)) {
                     ALOGE("%s: Cannot find actualMode!", __func__);
                     return;
                 }
                 spatializer->onActualModeChangeMsg(static_cast<HeadTrackingMode>(mode));
                 } break;
+
+            case kWhatOnLatencyModesChanged: {
+                sp<RefBase> object;
+                if (!msg->findObject(kLatencyModesKey, &object)) {
+                    ALOGE("%s: Cannot find latency modes!", __func__);
+                    return;
+                }
+                sp<LatencyModes> latencyModes = static_cast<LatencyModes*>(object.get());
+                spatializer->onSupportedLatencyModesChangedMsg(
+                    latencyModes->mOutput, std::move(latencyModes->mLatencyModes));
+                } break;
+
             default:
                 LOG_ALWAYS_FATAL("Invalid callback message %d", msg->what());
         }
     }
 private:
     wp<Spatializer> mSpatializer;
+    std::once_flag mPrioritySetFlag;
 };
 
 const std::vector<const char *> Spatializer::sHeadPoseKeys = {
@@ -157,18 +214,17 @@
 };
 
 // ---------------------------------------------------------------------------
-sp<Spatializer> Spatializer::create(SpatializerPolicyCallback *callback) {
+sp<Spatializer> Spatializer::create(SpatializerPolicyCallback* callback,
+                                    const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
     sp<Spatializer> spatializer;
 
-    sp<EffectsFactoryHalInterface> effectsFactoryHal = EffectsFactoryHalInterface::create();
     if (effectsFactoryHal == nullptr) {
         ALOGW("%s failed to create effect factory interface", __func__);
         return spatializer;
     }
 
     std::vector<effect_descriptor_t> descriptors;
-    status_t status =
-            effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
+    status_t status = effectsFactoryHal->getDescriptors(FX_IID_SPATIALIZER, &descriptors);
     if (status != NO_ERROR) {
         ALOGW("%s failed to get spatializer descriptor, error %d", __func__, status);
         return spatializer;
@@ -176,17 +232,22 @@
     ALOG_ASSERT(!descriptors.empty(),
             "%s getDescriptors() returned no error but empty list", __func__);
 
-    //TODO: get supported spatialization modes from FX engine or descriptor
-
+    // TODO: get supported spatialization modes from FX engine or descriptor
     sp<EffectHalInterface> effect;
     status = effectsFactoryHal->createEffect(&descriptors[0].uuid, AUDIO_SESSION_OUTPUT_STAGE,
             AUDIO_IO_HANDLE_NONE, AUDIO_PORT_HANDLE_NONE, &effect);
-    ALOGI("%s FX create status %d effect %p", __func__, status, effect.get());
+    ALOGI("%s FX create status %d effect ID %" PRId64, __func__, status,
+          effect ? effect->effectId() : 0);
 
     if (status == NO_ERROR && effect != nullptr) {
         spatializer = new Spatializer(descriptors[0], callback);
         if (spatializer->loadEngineConfiguration(effect) != NO_ERROR) {
             spatializer.clear();
+            ALOGW("%s loadEngine error: %d  effect Id %" PRId64,
+                    __func__, status, effect ? effect->effectId() : 0);
+        } else {
+            spatializer->mLocalLog.log("%s with effect Id %" PRId64, __func__,
+                                       effect ? effect->effectId() : 0);
         }
     }
 
@@ -197,6 +258,7 @@
     : mEngineDescriptor(engineDescriptor),
       mPolicyCallback(callback) {
     ALOGV("%s", __func__);
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 void Spatializer::onFirstRef() {
@@ -205,7 +267,7 @@
     mLooper->start(
             /*runOnCallingThread*/false,
             /*canCallJava*/       false,
-            PRIORITY_AUDIO);
+            PRIORITY_URGENT_AUDIO);
 
     mHandler = new EngineCallbackHandler(this);
     mLooper->registerHandler(mHandler);
@@ -221,6 +283,16 @@
     mHandler.clear();
 }
 
+static std::string channelMaskVectorToString(
+        const std::vector<audio_channel_mask_t>& masks) {
+    std::stringstream ss;
+    for (const auto &mask : masks) {
+        if (ss.tellp() != 0) ss << "|";
+        ss << mask;
+    }
+    return ss.str();
+}
+
 status_t Spatializer::loadEngineConfiguration(sp<EffectHalInterface> effect) {
     ALOGV("%s", __func__);
 
@@ -268,6 +340,7 @@
         ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES", __func__);
         return status;
     }
+
     for (const auto spatializationMode : spatializationModes) {
         if (!aidl_utils::isValidEnum(spatializationMode)) {
             ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
@@ -309,7 +382,7 @@
     }
     mediametrics::LogItem(mMetricsId)
         .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
-        .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)getMaxChannelMask(mChannelMasks))
+        .set(AMEDIAMETRICS_PROP_CHANNELMASKS, channelMaskVectorToString(mChannelMasks))
         .set(AMEDIAMETRICS_PROP_LEVELS, aidl_utils::enumsToString(mLevels))
         .set(AMEDIAMETRICS_PROP_MODES, aidl_utils::enumsToString(mSpatializationModes))
         .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, aidl_utils::enumsToString(mHeadTrackingModes))
@@ -318,12 +391,24 @@
     return NO_ERROR;
 }
 
+/* static */
+void Spatializer::sendEmptyCreateSpatializerMetricWithStatus(status_t status) {
+    mediametrics::LogItem(kDefaultMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
+        .set(AMEDIAMETRICS_PROP_CHANNELMASKS, "")
+        .set(AMEDIAMETRICS_PROP_LEVELS, "")
+        .set(AMEDIAMETRICS_PROP_MODES, "")
+        .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, "")
+        .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+        .record();
+}
+
 /** Gets the channel mask, sampling rate and format set for the spatializer input. */
 audio_config_base_t Spatializer::getAudioInConfig() const {
     std::lock_guard lock(mLock);
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     // For now use highest supported channel count
-    config.channel_mask = getMaxChannelMask(mChannelMasks);
+    config.channel_mask = getMaxChannelMask(mChannelMasks, FCC_LIMIT);
     return config;
 }
 
@@ -334,6 +419,17 @@
         return BAD_VALUE;
     }
 
+    if (mSpatializerCallback != nullptr) {
+        if (IInterface::asBinder(callback) == IInterface::asBinder(mSpatializerCallback)) {
+            ALOGW("%s: Registering callback %p again",
+                __func__, mSpatializerCallback.get());
+            return NO_ERROR;
+        }
+        ALOGE("%s: Already one client registered with callback %p",
+            __func__, mSpatializerCallback.get());
+        return INVALID_OPERATION;
+    }
+
     sp<IBinder> binder = IInterface::asBinder(callback);
     status_t status = binder->linkToDeath(this);
     if (status == NO_ERROR) {
@@ -366,7 +462,8 @@
 }
 
 Status Spatializer::setLevel(SpatializationLevel level) {
-    ALOGV("%s level %d", __func__, (int)level);
+    ALOGV("%s level %s", __func__, media::toString(level).c_str());
+    mLocalLog.log("%s with %s", __func__, media::toString(level).c_str());
     if (level != SpatializationLevel::NONE
             && std::find(mLevels.begin(), mLevels.end(), level) == mLevels.end()) {
         return binderStatusFromStatusT(BAD_VALUE);
@@ -426,11 +523,12 @@
 }
 
 Status Spatializer::setDesiredHeadTrackingMode(SpatializerHeadTrackingMode mode) {
-    ALOGV("%s mode %d", __func__, (int)mode);
+    ALOGV("%s mode %s", __func__, media::toString(mode).c_str());
 
     if (!mSupportsHeadTracking) {
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
+    mLocalLog.log("%s with %s", __func__, media::toString(mode).c_str());
     std::lock_guard lock(mLock);
     switch (mode) {
         case SpatializerHeadTrackingMode::OTHER:
@@ -485,6 +583,8 @@
     }
     std::lock_guard lock(mLock);
     if (mPoseController != nullptr) {
+        mLocalLog.log("%s with screenToStage %s", __func__,
+                media::VectorRecorder::toString<float>(screenToStage).c_str());
         mPoseController->setScreenToStagePose(maybePose.value());
     }
     return Status::ok();
@@ -520,6 +620,7 @@
     }
     std::lock_guard lock(mLock);
     if (mHeadSensor != sensorHandle) {
+        mLocalLog.log("%s with 0x%08x", __func__, sensorHandle);
         mHeadSensor = sensorHandle;
         checkPoseController_l();
         checkSensorsState_l();
@@ -534,6 +635,7 @@
     }
     std::lock_guard lock(mLock);
     if (mScreenSensor != sensorHandle) {
+        mLocalLog.log("%s with 0x%08x", __func__, sensorHandle);
         mScreenSensor = sensorHandle;
         // TODO: consider a new method setHeadAndScreenSensor()
         // because we generally set both at the same time.
@@ -545,26 +647,48 @@
 
 Status Spatializer::setDisplayOrientation(float physicalToLogicalAngle) {
     ALOGV("%s physicalToLogicalAngle %f", __func__, physicalToLogicalAngle);
-    if (!mSupportsHeadTracking) {
-        return binderStatusFromStatusT(INVALID_OPERATION);
-    }
+    mLocalLog.log("%s with %f", __func__, physicalToLogicalAngle);
+    const float angle = safe_clamp(physicalToLogicalAngle, 0.f, (float)(2. * M_PI));
+    // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+    ALOGI_IF(angle != physicalToLogicalAngle,
+            "%s: clamping %f to %f", __func__, physicalToLogicalAngle, angle);
     std::lock_guard lock(mLock);
-    mDisplayOrientation = physicalToLogicalAngle;
+    mDisplayOrientation = angle;
     if (mPoseController != nullptr) {
-        mPoseController->setDisplayOrientation(mDisplayOrientation);
+        // This turns on the rate-limiter.
+        mPoseController->setDisplayOrientation(angle);
     }
     if (mEngine != nullptr) {
         setEffectParameter_l(
-            SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{physicalToLogicalAngle});
+            SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{angle});
     }
     return Status::ok();
 }
 
 Status Spatializer::setHingeAngle(float hingeAngle) {
-    std::lock_guard lock(mLock);
     ALOGV("%s hingeAngle %f", __func__, hingeAngle);
+    mLocalLog.log("%s with %f", __func__, hingeAngle);
+    const float angle = safe_clamp(hingeAngle, 0.f, (float)(2. * M_PI));
+    // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+    ALOGI_IF(angle != hingeAngle,
+            "%s: clamping %f to %f", __func__, hingeAngle, angle);
+    std::lock_guard lock(mLock);
+    mHingeAngle = angle;
     if (mEngine != nullptr) {
-        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{hingeAngle});
+        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{angle});
+    }
+    return Status::ok();
+}
+
+Status Spatializer::setFoldState(bool folded) {
+    ALOGV("%s foldState %d", __func__, (int)folded);
+    mLocalLog.log("%s with %d", __func__, (int)folded);
+    std::lock_guard lock(mLock);
+    mFoldedState = folded;
+    if (mEngine != nullptr) {
+        // we don't suppress multiple calls with the same folded state - that's
+        // done at the caller.
+        setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE, std::vector<uint8_t>{mFoldedState});
     }
     return Status::ok();
 }
@@ -642,6 +766,17 @@
     msg->post();
 }
 
+void Spatializer::resetEngineHeadPose_l() {
+    ALOGV("%s mEngine %p", __func__, mEngine.get());
+    if (mEngine == nullptr) {
+        return;
+    }
+    const std::vector<float> headToStage(6, 0.0);
+    setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+    setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+            std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
 void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
     ALOGV("%s", __func__);
     sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -650,6 +785,9 @@
         callback = mHeadTrackingCallback;
         if (mEngine != nullptr) {
             setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+            const auto record = recordFromTranslationRotationVector(headToStage);
+            mPoseRecorder.record(record);
+            mPoseDurableRecorder.record(record);
         }
     }
 
@@ -659,9 +797,9 @@
 }
 
 void Spatializer::onActualModeChange(HeadTrackingMode mode) {
-    ALOGV("%s(%d)", __func__, (int)mode);
-    sp<AMessage> msg =
-            new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
+    std::string modeStr = media::toString(mode);
+    ALOGV("%s(%s)", __func__, modeStr.c_str());
+    sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
     msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
     msg->post();
 }
@@ -691,10 +829,15 @@
         }
         mActualHeadTrackingMode = spatializerMode;
         if (mEngine != nullptr) {
-            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                                 std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+            if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+                resetEngineHeadPose_l();
+            } else {
+                setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                                     std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+            }
         }
         callback = mHeadTrackingCallback;
+        mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
     }
     if (callback != nullptr) {
         callback->onHeadTrackingModeChanged(spatializerMode);
@@ -708,19 +851,24 @@
     {
         std::lock_guard lock(mLock);
         ALOGV("%s output %d mOutput %d", __func__, (int)output, (int)mOutput);
+        mLocalLog.log("%s with output %d tracks %zu (mOutput %d)", __func__, (int)output,
+                      numActiveTracks, (int)mOutput);
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             LOG_ALWAYS_FATAL_IF(mEngine == nullptr, "%s output set without FX engine", __func__);
             // remove FX instance
             mEngine->setEnabled(false);
             mEngine.clear();
             mPoseController.reset();
+            AudioSystem::removeSupportedLatencyModesCallback(this);
         }
+
         // create FX instance on output
         AttributionSourceState attributionSource = AttributionSourceState();
         mEngine = new AudioEffect(attributionSource);
-        mEngine->set(nullptr, &mEngineDescriptor.uuid, 0, Spatializer::engineCallback /* cbf */,
-                     this /* user */, AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */,
-                     false /* probe */, true /* notifyFramesProcessed */);
+        mEngine->set(nullptr /* type */, &mEngineDescriptor.uuid, 0 /* priority */,
+                     wp<AudioEffect::IAudioEffectCallback>::fromExisting(this),
+                     AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */, false /* probe */,
+                     true /* notifyFramesProcessed */);
         status_t status = mEngine->initCheck();
         ALOGV("%s mEngine create status %d", __func__, (int)status);
         if (status != NO_ERROR) {
@@ -730,6 +878,13 @@
         outputChanged = mOutput != output;
         mOutput = output;
         mNumActiveTracks = numActiveTracks;
+        AudioSystem::addSupportedLatencyModesCallback(this);
+
+        std::vector<audio_latency_mode_t> latencyModes;
+        status = AudioSystem::getSupportedLatencyModes(mOutput, &latencyModes);
+        if (status == OK) {
+            mSupportedLatencyModes = latencyModes;
+        }
 
         checkEngineState_l();
         if (mSupportsHeadTracking) {
@@ -737,6 +892,14 @@
             checkSensorsState_l();
         }
         callback = mSpatializerCallback;
+
+        // Restore common effect state.
+        setEffectParameter_l(SPATIALIZER_PARAM_DISPLAY_ORIENTATION,
+                std::vector<float>{mDisplayOrientation});
+        setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE,
+                std::vector<uint8_t>{mFoldedState});
+        setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE,
+                std::vector<float>{mHingeAngle});
     }
 
     if (outputChanged && callback != nullptr) {
@@ -752,6 +915,7 @@
 
     {
         std::lock_guard lock(mLock);
+        mLocalLog.log("%s with output %d tracks %zu", __func__, (int)mOutput, mNumActiveTracks);
         ALOGV("%s mOutput %d", __func__, (int)mOutput);
         if (mOutput == AUDIO_IO_HANDLE_NONE) {
             return output;
@@ -759,6 +923,7 @@
         // remove FX instance
         mEngine->setEnabled(false);
         mEngine.clear();
+        AudioSystem::removeSupportedLatencyModesCallback(this);
         output = mOutput;
         mOutput = AUDIO_IO_HANDLE_NONE;
         mPoseController.reset();
@@ -771,9 +936,31 @@
     return output;
 }
 
+void Spatializer::onSupportedLatencyModesChanged(
+        audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) {
+    ALOGV("%s output %d num modes %zu", __func__, (int)output, modes.size());
+    sp<AMessage> msg =
+            new AMessage(EngineCallbackHandler::kWhatOnLatencyModesChanged, mHandler);
+    msg->setObject(EngineCallbackHandler::kLatencyModesKey,
+        sp<EngineCallbackHandler::LatencyModes>::make(output, modes));
+    msg->post();
+}
+
+void Spatializer::onSupportedLatencyModesChangedMsg(
+        audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes) {
+    std::lock_guard lock(mLock);
+    ALOGV("%s output %d mOutput %d num modes %zu",
+            __func__, (int)output, (int)mOutput, modes.size());
+    if (output == mOutput) {
+        mSupportedLatencyModes = std::move(modes);
+        checkSensorsState_l();
+    }
+}
+
 void Spatializer::updateActiveTracks(size_t numActiveTracks) {
     std::lock_guard lock(mLock);
     if (mNumActiveTracks != numActiveTracks) {
+        mLocalLog.log("%s from %zu to %zu", __func__, mNumActiveTracks, numActiveTracks);
         mNumActiveTracks = numActiveTracks;
         checkEngineState_l();
         checkSensorsState_l();
@@ -781,17 +968,40 @@
 }
 
 void Spatializer::checkSensorsState_l() {
-    if (mSupportsHeadTracking && mPoseController != nullptr) {
-        if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
-            && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
-            && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
-            mPoseController->setHeadSensor(mHeadSensor);
-            mPoseController->setScreenSensor(mScreenSensor);
+    audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+    const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
+    const bool supportsLowLatencyMode = supportsSetLatencyMode && std::find(
+            mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
+            AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end();
+    if (mSupportsHeadTracking) {
+        if (mPoseController != nullptr) {
+            // TODO(b/253297301, b/255433067) reenable low latency condition check
+            // for Head Tracking after Bluetooth HAL supports it correctly.
+            if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+                && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+                && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+                if (mEngine != nullptr) {
+                    setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                            std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+                }
+                mPoseController->setHeadSensor(mHeadSensor);
+                mPoseController->setScreenSensor(mScreenSensor);
+                if (supportsLowLatencyMode) requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+            } else {
+                mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+                mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+                resetEngineHeadPose_l();
+            }
         } else {
-            mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
-            mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+            resetEngineHeadPose_l();
         }
     }
+    if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
+        const status_t status =
+                AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+        ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d",
+                __func__, mOutput, toString(requestedLatencyMode).c_str(), status);
+    }
 }
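
The reworked checkSensorsState_l() above also chooses the requested output latency mode: AUDIO_LATENCY_MODE_LOW only when the output advertises it and head tracking is actively in use, otherwise AUDIO_LATENCY_MODE_FREE, and the request is sent only when an output is attached and the supported-mode list is non-empty. A condensed restatement of that decision using local stand-in types (not the real AudioSystem API):

    // Condensed, standalone restatement of the latency-mode choice; names are local to this sketch.
    #include <algorithm>
    #include <vector>

    enum LatencyMode { LATENCY_FREE, LATENCY_LOW };

    LatencyMode pickLatencyMode(const std::vector<LatencyMode>& supported,
                                bool headTrackingActive /* tracks active, level != NONE, sensor set */) {
        const bool lowSupported =
                std::find(supported.begin(), supported.end(), LATENCY_LOW) != supported.end();
        return headTrackingActive && lowSupported ? LATENCY_LOW : LATENCY_FREE;
    }

    int main() {
        return pickLatencyMode({LATENCY_FREE, LATENCY_LOW}, true) == LATENCY_LOW ? 0 : 1;
    }
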
 
 void Spatializer::checkEngineState_l() {
@@ -800,8 +1010,6 @@
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<SpatializationLevel>{mLevel});
-            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                    std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
         } else {
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -823,6 +1031,7 @@
         mPoseController->setDisplayOrientation(mDisplayOrientation);
     } else if (!isControllerNeeded && mPoseController != nullptr) {
         mPoseController.reset();
+        resetEngineHeadPose_l();
     }
     if (mPoseController != nullptr) {
         mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
@@ -837,28 +1046,87 @@
     }
 }
 
-void Spatializer::engineCallback(int32_t event, void *user, void *info) {
-    if (user == nullptr) {
-        return;
-    }
-    Spatializer* const me = reinterpret_cast<Spatializer *>(user);
-    switch (event) {
-        case AudioEffect::EVENT_FRAMES_PROCESSED: {
-            int frames = info == nullptr ? 0 : *(int*)info;
-            ALOGV("%s frames processed %d for me %p", __func__, frames, me);
-            me->postFramesProcessedMsg(frames);
-        } break;
-        default:
-            ALOGV("%s event %d", __func__, event);
-            break;
-    }
-}
-
-void Spatializer::postFramesProcessedMsg(int frames) {
+void Spatializer::onFramesProcessed(int32_t framesProcessed) {
     sp<AMessage> msg =
             new AMessage(EngineCallbackHandler::kWhatOnFramesProcessed, mHandler);
-    msg->setInt32(EngineCallbackHandler::kNumFramesKey, frames);
+    msg->setInt32(EngineCallbackHandler::kNumFramesKey, framesProcessed);
     msg->post();
 }
 
+std::string Spatializer::toString(unsigned level) const {
+    std::string prefixSpace(level, ' ');
+    std::string ss = prefixSpace + "Spatializer:\n";
+    bool needUnlock = false;
+
+    prefixSpace += ' ';
+    if (!mLock.try_lock()) {
+        // Dump even if try_lock failed; the information is still useful but may be inaccurate.
+        ss.append(prefixSpace).append("try_lock failed, dumpsys below may be INACCURATE!\n");
+    } else {
+        needUnlock = true;
+    }
+
+    // Spatializer class information.
+    // 1. Capabilities (mLevels, mHeadTrackingModes, mSpatializationModes, mChannelMasks, etc)
+    ss.append(prefixSpace).append("Supported levels: [");
+    for (auto& level : mLevels) {
+        base::StringAppendF(&ss, " %s", media::toString(level).c_str());
+    }
+    base::StringAppendF(&ss, "], mLevel: %s", media::toString(mLevel).c_str());
+
+    base::StringAppendF(&ss, "\n%smHeadTrackingModes: [", prefixSpace.c_str());
+    for (auto& mode : mHeadTrackingModes) {
+        base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+    }
+    base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
+                        media::toString(mDesiredHeadTrackingMode).c_str(),
+                        media::toString(mActualHeadTrackingMode).c_str());
+
+    base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
+    for (auto& mode : mSpatializationModes) {
+        base::StringAppendF(&ss, " %s", media::toString(mode).c_str());
+    }
+    ss += "]\n";
+
+    base::StringAppendF(&ss, "%smChannelMasks: ", prefixSpace.c_str());
+    for (auto& mask : mChannelMasks) {
+        base::StringAppendF(&ss, "%s", audio_channel_out_mask_to_string(mask));
+    }
+    base::StringAppendF(&ss, "\n%smSupportsHeadTracking: %s\n", prefixSpace.c_str(),
+                        mSupportsHeadTracking ? "true" : "false");
+    // 2. Settings (Output, tracks)
+    base::StringAppendF(&ss, "%smNumActiveTracks: %zu\n", prefixSpace.c_str(), mNumActiveTracks);
+    base::StringAppendF(&ss, "%sOutputStreamHandle: %d\n", prefixSpace.c_str(), (int)mOutput);
+
+    // 3. Sensors, Effect information.
+    base::StringAppendF(&ss, "%sHeadSensorHandle: 0x%08x\n", prefixSpace.c_str(), mHeadSensor);
+    base::StringAppendF(&ss, "%sScreenSensorHandle: 0x%08x\n", prefixSpace.c_str(), mScreenSensor);
+    base::StringAppendF(&ss, "%sEffectHandle: %p\n", prefixSpace.c_str(), mEngine.get());
+    base::StringAppendF(&ss, "%sDisplayOrientation: %f\n", prefixSpace.c_str(),
+                        mDisplayOrientation);
+
+    ss.append(prefixSpace + "CommandLog:\n");
+    ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
+
+    // PoseController dump.
+    if (mPoseController != nullptr) {
+        ss.append(mPoseController->toString(level + 1))
+            .append(prefixSpace)
+            .append("Pose (active stage-to-head) [tx, ty, tz : pitch, roll, yaw]:\n")
+            .append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mPoseDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mPoseRecorder.toString(level + 3));
+    } else {
+        ss.append(prefixSpace).append("SpatializerPoseController not exist\n");
+    }
+
+    if (needUnlock) {
+        mLock.unlock();
+    }
+    return ss;
+}
+
 } // namespace android
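
Spatializer::toString() above uses a best-effort try_lock so a dumpsys never blocks behind a wedged mutex, and marks the dump as possibly inaccurate when the lock could not be taken. A minimal sketch of the same pattern with std::mutex and a hypothetical Widget class:

    // Minimal best-effort dump pattern: never block, but say so when the state may be stale.
    #include <mutex>
    #include <string>

    class Widget {
    public:
        void set(int v) { std::lock_guard<std::mutex> l(mLock); mValue = v; }

        std::string toString() const {
            std::string out = "Widget:\n";
            const bool locked = mLock.try_lock();  // do not block a dump on a held mutex
            if (!locked) out += "  (try_lock failed, values below may be inaccurate)\n";
            out += "  value: " + std::to_string(mValue) + "\n";
            if (locked) mLock.unlock();
            return out;
        }

    private:
        mutable std::mutex mLock;
        int mValue = 0;  // nominally guarded by mLock; read without it only in toString()
    };

    int main() {
        Widget w;
        w.set(3);
        return w.toString().empty() ? 1 : 0;
    }
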
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 29f4b08..0d4d3f6 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -17,15 +17,22 @@
 #ifndef ANDROID_MEDIA_SPATIALIZER_H
 #define ANDROID_MEDIA_SPATIALIZER_H
 
+#include <android-base/stringprintf.h>
 #include <android/media/BnEffect.h>
 #include <android/media/BnSpatializer.h>
 #include <android/media/SpatializationLevel.h>
 #include <android/media/SpatializationMode.h>
 #include <android/media/SpatializerHeadTrackingMode.h>
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <audio_utils/SimpleLog.h>
+#include <math.h>
+#include <media/AudioEffect.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/stagefright/foundation/ALooper.h>
-#include <media/AudioEffect.h>
 #include <system/audio_effects/effect_spatializer.h>
+#include <string>
 
 #include "SpatializerPoseController.h"
 
@@ -84,10 +91,13 @@
  * spatializer mixer thread is destroyed.
  */
 class Spatializer : public media::BnSpatializer,
+                    public AudioEffect::IAudioEffectCallback,
                     public IBinder::DeathRecipient,
-                    private SpatializerPoseController::Listener {
+                    private SpatializerPoseController::Listener,
+                    public virtual AudioSystem::SupportedLatencyModesCallback {
   public:
-    static sp<Spatializer> create(SpatializerPolicyCallback *callback);
+    static sp<Spatializer> create(SpatializerPolicyCallback* callback,
+                                  const sp<EffectsFactoryHalInterface>& effectsFactoryHal);
 
            ~Spatializer() override;
 
@@ -112,6 +122,7 @@
     binder::Status setScreenSensor(int sensorHandle) override;
     binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
     binder::Status setHingeAngle(float hingeAngle) override;
+    binder::Status setFoldState(bool folded) override;
     binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
     binder::Status registerHeadTrackingCallback(
         const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
@@ -122,6 +133,10 @@
     /** IBinder::DeathRecipient. Listen to the death of the INativeSpatializerCallback. */
     virtual void binderDied(const wp<IBinder>& who);
 
+    /** SupportedLatencyModesCallback */
+    void onSupportedLatencyModesChanged(
+            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) override;
+
     /** Registers a INativeSpatializerCallback when a client is attached to this Spatializer
      * by audio policy service.
      */
@@ -150,6 +165,23 @@
 
     void calculateHeadPose();
 
+    /** Convert fields in Spatializer and sub-modules to a string. Thread-safety analysis is
+     * disabled here because we want to dump mutex-guarded members even when try_lock fails,
+     * to provide as much information as possible for debugging purposes. */
+    std::string toString(unsigned level) const NO_THREAD_SAFETY_ANALYSIS;
+
+    static std::string toString(audio_latency_mode_t mode) {
+        // We convert to the AIDL type to print (eventually the legacy type will be removed).
+        const auto result = legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode);
+        return result.has_value() ?
+                media::audio::common::toString(*result) : "unknown_latency_mode";
+    }
+
+    // If the Spatializer is not created, we send the status for metrics purposes.
+    // OK:      Spatializer not expected to be created.
+    // NO_INIT: Spatializer creation failed.
+    static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -162,6 +194,8 @@
 
     void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
     void onActualModeChangeMsg(media::HeadTrackingMode mode);
+    void onSupportedLatencyModesChangedMsg(
+            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
     static constexpr int kMaxEffectParamValues = 10;
     /**
@@ -274,7 +308,7 @@
         return NO_ERROR;
     }
 
-    void postFramesProcessedMsg(int frames);
+    virtual void onFramesProcessed(int32_t framesProcessed) override;
 
     /**
      * Checks if head and screen sensors must be actively monitored based on
@@ -295,13 +329,21 @@
      */
     void checkEngineState_l() REQUIRES(mLock);
 
+    /**
+     * Reset head tracking mode and recenter pose in engine: Called when the head tracking
+     * is disabled.
+     */
+    void resetEngineHeadPose_l() REQUIRES(mLock);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
     SpatializerPolicyCallback* const mPolicyCallback;
 
     /** Currently there is only one version of the spatializer running */
-    const std::string mMetricsId = AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
+    static constexpr const char* kDefaultMetricsId =
+            AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
+    const std::string mMetricsId = kDefaultMetricsId;
 
     /** Mutex protecting internal state */
     mutable std::mutex mLock;
@@ -338,8 +380,13 @@
     int32_t mScreenSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
 
     /** Last display orientation received */
-    static constexpr float kDisplayOrientationInvalid = 1000;
-    float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
+    float mDisplayOrientation GUARDED_BY(mLock) = 0.f;  // aligned to natural up orientation.
+
+    /** Last folded state */
+    bool mFoldedState GUARDED_BY(mLock) = false;  // foldable: true means folded.
+
+    /** Last hinge angle */
+    float mHingeAngle GUARDED_BY(mLock) = 0.f;  // foldable: 0.f is closed, M_PI flat open.
 
     std::vector<media::SpatializationLevel> mLevels;
     std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
@@ -354,10 +401,25 @@
     sp<EngineCallbackHandler> mHandler;
 
     size_t mNumActiveTracks GUARDED_BY(mLock) = 0;
+    std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mLock);
 
-    static const std::vector<const char *> sHeadPoseKeys;
-};
+    static const std::vector<const char*> sHeadPoseKeys;
 
+    // Local log for command messages.
+    static constexpr int mMaxLocalLogLine = 10;
+    SimpleLog mLocalLog{mMaxLocalLogLine};
+
+    /**
+     * @brief Calculate and record sensor data.
+     * Dump to local log with max/average pose angle every mPoseRecordThreshold.
+     */
+    // Record one log line per second (up to mMaxLocalLogLine) to capture most recent sensor data.
+    media::VectorRecorder mPoseRecorder GUARDED_BY(mLock) {
+        6 /* vectorSize */, std::chrono::seconds(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
+    // Record one log line per minute (up to mMaxLocalLogLine) to capture durable sensor data.
+    media::VectorRecorder mPoseDurableRecorder GUARDED_BY(mLock) {
+        6 /* vectorSize */, std::chrono::minutes(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
+};  // Spatializer
 
 }; // namespace android
 
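The two VectorRecorder members declared above summarize pose vectors at different cadences (per second and per minute) so the dump keeps both recent and longer-lived history without unbounded growth. A minimal usage sketch follows; the constructor arguments mirror the declarations above and the record()/toString() calls mirror how they are used elsewhere in this change, but the exact parameter and return types of VectorRecorder are assumed.

    #include <media/VectorRecorder.h>

    #include <chrono>
    #include <string>
    #include <vector>

    namespace example {

    std::string recordAndDump() {
        // 6-element vectors, one summary line per second, at most 10 log lines,
        // with a delimiter after element 3 (e.g. to separate translation from rotation).
        android::media::VectorRecorder recorder{
            6 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */, { 3 } /* delimiterIdx */};

        recorder.record(std::vector<float>{0.f, 0.f, 0.f, 1.f, 2.f, 3.f});  // one pose sample
        return recorder.toString(2 /* indentation level */);                // indented history text
    }

    }  // namespace example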
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 304d44a..874bde4 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -13,12 +13,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 #include "SpatializerPoseController.h"
+#include <android-base/stringprintf.h>
+#include <chrono>
+#include <cstdint>
+#include <string>
 
 #define LOG_TAG "SpatializerPoseController"
 //#define LOG_NDEBUG 0
+#include <cutils/properties.h>
 #include <sensor/Sensor.h>
 #include <media/MediaMetricsItem.h>
+#include <media/QuaternionUtil.h>
 #include <utils/Log.h>
 #include <utils/SystemClock.h>
 
@@ -39,16 +46,24 @@
 constexpr float kMaxTranslationalVelocity = 2;
 
 // This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
-constexpr float kMaxRotationalVelocity = 8;
+constexpr float kMaxRotationalVelocity = 0.8f;
 
-// This is how far into the future we predict the head pose, using linear extrapolation based on
-// twist (velocity). It should be set to a value that matches the characteristic durations of moving
-// one's head. The higher we set this, the more latency we are able to reduce, but setting this too
-// high will result in high prediction errors whenever the head accelerates (changes velocity).
-constexpr auto kPredictionDuration = 50ms;
+// This is how far into the future we predict the head pose.
+// The prediction duration should be based on the actual latency from
+// head-tracker to audio output, though setting the prediction duration too
+// high may result in higher prediction errors when the head accelerates or
+// decelerates (changes velocity).
+//
+// The head tracking predictor makes a best effort to achieve the requested
+// prediction duration.  If the duration is too far in the future given the
+// current sensor variance, the predictor may internally restrict the duration to
+// what is achievable with reasonable confidence as the "best prediction".
+constexpr auto kPredictionDuration = 120ms;
 
 // After not getting a pose sample for this long, we would treat the measurement as stale.
-constexpr auto kFreshnessTimeout = 50ms;
+// The max connection interval is 50ms, and the HT sensor event interval can vary with the sampling
+// rate, scheduling, the sensor event queue FIFO, etc. 120 ms (2 * 50 + 20) seems reasonable for now.
+constexpr auto kFreshnessTimeout = 120ms;
 
 // Auto-recenter kicks in after the head has been still for this long.
 constexpr auto kAutoRecenterWindowDuration = 6s;
@@ -61,13 +76,13 @@
 
 // Screen is considered to be unstable (not still) if it has moved significantly within the last
 // time window of this duration.
-constexpr auto kScreenStillnessWindowDuration = 3s;
+constexpr auto kScreenStillnessWindowDuration = 750ms;
 
 // Screen is considered to have moved significantly if translated by this much (in meter, approx).
 constexpr float kScreenStillnessTranslationThreshold = 0.1f;
 
 // Screen is considered to have moved significantly if rotated by this much (in radians, approx).
-constexpr float kScreenStillnessRotationThreshold = 7.0f / 180 * M_PI;
+constexpr float kScreenStillnessRotationThreshold = 15.0f / 180 * M_PI;
 
 // Time units for system clock ticks. This is what the Sensor Framework timestamps represent and
 // what we use for pose filtering.
@@ -91,7 +106,15 @@
               .maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
               .maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
               .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
-              .predictionDuration = Ticks(kPredictionDuration).count(),
+              .predictionDuration = []() -> float {
+                  const int duration_ms =
+                          property_get_int32("audio.spatializer.prediction_duration_ms", -1);
+                  if (duration_ms >= 0) {
+                      return duration_ms * 1'000'000LL;
+                  } else {
+                      return Ticks(kPredictionDuration).count();
+                  }
+              }(),
               .autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
               .autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
               .autoRecenterRotationalThreshold = kAutoRecenterRotationThreshold,
@@ -100,7 +123,10 @@
               .screenStillnessRotationalThreshold = kScreenStillnessRotationThreshold,
       })),
       mPoseProvider(SensorPoseProvider::create("headtracker", this)),
-      mThread([this, maxUpdatePeriod] {
+      mThread([this, maxUpdatePeriod] { // It's important that mThread is initialized after
+                                        // everything else because it runs a member
+                                        // function that may use any member
+                                        // of this class.
           while (true) {
               Pose3f headToStage;
               std::optional<HeadTrackingMode> modeIfChanged;
@@ -136,7 +162,14 @@
                   mShouldCalculate = false;
               }
           }
-      }) {}
+      }) {
+          const media::PosePredictorType posePredictorType =
+                  (media::PosePredictorType)
+                  property_get_int32("audio.spatializer.pose_predictor_type", -1);
+          if (isValidPosePredictorType(posePredictorType)) {
+              mProcessor->setPosePredictorType(posePredictorType);
+          }
+      }
 
 SpatializerPoseController::~SpatializerPoseController() {
     {
@@ -183,7 +216,7 @@
         mHeadSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
+    mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */, __func__);
 }
 
 void SpatializerPoseController::setScreenSensor(int32_t sensor) {
@@ -220,7 +253,7 @@
         mScreenSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */);
+    mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */, __func__);
 }
 
 void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
@@ -267,25 +300,132 @@
 
 void SpatializerPoseController::recenter() {
     std::lock_guard lock(mMutex);
-    mProcessor->recenter();
+    mProcessor->recenter(true /* recenterHead */, true /* recenterScreen */, __func__);
 }
 
 void SpatializerPoseController::onPose(int64_t timestamp, int32_t sensor, const Pose3f& pose,
                                        const std::optional<Twist3f>& twist, bool isNewReference) {
     std::lock_guard lock(mMutex);
+    constexpr float NANOS_TO_MILLIS = 1e-6;
+    constexpr float RAD_TO_DEGREE = 180.f / M_PI;
+
+    const float delayMs = (elapsedRealtimeNano() - timestamp) * NANOS_TO_MILLIS; // CLOCK_BOOTTIME
+
     if (sensor == mHeadSensor) {
+        std::vector<float> pryprydt(8);  // pitch, roll, yaw, d_pitch, d_roll, d_yaw,
+                                         // discontinuity, timestamp_delay
+        media::quaternionToAngles(pose.rotation(), &pryprydt[0], &pryprydt[1], &pryprydt[2]);
+        if (twist) {
+            const auto rotationalVelocity = twist->rotationalVelocity();
+            // The rotational velocity is an intrinsic transform (i.e. based on the head
+            // coordinate system, not the world coordinate system).  It is a 3 element vector:
+            // axis (d theta / dt).
+            //
+            // We leave rotational velocity relative to the head coordinate system,
+            // as the initial head tracking sensor's world frame is arbitrary.
+            media::quaternionToAngles(media::rotationVectorToQuaternion(rotationalVelocity),
+                    &pryprydt[3], &pryprydt[4], &pryprydt[5]);
+        }
+        pryprydt[6] = isNewReference;
+        pryprydt[7] = delayMs;
+        for (size_t i = 0; i < 6; ++i) {
+            // pitch, roll, yaw in degrees, referenced to the world frame.
+            // d_pitch, d_roll, d_yaw rotational velocity in degrees/s, relative to the head frame.
+            pryprydt[i] *= RAD_TO_DEGREE;
+        }
+        mHeadSensorRecorder.record(pryprydt);
+        mHeadSensorDurableRecorder.record(pryprydt);
+
         mProcessor->setWorldToHeadPose(timestamp, pose,
                                        twist.value_or(Twist3f()) / kTicksPerSecond);
         if (isNewReference) {
-            mProcessor->recenter(true, false);
+            mProcessor->recenter(true, false, __func__);
         }
     }
     if (sensor == mScreenSensor) {
+        std::vector<float> pryt{ 0.f, 0.f, 0.f, delayMs}; // pitch, roll, yaw, timestamp_delay
+        media::quaternionToAngles(pose.rotation(), &pryt[0], &pryt[1], &pryt[2]);
+        for (size_t i = 0; i < 3; ++i) {
+            pryt[i] *= RAD_TO_DEGREE;
+        }
+        mScreenSensorRecorder.record(pryt);
+        mScreenSensorDurableRecorder.record(pryt);
+
         mProcessor->setWorldToScreenPose(timestamp, pose);
         if (isNewReference) {
-            mProcessor->recenter(false, true);
+            mProcessor->recenter(false, true, __func__);
         }
     }
 }
 
+std::string SpatializerPoseController::toString(unsigned level) const {
+    std::string prefixSpace(level, ' ');
+    std::string ss = prefixSpace + "SpatializerPoseController:\n";
+    bool needUnlock = false;
+
+    prefixSpace += ' ';
+    auto now = std::chrono::steady_clock::now();
+    if (!mMutex.try_lock_until(now + media::kSpatializerDumpSysTimeOutInSecond)) {
+        ss.append(prefixSpace).append("try_lock failed, dumpsys maybe INACCURATE!\n");
+    } else {
+        needUnlock = true;
+    }
+
+    ss += prefixSpace;
+    if (mHeadSensor == INVALID_SENSOR) {
+        ss += "HeadSensor: INVALID\n";
+    } else {
+        base::StringAppendF(&ss, "HeadSensor: 0x%08x "
+            "(active world-to-head : head-relative velocity) "
+            "[ pitch, roll, yaw : d_pitch, d_roll, d_yaw : disc : delay ] "
+            "(degrees, degrees/s, bool, ms)\n", mHeadSensor);
+        ss.append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mHeadSensorDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mHeadSensorRecorder.toString(level + 3));
+    }
+
+    ss += prefixSpace;
+    if (mScreenSensor == INVALID_SENSOR) {
+        ss += "ScreenSensor: INVALID\n";
+    } else {
+        base::StringAppendF(&ss, "ScreenSensor: 0x%08x (active world-to-screen) "
+            "[ pitch, roll, yaw : delay ] "
+            "(degrees, ms)\n", mScreenSensor);
+        ss.append(prefixSpace)
+            .append(" PerMinuteHistory:\n")
+            .append(mScreenSensorDurableRecorder.toString(level + 3))
+            .append(prefixSpace)
+            .append(" PerSecondHistory:\n")
+            .append(mScreenSensorRecorder.toString(level + 3));
+    }
+
+    ss += prefixSpace;
+    if (mActualMode.has_value()) {
+        base::StringAppendF(&ss, "ActualMode: %s\n", media::toString(mActualMode.value()).c_str());
+    } else {
+        ss += "ActualMode NOTEXIST\n";
+    }
+
+    if (mProcessor) {
+        ss += mProcessor->toString_l(level + 1);
+    } else {
+        ss.append(prefixSpace.c_str()).append("HeadTrackingProcessor not exist\n");
+    }
+
+    if (mPoseProvider) {
+        ss += mPoseProvider->toString(level + 1);
+    } else {
+        ss.append(prefixSpace.c_str()).append("SensorPoseProvider not exist\n");
+    }
+
+    if (needUnlock) {
+        mMutex.unlock();
+    }
+    // TODO: 233092747 add history sensor info with SimpleLog.
+    return ss;
+}
+
 }  // namespace android
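The constructor above lets two system properties override compile-time tuning: audio.spatializer.prediction_duration_ms replaces kPredictionDuration, and audio.spatializer.pose_predictor_type selects the predictor. A minimal sketch of that read-property-with-fallback pattern follows; the helper function below is hypothetical, while the property name, default sentinel, and 120ms fallback are the ones used above.

    #include <cutils/properties.h>

    #include <chrono>
    #include <cstdint>

    namespace example {

    using namespace std::chrono_literals;

    // Returns the prediction duration in nanoseconds, preferring the sysprop when it is set (>= 0).
    int64_t getPredictionDurationNs() {
        constexpr auto kDefaultPredictionDuration = 120ms;
        const int32_t overrideMs =
                property_get_int32("audio.spatializer.prediction_duration_ms", -1 /* default */);
        if (overrideMs >= 0) {
            return int64_t{overrideMs} * 1'000'000;  // ms -> ns
        }
        return std::chrono::duration_cast<std::chrono::nanoseconds>(
                kDefaultPredictionDuration).count();
    }

    }  // namespace example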
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 2c6d79a..7fa4f86 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -24,6 +24,7 @@
 
 #include <media/HeadTrackingProcessor.h>
 #include <media/SensorPoseProvider.h>
+#include <media/VectorRecorder.h>
 
 namespace android {
 
@@ -113,22 +114,43 @@
      */
     void waitUntilCalculated();
 
+    // Convert fields to a printable string.
+    std::string toString(unsigned level) const;
+
   private:
-    mutable std::mutex mMutex;
+    mutable std::timed_mutex mMutex;
     Listener* const mListener;
     const std::chrono::microseconds mSensorPeriod;
-    // Order matters for the following two members to ensure correct destruction.
     std::unique_ptr<media::HeadTrackingProcessor> mProcessor;
-    std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
     int32_t mHeadSensor = media::SensorPoseProvider::INVALID_HANDLE;
     int32_t mScreenSensor = media::SensorPoseProvider::INVALID_HANDLE;
     std::optional<media::HeadTrackingMode> mActualMode;
-    std::thread mThread;
-    std::condition_variable mCondVar;
+    std::condition_variable_any mCondVar;
     bool mShouldCalculate = true;
     bool mShouldExit = false;
     bool mCalculated = false;
 
+    media::VectorRecorder mHeadSensorRecorder{
+        8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        { 3, 6, 7 } /* delimiterIdx */};
+    media::VectorRecorder mHeadSensorDurableRecorder{
+        8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        { 3, 6, 7 } /* delimiterIdx */};
+
+    media::VectorRecorder mScreenSensorRecorder{
+        4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+        { 3 } /* delimiterIdx */};
+    media::VectorRecorder mScreenSensorDurableRecorder{
+        4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+        { 3 } /* delimiterIdx */};
+
+    // Next-to-last member, as releasing this stops the callbacks.
+    std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
+
+    // It's important that mThread is the last member in this class
+    // since we start mThread in the constructor's initializer list.
+    std::thread mThread;
+
     void onPose(int64_t timestamp, int32_t sensor, const media::Pose3f& pose,
                 const std::optional<media::Twist3f>& twist, bool isNewReference) override;
 
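The reordered members above rely on a C++ guarantee: non-static members are constructed in declaration order and destroyed in reverse. Declaring mPoseProvider next-to-last stops sensor callbacks before the rest is torn down, and declaring mThread last means the thread started in the initializer list only ever sees fully constructed members. A minimal sketch of the same idea follows, using a hypothetical class unrelated to this patch.

    #include <atomic>
    #include <thread>

    namespace example {

    class Worker {
      public:
        Worker() : mThread([this] {
            while (!mStop) {
                mCounter.fetch_add(1);  // safe: mCounter and mStop are already constructed
            }
        }) {}

        ~Worker() {
            mStop = true;
            mThread.join();  // the destructor body runs before any member is destroyed
        }

      private:
        std::atomic<int> mCounter{0};
        std::atomic<bool> mStop{false};
        // Declared last: constructed after (and destroyed before) everything the thread may use.
        std::thread mThread;
    };

    }  // namespace example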
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 2e220bc..285b354 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -10,26 +10,32 @@
 cc_test {
     name: "audiopolicy_tests",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_static",
+    ],
+
     include_dirs: [
         "frameworks/av/services/audiopolicy",
     ],
 
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libaudioclient",
         "libaudiofoundation",
         "libaudiopolicy",
         "libaudiopolicymanagerdefault",
         "libbase",
+        "libbinder",
+        "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia_helper",
         "libutils",
+        "libcutils",
         "libxml2",
-        "framework-permission-aidl-cpp",
-        "libbinder",
     ],
 
     static_libs: [
+        "audioclient-types-aidl-cpp",
         "libaudiopolicycomponents",
         "libgmock",
     ],
@@ -49,28 +56,37 @@
         "-Wall",
     ],
 
-    test_suites: ["device-tests"],
+    test_suites: [
+        "device-tests",
+        "automotive-tests",
+    ],
 
 }
 
 
 cc_test {
     name: "audio_health_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     require_root: true,
 
     shared_libs: [
-        "libaudiofoundation",
+        "audioclient-types-aidl-cpp",
         "libaudioclient",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
         "libaudiopolicymanagerdefault",
+        "libcutils",
         "liblog",
         "libmedia_helper",
-        "libutils",
-        "android.media.audio.common.types-V1-cpp",
-        "libaudioclient_aidl_conversion",
-        "libstagefright_foundation",
         "libshmemcompat",
         "libshmemutil",
-        "audioclient-types-aidl-cpp",
+        "libstagefright_foundation",
+        "libutils",
+        "libxml2",
     ],
 
     static_libs: ["libaudiopolicycomponents"],
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 057fa58..3629c16 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -103,8 +103,13 @@
         ++mAudioPortListUpdateCount;
     }
 
-    status_t setDeviceConnectedState(
-            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port,
+                                     media::DeviceConnectedState state) override {
+        if (state == media::DeviceConnectedState::CONNECTED) {
+            mConnectedDevicePorts.push_back(*port);
+        } else if (state == media::DeviceConnectedState::DISCONNECTED) {
+            mDisconnectedDevicePorts.push_back(*port);
+        }
         return NO_ERROR;
     }
 
@@ -127,8 +132,6 @@
 
     size_t getAudioPortListUpdateCount() const { return mAudioPortListUpdateCount; }
 
-    virtual void addSupportedFormat(audio_format_t /* format */) {}
-
     void onRoutingUpdated() override {
         mRoutingUpdatedUpdateCount++;
     }
@@ -150,6 +153,62 @@
         return NO_ERROR;
     }
 
+    size_t getConnectedDevicePortCount() const {
+        return mConnectedDevicePorts.size();
+    }
+
+    const struct audio_port_v7 *getLastConnectedDevicePort() const {
+        if (mConnectedDevicePorts.empty()) {
+            return nullptr;
+        }
+        auto it = --mConnectedDevicePorts.end();
+        return &(*it);
+    }
+
+    size_t getDisconnectedDevicePortCount() const {
+        return mDisconnectedDevicePorts.size();
+    }
+
+    const struct audio_port_v7 *getLastDisconnectedDevicePort() const {
+        if (mDisconnectedDevicePorts.empty()) {
+            return nullptr;
+        }
+        auto it = --mDisconnectedDevicePorts.end();
+        return &(*it);
+    }
+
+    String8 getParameters(audio_io_handle_t /* ioHandle */, const String8& /* keys */) override {
+        AudioParameter mAudioParameters;
+        std::string formats;
+        for (const auto& f : mSupportedFormats) {
+            if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+            formats += audio_format_to_string(f);
+        }
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedFormats),
+                String8(formats.c_str()));
+        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+        std::string channelMasks;
+        for (const auto& cm : mSupportedChannelMasks) {
+            if (!audio_channel_mask_is_valid(cm)) {
+                continue;
+            }
+            if (!channelMasks.empty()) channelMasks += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+            channelMasks += audio_channel_mask_to_string(cm);
+        }
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedChannels), String8(channelMasks.c_str()));
+        return mAudioParameters.toString();
+    }
+
+    void addSupportedFormat(audio_format_t format) {
+        mSupportedFormats.insert(format);
+    }
+
+    void addSupportedChannelMask(audio_channel_mask_t channelMask) {
+        mSupportedChannelMasks.insert(channelMask);
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -158,6 +217,10 @@
     std::set<std::string> mAllowedModuleNames;
     size_t mAudioPortListUpdateCount = 0;
     size_t mRoutingUpdatedUpdateCount = 0;
+    std::vector<struct audio_port_v7> mConnectedDevicePorts;
+    std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
+    std::set<audio_format_t> mSupportedFormats;
+    std::set<audio_channel_mask_t> mSupportedChannelMasks;
 };
 
 } // namespace android
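The test client above now records device (dis)connections and synthesizes getParameters() replies from the formats and channel masks a test registers, which is what the deleted HDMI-specific client used to do. A small gtest sketch of how those hooks can be exercised directly follows; the test name and expected strings are illustrative only, and the exact spelling returned by audio_format_to_string() is an assumption.

    #include <cstring>

    #include <gtest/gtest.h>
    #include <system/audio.h>
    #include <utils/String8.h>

    #include "AudioPolicyManagerTestClient.h"

    namespace android {

    TEST(AudioPolicyManagerTestClientSketch, ReportsRegisteredFormatsAndChannels) {
        AudioPolicyManagerTestClient client;
        client.addSupportedFormat(AUDIO_FORMAT_E_AC3);
        client.addSupportedChannelMask(AUDIO_CHANNEL_OUT_5POINT1);

        const String8 reply = client.getParameters(AUDIO_IO_HANDLE_NONE, String8(""));
        EXPECT_NE(nullptr, strstr(reply.c_str(), "AUDIO_FORMAT_E_AC3"));  // format is advertised

        EXPECT_EQ(0u, client.getConnectedDevicePortCount());    // nothing connected yet
        EXPECT_EQ(nullptr, client.getLastConnectedDevicePort());
    }

    }  // namespace android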
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
deleted file mode 100644
index 7343b9b..0000000
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <map>
-#include <set>
-
-#include <system/audio.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include "AudioPolicyTestClient.h"
-
-namespace android {
-
-class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
-public:
-    String8 getParameters(audio_io_handle_t /* ioHandle */, const String8&  /* keys*/ ) override {
-        AudioParameter mAudioParameters;
-        std::string formats;
-        for (const auto& f : mSupportedFormats) {
-            if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
-            formats += audio_format_to_string(f);
-        }
-        mAudioParameters.add(
-                String8(AudioParameter::keyStreamSupportedFormats),
-                String8(formats.c_str()));
-        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
-        mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
-        return mAudioParameters.toString();
-    }
-
-    void addSupportedFormat(audio_format_t format) override {
-        mSupportedFormats.insert(format);
-    }
-
-private:
-    std::set<audio_format_t> mSupportedFormats;
-};
-
-} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 8a85fee..2ae0e97 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -25,6 +25,9 @@
     virtual ~AudioPolicyTestClient() = default;
 
     // AudioPolicyClientInterface Implementation
+    status_t getAudioPolicyConfig(media::AudioPolicyConfig* /*config*/) override {
+        return INVALID_OPERATION;
+    }
     audio_module_handle_t loadHwModule(const char* /*name*/) override {
         return AUDIO_MODULE_HANDLE_NONE;
     }
@@ -54,7 +57,6 @@
                              float /*volume*/,
                              audio_io_handle_t /*output*/,
                              int /*delayMs*/) override { return NO_INIT; }
-    status_t invalidateStream(audio_stream_type_t /*stream*/) override { return NO_INIT; }
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
                        int /*delayMs*/) override { }
@@ -97,8 +99,11 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
         return NO_INIT;
     }
-    status_t setDeviceConnectedState(
-            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port __unused,
+                                     media::DeviceConnectedState state __unused) override {
+        return NO_INIT;
+    }
+    status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
         return NO_INIT;
     }
 };
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 7441f20..31ee252 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -22,9 +22,13 @@
 class AudioPolicyTestManager : public AudioPolicyManager {
   public:
     explicit AudioPolicyTestManager(AudioPolicyClientInterface *clientInterface)
-            : AudioPolicyManager(clientInterface, true /*forTesting*/) { }
+            : AudioPolicyTestManager(AudioPolicyConfig::createDefault(), clientInterface) {}
+    AudioPolicyTestManager(const sp<const AudioPolicyConfig>& config,
+            AudioPolicyClientInterface *clientInterface)
+            : AudioPolicyManager(config,
+                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+                    clientInterface) {}
     using AudioPolicyManager::getConfig;
-    using AudioPolicyManager::loadConfig;
     using AudioPolicyManager::initialize;
     using AudioPolicyManager::getOutputs;
     using AudioPolicyManager::getAvailableOutputDevices;
@@ -37,6 +41,7 @@
     using AudioPolicyManager::getDirectProfilesForAttributes;
     using AudioPolicyManager::setDeviceConnectionState;
     using AudioPolicyManager::deviceToAudioPort;
+    using AudioPolicyManager::handleDeviceConfigChange;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index 10f8dc0..70a3022 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -21,6 +21,7 @@
 
 #include <gtest/gtest.h>
 
+#include <AudioPolicyConfig.h>
 #include <media/AudioSystem.h>
 #include <media/TypeConverter.h>
 #include <system/audio.h>
@@ -65,19 +66,17 @@
     }
     free(audioPorts);
 
-    AudioPolicyManagerTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.loadConfig();
-    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
+    ASSERT_NE(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
 
-    for (auto desc : manager.getConfig().getInputDevices()) {
+    for (const auto& desc : config->getInputDevices()) {
         if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
             std::string deviceType;
             (void)DeviceConverter::toString(desc->type(), deviceType);
             ADD_FAILURE() << "Input device \"" << deviceType << "\" not found";
         }
     }
-    for (auto desc : manager.getConfig().getOutputDevices()) {
+    for (const auto& desc : config->getOutputDevices()) {
         if (attachedDevices.find(desc->type()) == attachedDevices.end()) {
             std::string deviceType;
             (void)DeviceConverter::toString(desc->type(), deviceType);
@@ -87,13 +86,13 @@
 }
 
 TEST(AudioHealthTest, ConnectSupportedDevice) {
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback();
+    ASSERT_NE(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
     AudioPolicyManagerTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.loadConfig();
-    ASSERT_NE("AudioPolicyConfig::setDefault", manager.getConfig().getSource());
+    AudioPolicyTestManager manager(config, &client);
 
     DeviceVector devices;
-    for (const auto& hwModule : manager.getConfig().getHwModules()) {
+    for (const auto& hwModule : config->getHwModules()) {
         for (const auto& profile : hwModule->getOutputProfiles()) {
             devices.merge(profile->getSupportedDevices());
         }
@@ -111,7 +110,7 @@
             continue;
         }
         std::string address = "11:22:33:44:55:66";
-        media::AudioPort aidlPort;
+        media::AudioPortFw aidlPort;
         ASSERT_EQ(OK, manager.deviceToAudioPort(device->type(), address.c_str(), "" /*name*/,
                                                  &aidlPort));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 5429176..5e58dbb 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <cstring>
 #include <memory>
 #include <string>
 #include <sys/wait.h>
@@ -31,10 +32,10 @@
 #include <media/RecordingActivityTracker.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
+#include <cutils/multiuser.h>
 
 #include "AudioPolicyInterface.h"
 #include "AudioPolicyManagerTestClient.h"
-#include "AudioPolicyManagerTestClientForHdmi.h"
 #include "AudioPolicyTestClient.h"
 #include "AudioPolicyTestManager.h"
 
@@ -42,11 +43,93 @@
 using testing::UnorderedElementsAre;
 using android::content::AttributionSourceState;
 
+namespace {
+
+AudioMixMatchCriterion createUidCriterion(uint32_t uid, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mUid = uid;
+    criterion.mRule = exclude ? RULE_EXCLUDE_UID : RULE_MATCH_UID;
+    return criterion;
+}
+
+AudioMixMatchCriterion createUserIdCriterion(int userId, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mUserId = userId;
+    criterion.mRule = exclude ? RULE_EXCLUDE_USERID : RULE_MATCH_USERID;
+    return criterion;
+}
+
+AudioMixMatchCriterion createUsageCriterion(audio_usage_t usage, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mUsage = usage;
+    criterion.mRule = exclude ? RULE_EXCLUDE_ATTRIBUTE_USAGE : RULE_MATCH_ATTRIBUTE_USAGE;
+    return criterion;
+}
+
+AudioMixMatchCriterion createCapturePresetCriterion(audio_source_t source, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mSource = source;
+    criterion.mRule = exclude ?
+        RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET : RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
+    return criterion;
+}
+
+AudioMixMatchCriterion createSessionIdCriterion(audio_session_t session, bool exclude = false) {
+    AudioMixMatchCriterion criterion;
+    criterion.mValue.mAudioSessionId = session;
+    criterion.mRule = exclude ?
+        RULE_EXCLUDE_AUDIO_SESSION_ID : RULE_MATCH_AUDIO_SESSION_ID;
+    return criterion;
+}
+
+// TODO b/182392769: use attribution source util
+AttributionSourceState createAttributionSourceState(uid_t uid) {
+    AttributionSourceState attributionSourceState;
+    attributionSourceState.uid = uid;
+    attributionSourceState.token = sp<BBinder>::make();
+    return attributionSourceState;
+}
+
+} // namespace
+
+TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
+    auto config = AudioPolicyConfig::createWritableForTests();
+    EXPECT_TRUE(config->getSource().empty());
+    EXPECT_TRUE(config->getHwModules().isEmpty());
+    EXPECT_TRUE(config->getInputDevices().isEmpty());
+    EXPECT_TRUE(config->getOutputDevices().isEmpty());
+}
+
+TEST(AudioPolicyConfigTest, FallbackToDefault) {
+    auto config = AudioPolicyConfig::loadFromApmXmlConfigWithFallback(
+            base::GetExecutableDirectory() + "/test_invalid_audio_policy_configuration.xml");
+    EXPECT_EQ(AudioPolicyConfig::kDefaultConfigSource, config->getSource());
+}
+
+TEST(AudioPolicyConfigTest, LoadForTests) {
+    {
+        auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(
+                base::GetExecutableDirectory() + "/test_invalid_audio_policy_configuration.xml");
+        EXPECT_FALSE(result.ok());
+    }
+    {
+        const std::string source =
+                base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml";
+        auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(source);
+        ASSERT_TRUE(result.ok());
+        EXPECT_EQ(source, result.value()->getSource());
+        EXPECT_FALSE(result.value()->getHwModules().isEmpty());
+        EXPECT_FALSE(result.value()->getInputDevices().isEmpty());
+        EXPECT_FALSE(result.value()->getOutputDevices().isEmpty());
+    }
+}
+
 TEST(AudioPolicyManagerTestInit, EngineFailure) {
     AudioPolicyTestClient client;
-    AudioPolicyTestManager manager(&client);
-    manager.getConfig().setDefault();
-    manager.getConfig().setEngineLibraryNameSuffix("non-existent");
+    auto config = AudioPolicyConfig::createWritableForTests();
+    config->setDefault();
+    config->setEngineLibraryNameSuffix("non-existent");
+    AudioPolicyTestManager manager(config, &client);
     ASSERT_EQ(NO_INIT, manager.initialize());
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
@@ -54,41 +137,12 @@
 TEST(AudioPolicyManagerTestInit, ClientFailure) {
     AudioPolicyTestClient client;
     AudioPolicyTestManager manager(&client);
-    manager.getConfig().setDefault();
     // Since the default client fails to open anything,
     // APM should indicate that the initialization didn't succeed.
     ASSERT_EQ(NO_INIT, manager.initialize());
     ASSERT_EQ(NO_INIT, manager.initCheck());
 }
 
-// Verifies that a failure while loading a config doesn't leave
-// APM config in a "dirty" state. Since AudioPolicyConfig object
-// is a proxy for the data hosted by APM, it isn't possible
-// to "deep copy" it, and thus we have to test its elements
-// individually.
-TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
-    AudioPolicyTestClient client;
-    AudioPolicyTestManager manager(&client);
-    ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
-    ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
-    ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
-    status_t status = deserializeAudioPolicyFile(
-            (base::GetExecutableDirectory() +
-                    "/test_invalid_audio_policy_configuration.xml").c_str(),
-            &manager.getConfig());
-    ASSERT_NE(NO_ERROR, status);
-    EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
-    EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
-    EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
-    status = deserializeAudioPolicyFile(
-            (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
-            &manager.getConfig());
-    ASSERT_EQ(NO_ERROR, status);
-    EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
-    EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
-    EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
-}
-
 
 class PatchCountCheck {
   public:
@@ -125,9 +179,13 @@
             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
             audio_io_handle_t *output = nullptr,
             audio_port_handle_t *portId = nullptr,
-            audio_attributes_t attr = {});
+            audio_attributes_t attr = {},
+            audio_session_t session = AUDIO_SESSION_NONE,
+            int uid = 0,
+            bool* isBitPerfect = nullptr);
     void getInputForAttr(
             const audio_attributes_t &attr,
+            audio_session_t session,
             audio_unique_id_t riid,
             audio_port_handle_t *selectedDeviceId,
             audio_format_t format,
@@ -146,14 +204,17 @@
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
     virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
 
+    sp<AudioPolicyConfig> mConfig;
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
     std::unique_ptr<AudioPolicyTestManager> mManager;
+
+    const uint32_t k48000SamplingRate = 48000;
 };
 
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
-    mManager.reset(new AudioPolicyTestManager(mClient.get()));
     ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -164,7 +225,8 @@
 }
 
 void AudioPolicyManagerTest::SetUpManagerConfig() {
-    mManager->getConfig().setDefault();
+    mConfig = AudioPolicyConfig::createWritableForTests();
+    mConfig->setDefault();
 }
 
 void AudioPolicyManagerTest::dumpToLog() {
@@ -205,7 +267,10 @@
         audio_output_flags_t flags,
         audio_io_handle_t *output,
         audio_port_handle_t *portId,
-        audio_attributes_t attr) {
+        audio_attributes_t attr,
+        audio_session_t session,
+        int uid,
+        bool* isBitPerfect) {
     audio_io_handle_t localOutput;
     if (!output) output = &localOutput;
     *output = AUDIO_IO_HANDLE_NONE;
@@ -219,19 +284,19 @@
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
-    // TODO b/182392769: use attribution source util
-    AttributionSourceState attributionSource = AttributionSourceState();
-    attributionSource.uid = 0;
-    attributionSource.token = sp<BBinder>::make();
+    bool isBitPerfectInternal;
+    AttributionSourceState attributionSource = createAttributionSourceState(uid);
     ASSERT_EQ(OK, mManager->getOutputForAttr(
-                    &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
-                    selectedDeviceId, portId, {}, &outputType, &isSpatialized));
+                    &attr, output, session, &stream, attributionSource, &config, &flags,
+                    selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
 
 void AudioPolicyManagerTest::getInputForAttr(
         const audio_attributes_t &attr,
+        const audio_session_t session,
         audio_unique_id_t riid,
         audio_port_handle_t *selectedDeviceId,
         audio_format_t format,
@@ -248,12 +313,9 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::input_type_t inputType;
-    // TODO b/182392769: use attribution source util
-    AttributionSourceState attributionSource = AttributionSourceState();
-    attributionSource.uid = 0;
-    attributionSource.token = sp<BBinder>::make();
+    AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     ASSERT_EQ(OK, mManager->getInputForAttr(
-            &attr, &input, riid, AUDIO_SESSION_NONE, attributionSource, &config, flags,
+            &attr, &input, riid, session, attributionSource, &config, flags,
             selectedDeviceId, &inputType, portId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
@@ -411,14 +473,13 @@
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUpManagerConfig());
-    AudioPolicyConfig& config = mManager->getConfig();
     mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
     sp<AudioProfile> pcmOutputProfile = new AudioProfile(
-            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     sp<AudioProfile> ac3OutputProfile = new AudioProfile(
-            AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000);
+            AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, k48000SamplingRate);
     sp<AudioProfile> iec958OutputProfile = new AudioProfile(
-            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_OUT_STEREO, 48000);
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_INDEX_MASK_24, k48000SamplingRate);
     mMsdOutputDevice->addAudioProfile(pcmOutputProfile);
     mMsdOutputDevice->addAudioProfile(ac3OutputProfile);
     mMsdOutputDevice->addAudioProfile(iec958OutputProfile);
@@ -427,26 +488,26 @@
     sp<AudioProfile> pcmInputProfile = new AudioProfile(
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, 44100);
     mMsdInputDevice->addAudioProfile(pcmInputProfile);
-    config.addDevice(mMsdOutputDevice);
-    config.addDevice(mMsdInputDevice);
+    mConfig->addDevice(mMsdOutputDevice);
+    mConfig->addDevice(mMsdInputDevice);
 
     if (mExpectedAudioPatchCount == 2) {
         // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
         mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
         mSpdifDevice->addAudioProfile(pcmOutputProfile);
-        config.addDevice(mSpdifDevice);
+        mConfig->addDevice(mSpdifDevice);
 
         sp<OutputProfile> spdifOutputProfile = new OutputProfile("spdif output");
         spdifOutputProfile->addAudioProfile(pcmOutputProfile);
         spdifOutputProfile->addSupportedDevice(mSpdifDevice);
-        config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+        mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
                 addOutputProfile(spdifOutputProfile);
     }
 
     sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
-    HwModuleCollection modules = config.getHwModules();
+    HwModuleCollection modules = mConfig->getHwModules();
     modules.add(msdModule);
-    config.setHwModules(modules);
+    mConfig->setHwModules(modules);
 
     sp<OutputProfile> msdOutputProfile = new OutputProfile("msd input");
     msdOutputProfile->addAudioProfile(pcmOutputProfile);
@@ -473,16 +534,16 @@
     // Add a profile with another encoding to the default device to test routing
     // of streams that are not supported by MSD.
     sp<AudioProfile> dtsOutputProfile = new AudioProfile(
-            AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000);
-    config.getDefaultOutputDevice()->addAudioProfile(dtsOutputProfile);
+            AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, k48000SamplingRate);
+    mConfig->getDefaultOutputDevice()->addAudioProfile(dtsOutputProfile);
     sp<OutputProfile> primaryEncodedOutputProfile = new OutputProfile("encoded");
     primaryEncodedOutputProfile->addAudioProfile(dtsOutputProfile);
     primaryEncodedOutputProfile->setFlags(AUDIO_OUTPUT_FLAG_DIRECT);
-    primaryEncodedOutputProfile->addSupportedDevice(config.getDefaultOutputDevice());
-    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+    primaryEncodedOutputProfile->addSupportedDevice(mConfig->getDefaultOutputDevice());
+    mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
             addOutputProfile(primaryEncodedOutputProfile);
 
-    mDefaultOutputDevice = config.getDefaultOutputDevice();
+    mDefaultOutputDevice = mConfig->getDefaultOutputDevice();
     if (mExpectedAudioPatchCount == 2) {
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
@@ -491,14 +552,14 @@
     // Add HDMI input device with IEC60958 profile for HDMI in -> MSD patching.
     mHdmiInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_HDMI);
     sp<AudioProfile> iec958InputProfile = new AudioProfile(
-            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_IN_STEREO, 48000);
+            AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_INDEX_MASK_24, k48000SamplingRate);
     mHdmiInputDevice->addAudioProfile(iec958InputProfile);
-    config.addDevice(mHdmiInputDevice);
+    mConfig->addDevice(mHdmiInputDevice);
     sp<InputProfile> hdmiInputProfile = new InputProfile("hdmi input");
     hdmiInputProfile->addAudioProfile(iec958InputProfile);
     hdmiInputProfile->setFlags(AUDIO_INPUT_FLAG_DIRECT);
     hdmiInputProfile->addSupportedDevice(mHdmiInputDevice);
-    config.getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
+    mConfig->getHwModules().getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)->
             addInputProfile(hdmiInputProfile);
 }
 
@@ -556,8 +617,8 @@
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId,
-            AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
@@ -566,7 +627,7 @@
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
-            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
@@ -574,13 +635,13 @@
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId,
-            AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
     selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId,
-            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
     ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
 }
@@ -588,8 +649,8 @@
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
     const PatchCountCheck patchCount = snapshotPatchCount();
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId,
-            AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
@@ -600,9 +661,8 @@
         const PatchCountCheck patchCount = snapshotPatchCount();
         audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         audio_port_handle_t portId;
-        getOutputForAttr(&selectedDeviceId,
-                AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
-                nullptr /*output*/, &portId);
+        getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+                k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
@@ -612,9 +672,8 @@
         const PatchCountCheck patchCount = snapshotPatchCount();
         audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
         audio_port_handle_t portId;
-        getOutputForAttr(&selectedDeviceId,
-                AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
-                nullptr /*output*/, &portId);
+        getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+                k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
         ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
@@ -623,8 +682,8 @@
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
         audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-        getOutputForAttr(&selectedDeviceId,
-                AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+        getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+                k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
@@ -651,10 +710,10 @@
     ASSERT_EQ(AUDIO_PORT_ROLE_SINK, patch->mPatch.sinks[0].role);
     ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sources[0].format);
     ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sinks[0].format);
-    ASSERT_EQ(AUDIO_CHANNEL_IN_STEREO, patch->mPatch.sources[0].channel_mask);
-    ASSERT_EQ(AUDIO_CHANNEL_OUT_STEREO, patch->mPatch.sinks[0].channel_mask);
-    ASSERT_EQ(48000, patch->mPatch.sources[0].sample_rate);
-    ASSERT_EQ(48000, patch->mPatch.sinks[0].sample_rate);
+    ASSERT_EQ(AUDIO_CHANNEL_INDEX_MASK_24, patch->mPatch.sources[0].channel_mask);
+    ASSERT_EQ(AUDIO_CHANNEL_INDEX_MASK_24, patch->mPatch.sinks[0].channel_mask);
+    ASSERT_EQ(k48000SamplingRate, patch->mPatch.sources[0].sample_rate);
+    ASSERT_EQ(k48000SamplingRate, patch->mPatch.sinks[0].sample_rate);
     ASSERT_EQ(1, patchCount.deltaFromSnapshot());
 }
 
@@ -667,7 +726,7 @@
     int countDirectProfilesPrimary = 0;
     const auto& primary = mManager->getConfig().getHwModules()
             .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY);
-    for (const auto outputProfile : primary->getOutputProfiles()) {
+    for (const auto& outputProfile : primary->getOutputProfiles()) {
         if (outputProfile->asAudioPort()->isDirectOutput()) {
             countDirectProfilesPrimary += outputProfile->asAudioPort()->getAudioProfiles().size();
         }
@@ -677,7 +736,7 @@
     int countDirectProfilesMsd = 0;
     const auto& msd = mManager->getConfig().getHwModules()
             .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    for (const auto outputProfile : msd->getOutputProfiles()) {
+    for (const auto& outputProfile : msd->getOutputProfiles()) {
         if (outputProfile->asAudioPort()->isDirectOutput()) {
             countDirectProfilesMsd += outputProfile->asAudioPort()->getAudioProfiles().size();
         }
@@ -728,7 +787,7 @@
     audio_config_base_t msdDirectConfig2 = AUDIO_CONFIG_BASE_INITIALIZER;
     msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
     msdDirectConfig2.sample_rate = 48000;
-    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_INDEX_MASK_24;
 
     audio_config_base_t msdNonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -795,7 +854,7 @@
     audio_config_t msdDirectConfig2 = AUDIO_CONFIG_INITIALIZER;
     msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
     msdDirectConfig2.sample_rate = 48000;
-    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_INDEX_MASK_24;
 
     audio_config_t msdNonDirectConfig = AUDIO_CONFIG_INITIALIZER;
     msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -868,9 +927,9 @@
         sExecutableDir + "test_audio_policy_configuration.xml";
 
 void AudioPolicyManagerTestWithConfigurationFile::SetUpManagerConfig() {
-    status_t status = deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
-    ASSERT_EQ(NO_ERROR, status);
-    mManager->getConfig().setSource(getConfigFile());
+    auto result = AudioPolicyConfig::loadFromCustomXmlConfigForTests(getConfigFile());
+    ASSERT_TRUE(result.ok());
+    mConfig = result.value();
 }
 
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, InitSuccess) {
@@ -888,8 +947,8 @@
     audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
     audio_attributes_t attr = {
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
-    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, 1, &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
-                    AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX,
+            &mixPortId));
 
     std::vector<audio_port_v7> ports;
     ASSERT_NO_FATAL_FAILURE(
@@ -919,7 +978,274 @@
     EXPECT_TRUE(foundVoipTx);
 }
 
-using PolicyMixTuple = std::tuple<audio_usage_t, audio_source_t, uint32_t>;
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, HandleDeviceConfigChange) {
+    {
+        const auto prevCounter = mClient->getRoutingUpdatedCounter();
+
+        EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                               AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                               "", "", AUDIO_FORMAT_LDAC));
+        const auto currCounter = mClient->getRoutingUpdatedCounter();
+        EXPECT_GT(currCounter, prevCounter);
+    }
+    {
+        const auto prevCounter = mClient->getRoutingUpdatedCounter();
+        // Update device configuration
+        EXPECT_EQ(NO_ERROR, mManager->handleDeviceConfigChange(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                               "" /*address*/, "" /*name*/,
+                                                               AUDIO_FORMAT_AAC));
+
+        // Since mClient reports isReconfigA2dpSupported as false, the device state needs to be
+        // toggled for the config change to take effect.
+        const auto currCounter = mClient->getRoutingUpdatedCounter();
+        EXPECT_GT(currCounter, prevCounter);
+    }
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferredMixerAttributes) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    audio_port_handle_t maxPortId = 0;
+    audio_port_handle_t speakerPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        maxPortId = std::max(maxPortId, device->getId());
+        if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+            speakerPortId = device->getId();
+        } else if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            usbPortId = device->getId();
+        }
+    }
+
+    const uid_t uid = 1234;
+    const uid_t otherUid = 4321;
+    const audio_attributes_t mediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+    const audio_attributes_t alarmAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ALARM,
+    };
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+    for (const auto attrToSet : mixerAttributes) {
+        audio_mixer_attributes_t attrFromQuery = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+
+        // The given device is not available
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, maxPortId + 1, uid, &attrToSet));
+        // The only allowed device is USB
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, speakerPortId, uid, &attrToSet));
+        // The only allowed usage is media
+        EXPECT_EQ(BAD_VALUE,
+                  mManager->setPreferredMixerAttributes(&alarmAttr, usbPortId, uid, &attrToSet));
+        // Nothing is set yet, so the query must return NAME_NOT_FOUND.
+        EXPECT_EQ(NAME_NOT_FOUND,
+                  mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->setPreferredMixerAttributes(
+                          &mediaAttr, usbPortId, uid, &attrToSet));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+        EXPECT_EQ(attrToSet.config.format, attrFromQuery.config.format);
+        EXPECT_EQ(attrToSet.config.sample_rate, attrFromQuery.config.sample_rate);
+        EXPECT_EQ(attrToSet.config.channel_mask, attrFromQuery.config.channel_mask);
+        EXPECT_EQ(attrToSet.mixer_behavior, attrFromQuery.mixer_behavior);
+        EXPECT_EQ(NAME_NOT_FOUND,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, speakerPortId, uid));
+        EXPECT_EQ(PERMISSION_DENIED,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, otherUid));
+        EXPECT_EQ(NO_ERROR,
+                  mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+    }
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, RoutingChangedWithPreferredMixerAttributes) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            usbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+
+    const uid_t uid = 1234;
+    const audio_attributes_t mediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &mediaAttr, usbPortId, uid, &mixerAttributes[0]));
+
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+            AUDIO_SESSION_NONE, uid);
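+    // startOutput() can return DEAD_OBJECT if the output chosen by getOutputForAttr() has been
+    // closed and reopened in the meantime (presumably while applying the preferred mixer
+    // attributes); in that case re-fetch the output and retry once.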
+    status_t status = mManager->startOutput(portId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+                AUDIO_SESSION_NONE, uid);
+        status = mManager->startOutput(portId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    EXPECT_NE(nullptr, mManager->getOutputs().valueFor(output));
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+    // When the BT device is connected, it is selected as the media device and triggers a routing
+    // change. When this happens, the existing output that was opened with the preferred mixer
+    // attributes is closed and reopened with the default config.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(output));
+
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
+    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
+    const uint32_t bitPerfectSampleRate = 48000;
+    mClient->addSupportedFormat(bitPerfectFormat);
+    mClient->addSupportedChannelMask(bitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            usbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+
+    const uid_t uid = 1234;
+    const uid_t anotherUid = 5678;
+    const audio_attributes_t mediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
+
+    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, output selection follows the default routing
+    // strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+            uid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
+            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
+    status_t status = mManager->startOutput(bitPerfectPortId);
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
+                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
+        status = mManager->startOutput(bitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // If the playback comes from the owner of the preferred mixer attributes but the request
+    // doesn't match those attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+            uid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback is routed to the bit-perfect
+    // output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+            anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, output);
+
+    // When the configuration matches the preferred (bit-perfect) mixer attributes but the client
+    // is not the owner of the preferred mixer attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(bitPerfectOutput, output);
+
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+}
 
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
@@ -927,7 +1253,7 @@
 
     status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
             std::string mixAddress, const audio_config_t& audioConfig,
-            const std::vector<PolicyMixTuple>& rules);
+            const std::vector<AudioMixMatchCriterion>& matchCriteria);
     void clearPolicyMix();
 
     Vector<AudioMix> mAudioMixes;
@@ -941,15 +1267,8 @@
 
 status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(int mixType, int mixFlag,
         audio_devices_t deviceType, std::string mixAddress, const audio_config_t& audioConfig,
-        const std::vector<PolicyMixTuple>& rules) {
-    Vector<AudioMixMatchCriterion> myMixMatchCriteria;
-
-    for(const auto &rule: rules) {
-        myMixMatchCriteria.add(AudioMixMatchCriterion(
-                std::get<0>(rule), std::get<1>(rule), std::get<2>(rule)));
-    }
-
-    AudioMix myAudioMix(myMixMatchCriteria, mixType, audioConfig, mixFlag,
+        const std::vector<AudioMixMatchCriterion>& matchCriteria = {}) {
+    AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
             String8(mixAddress.c_str()), 0);
     myAudioMix.mDeviceType = deviceType;
     // Clear mAudioMix before add new one to make sure we don't add already exist mixes.
@@ -983,23 +1302,22 @@
 
     // Only capture of playback is allowed in LOOP_BACK & RENDER mode
     ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
     // Fails because the device is already connected.
     clearPolicyMix();
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
     // Registering policy mixes with valid parameters for the first time should succeed.
     clearPolicyMix();
     audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    audioConfig.sample_rate = 48000;
+    audioConfig.sample_rate = k48000SamplingRate;
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
-            std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig);
     ASSERT_EQ(NO_ERROR, ret);
     // Registering the same policy mixes should fail.
     ret = mManager->registerPolicyMixes(mAudioMixes);
@@ -1010,22 +1328,23 @@
     // This will need to be updated if earpiece is added in the test configuration file.
     clearPolicyMix();
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
-            AUDIO_DEVICE_OUT_EARPIECE, "", audioConfig, std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_EARPIECE, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
     // Registration should fail due to output not found.
     clearPolicyMix();
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
-    // The first time to register valid policy mixes should succeed.
+    // Registering a valid loopback policy mix for the first time should succeed.
     clearPolicyMix();
-    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
-            AUDIO_DEVICE_OUT_SPEAKER, "", audioConfig, std::vector<PolicyMixTuple>());
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
     ASSERT_EQ(NO_ERROR, ret);
-    // Registering the same policy mixes should fail.
-    ret = mManager->registerPolicyMixes(mAudioMixes);
+    // Registering a render policy for the same loopback address should fail.
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
@@ -1035,10 +1354,9 @@
 
     audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    audioConfig.sample_rate = 48000;
+    audioConfig.sample_rate = k48000SamplingRate;
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
-            std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig);
     ASSERT_EQ(NO_ERROR, ret);
 
     // After successfully registering policy mixes, unregistering them should succeed.
@@ -1051,6 +1369,37 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+TEST_F(AudioPolicyManagerTestDynamicPolicy, RegisterPolicyWithConsistentMixSucceeds) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+        createUidCriterion(/*uid=*/42),
+        createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+                                mixMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+}
+
+TEST_F(AudioPolicyManagerTestDynamicPolicy, RegisterPolicyWithInconsistentMixFails) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+        createUidCriterion(/*uid=*/42),
+        createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+        createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+                                mixMatchCriteria);
+    ASSERT_EQ(INVALID_OPERATION, ret);
+}
+
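+// The two tests above use the createUidCriterion()/createUsageCriterion() helpers defined earlier
+// in this file. The second mix is presumably rejected because it combines matching and excluding
+// rules for the same criterion type (uid 42 is matched while uid 1235 is excluded). As a rough
+// sketch (assuming the AudioMixMatchCriterion fields used by the policy engine), a uid criterion
+// is presumably built along these lines:
+//     AudioMixMatchCriterion criterion;
+//     criterion.mValue.mUid = uid;
+//     criterion.mRule = exclude ? RULE_EXCLUDE_UID : RULE_MATCH_UID;
+//     return criterion;
+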
 class AudioPolicyManagerTestForHdmi
         : public AudioPolicyManagerTestWithConfigurationFile,
           public testing::WithParamInterface<audio_format_t> {
@@ -1060,9 +1409,6 @@
     std::map<audio_format_t, bool> getSurroundFormatsHelper();
     std::vector<audio_format_t> getReportedSurroundFormatsHelper();
     std::unordered_set<audio_format_t> getFormatsFromPorts();
-    AudioPolicyManagerTestClient* getClient() override {
-        return new AudioPolicyManagerTestClientForHdmi;
-    }
     void TearDown() override;
 
     static const std::string sTvConfig;
@@ -1273,28 +1619,54 @@
     audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
     audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    audioConfig.sample_rate = 48000;
+    audioConfig.sample_rate = k48000SamplingRate;
     ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
     ASSERT_EQ(INVALID_OPERATION, ret);
 
     ret = mManager->unregisterPolicyMixes(mAudioMixes);
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+struct DPTestParam {
+    DPTestParam(const std::vector<AudioMixMatchCriterion>& mixCriteria,
+                bool expected_match = false)
+     : mixCriteria(mixCriteria), attributes(defaultAttr), session(AUDIO_SESSION_NONE),
+       expected_match(expected_match) {}
+
+    DPTestParam& withUsage(audio_usage_t usage) {
+        attributes.usage = usage;
+        return *this;
+    }
+
+    DPTestParam& withTags(const char *tags) {
+        std::strncpy(attributes.tags, tags, sizeof(attributes.tags));
+        return *this;
+    }
+
+    DPTestParam& withSource(audio_source_t source) {
+        attributes.source = source;
+        return *this;
+    }
+
+    DPTestParam& withSessionId(audio_session_t sessionId) {
+        session = sessionId;
+        return *this;
+    }
+
+    std::vector<AudioMixMatchCriterion> mixCriteria;
+    audio_attributes_t attributes;
+    audio_session_t session;
+    bool expected_match;
+};
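+// The parameterized tests below construct DPTestParam in a fluent style, for example:
+//     DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+//         .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=remote_submix_media");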
+
 class AudioPolicyManagerTestDPPlaybackReRouting : public AudioPolicyManagerTestDynamicPolicy,
-        public testing::WithParamInterface<audio_attributes_t> {
+        public testing::WithParamInterface<DPTestParam> {
 protected:
     void SetUp() override;
     void TearDown() override;
 
     std::unique_ptr<RecordingActivityTracker> mTracker;
-
-    std::vector<PolicyMixTuple> mUsageRules = {
-            {AUDIO_USAGE_MEDIA, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE},
-            {AUDIO_USAGE_ALARM, AUDIO_SOURCE_DEFAULT, RULE_MATCH_ATTRIBUTE_USAGE}
-    };
-
     struct audio_port_v7 mInjectionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
@@ -1307,9 +1679,11 @@
     audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
     audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    audioConfig.sample_rate = 48000;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    DPTestParam param = GetParam();
     status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
+            AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
     ASSERT_EQ(NO_ERROR, ret);
 
     struct audio_port_v7 extractionPort;
@@ -1322,8 +1696,9 @@
         AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
     std::string tags = "addr=" + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
-    getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
-            AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE, &mPortId);
+    getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
+                    AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+                    AUDIO_INPUT_FLAG_NONE, &mPortId);
     ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
     ASSERT_EQ(extractionPort.id, selectedDeviceId);
 
@@ -1336,151 +1711,261 @@
     AudioPolicyManagerTestDynamicPolicy::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, InitSuccess) {
-    // SetUp must finish with no assertions
-}
-
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, Dump) {
-    dumpToLog();
-}
-
 TEST_P(AudioPolicyManagerTestDPPlaybackReRouting, PlaybackReRouting) {
-    const audio_attributes_t attr = GetParam();
-    const audio_usage_t usage = attr.usage;
+    const DPTestParam param = GetParam();
+    const audio_attributes_t& attr = param.attributes;
 
     audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
-            nullptr /*output*/, nullptr /*portId*/, attr);
-    if (std::find_if(begin(mUsageRules), end(mUsageRules), [&usage](const auto &usageRule) {
-            return (std::get<0>(usageRule) == usage) &&
-            (std::get<2>(usageRule) == RULE_MATCH_ATTRIBUTE_USAGE);}) != end(mUsageRules) ||
-            (strncmp(attr.tags, "addr=", strlen("addr=")) == 0 &&
-                    strncmp(attr.tags + strlen("addr="), mMixAddress.c_str(),
-                    AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0)) {
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
+            attr, param.session);
+    if (param.expected_match) {
         EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
     } else {
         EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
     }
 }
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingUsageMatch,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
-                )
-        );
+const std::vector<AudioMixMatchCriterion> USAGE_MEDIA_ALARM_CRITERIA = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA),
+            createUsageCriterion(AUDIO_USAGE_ALARM)
+};
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingAddressPriorityMatch,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                    AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
-                )
-        );
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReroutingUsageMatch,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=other"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ALARM),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_GAME),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+            .withUsage(AUDIO_USAGE_ASSISTANT)));
 
-INSTANTIATE_TEST_CASE_P(
-        PlaybackReroutingUnHandledUsages,
-        AudioPolicyManagerTestDPPlaybackReRouting,
-        testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
-                                     AUDIO_USAGE_ASSISTANCE_SONIFICATION,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
-                                     AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
-                )
-        );
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReroutingAddressPriorityMatch,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION).withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ALARM)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_GAME)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_VIRTUAL_SOURCE)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("addr=remote_submix_media"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("sometag;addr=remote_submix_media;othertag=somevalue"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("addr=remote_submix_media;othertag"),
+        DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+            .withUsage(AUDIO_USAGE_ASSISTANT)
+            .withTags("sometag;othertag;addr=remote_submix_media")));
+
+static constexpr audio_session_t TEST_SESSION_ID = static_cast<audio_session_t>(42);
+static constexpr audio_session_t OTHER_SESSION_ID = static_cast<audio_session_t>(77);
+
+INSTANTIATE_TEST_SUITE_P(
+    PlaybackReRoutingWithSessionId,
+    AudioPolicyManagerTestDPPlaybackReRouting,
+    testing::Values(
+        // Mix is matched because the session id matches the one specified by the mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ true)
+            .withSessionId(TEST_SESSION_ID),
+        // Mix is not matched because the session id doesn't match the one specified
+        // by the mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ false)
+            .withSessionId(OTHER_SESSION_ID),
+        // Mix is matched: the session id doesn't match the one specified by the rule,
+        // but an address specified in the tags takes precedence.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                    /*expected_match=*/ true)
+            .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+        // Mix is matched, both the session id and the usage match ones specified by mix rule.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                      createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ true)
+            .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA),
+        // Mix is not matched, the session id matches the one specified by mix rule,
+        // but usage does not.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                      createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ false)
+                    .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_GAME),
+        // Mix is not matched, the usage matches the one specified by mix rule,
+        // but the session id is excluded.
+        DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID, /*exclude=*/ true),
+                                     createUsageCriterion(AUDIO_USAGE_MEDIA)},
+                    /*expected_match=*/ false)
+                    .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
+
+struct DPMmapTestParam {
+    DPMmapTestParam(int mixRouteFlags, audio_devices_t deviceType, const std::string& deviceAddress)
+        : mixRouteFlags(mixRouteFlags), deviceType(deviceType), deviceAddress(deviceAddress) {}
+
+    int mixRouteFlags;
+    audio_devices_t deviceType;
+    std::string deviceAddress;
+};
+
+class AudioPolicyManagerTestMMapPlaybackRerouting
+    : public AudioPolicyManagerTestDynamicPolicy,
+      public ::testing::WithParamInterface<DPMmapTestParam> {
+  protected:
+    void SetUp() override {
+        AudioPolicyManagerTestDynamicPolicy::SetUp();
+        audioConfig = AUDIO_CONFIG_INITIALIZER;
+        audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+        audioConfig.sample_rate = k48000SamplingRate;
+    }
+
+    audio_config_t audioConfig;
+    audio_io_handle_t mOutput;
+    audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mPortId;
+    AudioPolicyInterface::output_type_t mOutputType;
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    bool mIsSpatialized;
+    bool mIsBitPerfect;
+};
+
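+// The cases below exercise dynamic policy mixes against MMAP (AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)
+// streams: getOutputForAttr() is expected to reject an MMAP stream that matches a loopback
+// (re-routing) mix, presumably because re-routing cannot be applied to a memory-mapped stream,
+// while non-MMAP streams and render-only mixes keep working.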
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
+    // Add mix matching the test uid.
+    const int testUid = 12345;
+    const auto param = GetParam();
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+                                param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting an output for the matching uid and an MMAP stream should fail.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    ASSERT_EQ(INVALID_OPERATION,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
+        NonMmapPlaybackStreamMatchingLoopbackDapMixSucceeds) {
+    // Add mix matching the test uid.
+    const int testUid = 12345;
+    const auto param = GetParam();
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+                                param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting an output for the matching uid should succeed for a non-MMAP stream.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_NONE;
+    ASSERT_EQ(NO_ERROR,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
+        MmapPlaybackStreamMatchingRenderDapMixSucceeds) {
+    // Add a render-only mix matching the test uid.
+    const int testUid = 12345;
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
+                                /*mixAddress=*/"", audioConfig, {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting an output for the matching uid should succeed for an MMAP stream.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    ASSERT_EQ(NO_ERROR,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
+        testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                        /*deviceAddress=*/"remote_submix_media"),
+                        DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
+                                        AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                                        /*deviceAddress=*/"remote_submix_media")));
 
 class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
-        public testing::WithParamInterface<audio_attributes_t> {
+        public testing::WithParamInterface<DPTestParam> {
 protected:
     void SetUp() override;
     void TearDown() override;
 
     std::unique_ptr<RecordingActivityTracker> mTracker;
-
-    std::vector<PolicyMixTuple> mSourceRules = {
-        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_CAMCORDER, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
-        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_MIC, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET},
-        {AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_VOICE_COMMUNICATION, RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}
-    };
-
     struct audio_port_v7 mExtractionPort;
     audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 };
@@ -1493,9 +1978,11 @@
     audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
     audioConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
     audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    audioConfig.sample_rate = 48000;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    DPTestParam param = GetParam();
     status_t ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK,
-            AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
+            AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
     ASSERT_EQ(NO_ERROR, ret);
 
     struct audio_port_v7 injectionPort;
@@ -1509,7 +1996,7 @@
     std::string tags = std::string("addr=") + mMixAddress;
     strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
     ASSERT_EQ(NO_ERROR, mManager->startOutput(mPortId));
     ASSERT_EQ(injectionPort.id, getDeviceIdFromPatch(mClient->getLastAddedPatch()));
 
@@ -1522,72 +2009,94 @@
     AudioPolicyManagerTestDynamicPolicy::TearDown();
 }
 
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, InitSuccess) {
-    // SetUp mush finish with no assertions.
-}
-
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, Dump) {
-    dumpToLog();
-}
-
 TEST_P(AudioPolicyManagerTestDPMixRecordInjection, RecordingInjection) {
-    const audio_attributes_t attr = GetParam();
-    const audio_source_t source = attr.source;
+    const DPTestParam param = GetParam();
 
     audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT,
-            AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE, &portId);
-    if (std::find_if(begin(mSourceRules), end(mSourceRules), [&source](const auto &sourceRule) {
-            return (std::get<1>(sourceRule) == source) &&
-            (std::get<2>(sourceRule) == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET);})
-            != end(mSourceRules)) {
+    getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
+        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+        AUDIO_INPUT_FLAG_NONE, &portId);
+    if (param.expected_match) {
         EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
     } else {
         EXPECT_NE(mExtractionPort.id, captureRoutedPortId);
     }
 }
 
+const std::vector<AudioMixMatchCriterion> SOURCE_CAM_MIC_VOICE_CRITERIA = {
+        createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
+        createCapturePresetCriterion(AUDIO_SOURCE_MIC),
+        createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
+};
+
 // No address priority rule for remote recording, address is a "don't care"
 INSTANTIATE_TEST_CASE_P(
-        RecordInjectionSourceMatch,
+        RecordInjectionSource,
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"}
-                )
-        );
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_CAMCORDER),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_CAMCORDER)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_MIC),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_MIC)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+                .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_VOICE_RECOGNITION),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_VOICE_RECOGNITION)
+                .withTags("addr=remote_submix_media"),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_HOTWORD),
+            DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+                .withSource(AUDIO_SOURCE_HOTWORD)
+                .withTags("addr=remote_submix_media")));
 
-// No address priority rule for remote recording
 INSTANTIATE_TEST_CASE_P(
-        RecordInjectionSourceNotMatch,
+        RecordInjectionWithSessionId,
         AudioPolicyManagerTestDPMixRecordInjection,
         testing::Values(
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"},
-                (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
-                                     AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
-                                     "addr=remote_submix_media"}
-                )
-        );
+            // Mix is matched because the session id matches the one specified by the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ true)
+                .withSessionId(TEST_SESSION_ID),
+            // Mix is not matched because the session id doesn't match the one specified
+            // by the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ false)
+                .withSessionId(OTHER_SESSION_ID),
+            // Mix is not matched: the session id doesn't match the one specified by the rule,
+            // and the address specified in the tags is ignored for recorder mixes.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+                        /*expected_match=*/ false)
+                .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+            // Mix is matched: both the session id and the source match the ones
+            // specified by the mix rule.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+                        /*expected_match=*/ true)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_CAMCORDER),
+            // Mix is not matched: the session id matches the one specified by the mix rule,
+            // but the source does not.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+                        /*expected_match=*/ false)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC),
+            // Mix is not matched: the source matches the one specified by the mix rule,
+            // but the session id is excluded.
+            DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID,
+                                                                       /*exclude=*/ true),
+                                          createCapturePresetCriterion(AUDIO_SOURCE_MIC)},
+                        /*expected_match=*/ false)
+                .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC)));
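
As a reading aid for the parameterized cases above, here is a minimal sketch, assuming the `DPTestParam` builder and the `createSessionIdCriterion`/`createCapturePresetCriterion` helpers keep the signatures visible at their call sites; it mirrors those call sites and is not part of the patch.

```cpp
// Sketch only: shows how the helpers above compose, not an excerpt from the patch.
const std::vector<AudioMixMatchCriterion> criteria = {
        createSessionIdCriterion(TEST_SESSION_ID),             // match this session id
        createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),  // and this capture preset
};
// Expected to route to the injection mix: both criteria agree with the request.
const DPTestParam matching = DPTestParam(criteria, /*expected_match=*/ true)
        .withSessionId(TEST_SESSION_ID)
        .withSource(AUDIO_SOURCE_CAMCORDER);
// Expected not to match: the source differs, and "addr=..." tags are ignored for
// recorder mixes, so they cannot rescue the match.
const DPTestParam nonMatching = DPTestParam(criteria, /*expected_match=*/ false)
        .withSessionId(TEST_SESSION_ID)
        .withSource(AUDIO_SOURCE_MIC)
        .withTags("addr=remote_submix_media");
```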
 
 using DeviceConnectionTestParams =
         std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/>;
@@ -1609,19 +2118,19 @@
     // Connecting a valid output device with valid parameters should trigger a routing update
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-            "a", "b", AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(1, mClient->getRoutingUpdatedCounter());
 
     // Disconnecting a connected device should succeed and trigger a routing update
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            "a", "b", AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
 
     // Disconnecting a disconnected device should fail and not trigger a routing update
     ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-            "a", "b",  AUDIO_FORMAT_DEFAULT));
+            "00:11:22:33:44:55", "b",  AUDIO_FORMAT_DEFAULT));
     ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
 
     // Changing force use should trigger an update
@@ -1687,11 +2196,12 @@
     // Try start input or output according to the device type
     if (audio_is_output_devices(type)) {
         getOutputForAttr(&routedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-                48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE);
+                k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
     } else if (audio_is_input_device(type)) {
         RecordingActivityTracker tracker;
-        getInputForAttr({}, tracker.getRiid(), &routedPortId, AUDIO_FORMAT_PCM_16_BIT,
-                AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE);
+        getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+                AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+                AUDIO_INPUT_FLAG_NONE);
     }
     ASSERT_EQ(devicePort.id, routedPortId);
 
@@ -1700,6 +2210,45 @@
             address.c_str(), name.c_str(), AUDIO_FORMAT_DEFAULT));
 }
 
+android::media::audio::common::ExtraAudioDescriptor make_ExtraAudioDescriptor(
+        android::media::audio::common::AudioStandard audioStandard,
+        android::media::audio::common::AudioEncapsulationType audioEncapsulationType) {
+    android::media::audio::common::ExtraAudioDescriptor result;
+    result.standard = audioStandard;
+    result.audioDescriptor = {0xb4, 0xaf, 0x98, 0x1a};
+    result.encapsulationType = audioEncapsulationType;
+    return result;
+}
+
+TEST_P(AudioPolicyManagerTestDeviceConnection, PassingExtraAudioDescriptors) {
+    const audio_devices_t type = std::get<0>(GetParam());
+    if (!audio_device_is_digital(type)) {
+        // Extra audio descriptors (EADs) are used only for HDMI devices.
+        GTEST_SKIP() << "Not a digital device type: " << audio_device_to_string(type);
+    }
+    const std::string name = std::get<1>(GetParam());
+    const std::string address = std::get<2>(GetParam());
+    android::media::AudioPortFw audioPort;
+    ASSERT_EQ(NO_ERROR,
+            mManager->deviceToAudioPort(type, address.c_str(), name.c_str(), &audioPort));
+    android::media::audio::common::AudioPort& port = audioPort.hal;
+    port.extraAudioDescriptors.push_back(make_ExtraAudioDescriptor(
+                    android::media::audio::common::AudioStandard::EDID,
+                    android::media::audio::common::AudioEncapsulationType::IEC61937));
+    const size_t lastConnectedDevicePortCount = mClient->getConnectedDevicePortCount();
+    const size_t lastDisconnectedDevicePortCount = mClient->getDisconnectedDevicePortCount();
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE, port, AUDIO_FORMAT_DEFAULT));
+    EXPECT_EQ(lastConnectedDevicePortCount + 1, mClient->getConnectedDevicePortCount());
+    EXPECT_EQ(lastDisconnectedDevicePortCount, mClient->getDisconnectedDevicePortCount());
+    const audio_port_v7* devicePort = mClient->getLastConnectedDevicePort();
+    EXPECT_EQ(port.extraAudioDescriptors.size(), devicePort->num_extra_audio_descriptors);
+    EXPECT_EQ(AUDIO_STANDARD_EDID, devicePort->extra_audio_descriptors[0].standard);
+    EXPECT_EQ(AUDIO_ENCAPSULATION_TYPE_IEC61937,
+            devicePort->extra_audio_descriptors[0].encapsulation_type);
+    EXPECT_NE(0, devicePort->extra_audio_descriptors[0].descriptor[0]);
+}
+
 INSTANTIATE_TEST_CASE_P(
         DeviceConnectionState,
         AudioPolicyManagerTestDeviceConnection,
@@ -1709,12 +2258,483 @@
                 DeviceConnectionTestParams({AUDIO_DEVICE_OUT_HDMI, "test_out_hdmi",
                                             "audio_policy_test_out_hdmi"}),
                 DeviceConnectionTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, "bt_hfp_in",
-                                            "hfp_client_in"}),
+                                            "00:11:22:33:44:55"}),
                 DeviceConnectionTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO, "bt_hfp_out",
-                                            "hfp_client_out"})
+                                            "00:11:22:33:44:55"})
                 )
         );
 
+class AudioPolicyManagerCarTest : public AudioPolicyManagerTestDynamicPolicy {
+protected:
+    std::string getConfigFile() override { return sCarConfig; }
+
+    static const std::string sCarConfig;
+    static const std::string sCarBusMediaOutput;
+    static const std::string sCarBusNavigationOutput;
+    static const std::string sCarRearZoneOneOutput;
+    static const std::string sCarRearZoneTwoOutput;
+};
+
+const std::string AudioPolicyManagerCarTest::sCarConfig =
+        AudioPolicyManagerCarTest::sExecutableDir + "test_car_ap_atmos_offload_configuration.xml";
+
+const std::string AudioPolicyManagerCarTest::sCarBusMediaOutput = "bus0_media_out";
+
+const std::string AudioPolicyManagerCarTest::sCarBusNavigationOutput = "bus1_navigation_out";
+
+const std::string AudioPolicyManagerCarTest::sCarRearZoneOneOutput = "bus100_audio_zone_1";
+
+const std::string AudioPolicyManagerCarTest::sCarRearZoneTwoOutput = "bus200_audio_zone_2";
+
+TEST_F(AudioPolicyManagerCarTest, InitSuccess) {
+    // SetUp must finish with no assertions.
+}
+
+TEST_F(AudioPolicyManagerCarTest, Dump) {
+    dumpToLog();
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrAtmosOutputAfterRegisteringPolicyMix) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig);
+    ASSERT_EQ(NO_ERROR, ret);
+
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+    sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
+    ASSERT_NE(nullptr, outDesc.get());
+    ASSERT_EQ(AUDIO_FORMAT_E_AC3_JOC, outDesc->getFormat());
+    ASSERT_EQ(AUDIO_CHANNEL_OUT_5POINT1, outDesc->getChannelMask());
+    ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
+
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    output = AUDIO_IO_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+    outDesc = mManager->getOutputs().valueFor(output);
+    ASSERT_NE(nullptr, outDesc.get());
+    ASSERT_EQ(AUDIO_FORMAT_PCM_16_BIT, outDesc->getFormat());
+    ASSERT_EQ(AUDIO_CHANNEL_OUT_7POINT1POINT4, outDesc->getChannelMask());
+    ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrAfterRegisteringPolicyMix) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    audio_port_v7 mediaDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusMediaOutput, &mediaDevicePort));
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+            AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+    ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterRegisteringPolicyMix) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+            AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+            AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+    ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterUserAffinities) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+    ASSERT_NE(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithExcludeUserIdCriteria) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false),
+                createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t navigationAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, navigationAttribute);
+
+    ASSERT_NE(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputExcludeUserIdCriteria) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false),
+                createUserIdCriterion(0 /* userId */, /* exclude */ true)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+    ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+        GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+    const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+    ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+        GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+    const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t alarmAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
+
+    ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+        GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+    audio_port_v7 primaryZoneDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusMediaOutput, &primaryZoneDevicePort));
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                    AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+    uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+            AUDIO_SESSION_NONE, user11AppUid);
+
+    ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+        GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+    const AudioDeviceTypeAddr secondaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput);
+    const AudioDeviceTypeAddrVector secondaryZoneDevices = {secondaryOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 11, secondaryZoneDevices);
+    audio_port_v7 secondaryZoneDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarRearZoneOneOutput, &secondaryZoneDevicePort));
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                    AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+    uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+            AUDIO_SESSION_NONE, user11AppUid);
+
+    ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+        GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+    const AudioDeviceTypeAddr secondaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput);
+    const AudioDeviceTypeAddrVector secondaryZoneDevices = {secondaryOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 11, secondaryZoneDevices);
+    const AudioDeviceTypeAddr tertiaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput);
+    const AudioDeviceTypeAddrVector tertiaryZoneDevices = {tertiaryOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 15, tertiaryZoneDevices);
+    audio_port_v7 tertiaryZoneDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarRearZoneTwoOutput, &tertiaryZoneDevicePort));
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t mediaAttribute = {
+                    AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+                    AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+    uid_t user15AppUid = multiuser_get_uid(/* user_id */ 15, /* app_id */ 12345);
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+            AUDIO_SESSION_NONE, user15AppUid);
+
+    ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithNoMatchingMix) {
+    status_t ret;
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+    std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+            AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+                createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                    /*exclude=*/ false)};
+    ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+    const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+    const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+    const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+    mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+    audio_port_v7 navDevicePort;
+    ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+            sCarBusNavigationOutput, &navDevicePort));
+    audio_port_handle_t selectedDeviceId = navDevicePort.id;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    const audio_attributes_t alarmAttribute = {
+                AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
+
+    ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
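
The AudioPolicyManagerCarTest cases above repeat one setup pattern: a 48 kHz stereo PCM config, a usage-based criterion, a render mix registered on an AUDIO_DEVICE_OUT_BUS address, and a getOutputForAttr call checked against the expected bus port. A condensed, non-authoritative sketch of that recurring body follows; every helper name is taken from the call sites above, and the snippet assumes it runs inside the same test fixture.

```cpp
// Sketch of the setup each car test above repeats; not an excerpt from the patch.
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
        createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
// Register a render mix on the media bus and resolve its sink port.
ASSERT_EQ(NO_ERROR, addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
        AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria));
audio_port_v7 mediaDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
        sCarBusMediaOutput, &mediaDevicePort));
// Request an output for USAGE_MEDIA attributes and check the routed device port.
const audio_attributes_t mediaAttribute = {
        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t output;
audio_port_handle_t portId;
getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
        k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
```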
 class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     std::string getConfigFile() override { return sTvConfig; }
@@ -1735,8 +2755,8 @@
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_io_handle_t output;
     audio_port_handle_t portId;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000,
-            flags, &output, &portId);
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            k48000SamplingRate, flags, &output, &portId);
     sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
     ASSERT_NE(nullptr, outDesc.get());
     audio_port_v7 port = {};
@@ -1913,6 +2933,108 @@
               mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
 }
 
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, PreferredDeviceUsedForInput) {
+    const audio_source_t source = AUDIO_SOURCE_MIC;
+    const device_role_t role = DEVICE_ROLE_PREFERRED;
+    const std::string address = "card=1;device=0";
+    const std::string deviceName = "randomName";
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+    auto availableDevices = mManager->getAvailableInputDevices();
+    ASSERT_GT(availableDevices.size(), 1);
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = source;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+    ASSERT_NE(nullptr, selectedDevice);
+
+    sp<DeviceDescriptor> preferredDevice = nullptr;
+    for (const auto& device : availableDevices) {
+        if (device != selectedDevice) {
+            preferredDevice = device;
+            break;
+        }
+    }
+    ASSERT_NE(nullptr, preferredDevice);
+    // After setting preferred device for capture preset, the selected device for input should be
+    // the preferred device.
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(source, role,
+                                                       {preferredDevice->getDeviceTypeAddr()}));
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+    // After clearing preferred device for capture preset, the selected device for input should be
+    // the same as the original one.
+    ASSERT_EQ(NO_ERROR,
+              mManager->clearDevicesRoleForCapturePreset(source, role));
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, DisabledDeviceNotUsedForInput) {
+    const audio_source_t source = AUDIO_SOURCE_MIC;
+    const device_role_t role = DEVICE_ROLE_DISABLED;
+    const std::string address = "card=1;device=0";
+    const std::string deviceName = "randomName";
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+    auto availableDevices = mManager->getAvailableInputDevices();
+    ASSERT_GT(availableDevices.size(), 1);
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = source;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+    ASSERT_NE(nullptr, selectedDevice);
+
+    // After setting disabled device for capture preset, the disabled device must not be
+    // selected for input.
+    ASSERT_EQ(NO_ERROR,
+              mManager->setDevicesRoleForCapturePreset(source, role,
+                                                       {selectedDevice->getDeviceTypeAddr()}));
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+    // After clearing disabled device for capture preset, the selected device for input should be
+    // the original one.
+    ASSERT_EQ(NO_ERROR,
+              mManager->clearDevicesRoleForCapturePreset(source, role));
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+    ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+            AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+            address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
 INSTANTIATE_TEST_CASE_P(
         DevicesRoleForCapturePresetOperation,
         AudioPolicyManagerDevicesRoleForCapturePresetTest,
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index ff4d568..5e71210 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -12,6 +12,7 @@
     srcs: [
         "test_audio_policy_configuration.xml",
         "test_audio_policy_primary_only_configuration.xml",
+        "test_car_ap_atmos_offload_configuration.xml",
         "test_invalid_audio_policy_configuration.xml",
         "test_tv_apm_configuration.xml",
         "test_settop_box_surround_configuration.xml",
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 5e1822a..50ca26a 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -54,6 +54,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                            samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
+                <mixPort name="hifi_output" role="source" flags="AUDIO_OUTPUT_FLAG_BIT_PERFECT"/>
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -71,12 +72,19 @@
                 <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
                             role="source" address="hfp_client_in">
                 </devicePort>
+                <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
+                            encodedFormats="AUDIO_FORMAT_LDAC AUDIO_FORMAT_APTX AUDIO_FORMAT_APTX_HD AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
+                </devicePort>
+                <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+                </devicePort>
+                <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+                </devicePort>
             </devicePorts>
             <routes>
                 <route type="mix" sink="Speaker"
                        sources="primary output,voip_rx"/>
                 <route type="mix" sink="primary input"
-                       sources="Built-In Mic,Hdmi-In Mic"/>
+                       sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
                 <route type="mix" sink="voip_tx"
                        sources="Built-In Mic"/>
                 <route type="mix" sink="Hdmi"
@@ -85,6 +93,10 @@
                        sources="mixport_bt_hfp_output,voip_rx"/>
                 <route type="mix" sink="mixport_bt_hfp_input"
                        sources="BT SCO Headset Mic"/>
+                <route type="mix" sink="BT A2DP Out"
+                       sources="primary output,hifi_output"/>
+                <route type="mix" sink="USB Device Out"
+                       sources="primary output,hifi_output"/>
             </routes>
         </module>
 
diff --git a/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml b/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml
new file mode 100644
index 0000000..d131ed8
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml
@@ -0,0 +1,308 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="true"/>
+
+    <modules>
+        <!-- Primary Audio HAL -->
+        <module name="primary" halVersion="3.0">
+            <attachedDevices>
+                <!-- One bus per context -->
+                <item>bus0_media_out</item>
+                <item>bus1_navigation_out</item>
+                <item>bus2_voice_command_out</item>
+                <item>bus3_call_ring_out</item>
+                <item>bus4_call_out</item>
+                <item>bus5_alarm_out</item>
+                <item>bus6_notification_out</item>
+                <item>bus7_system_sound_out</item>
+                <!-- names with _audio_zone_# are used to define an emulator rear seat audio zone,
+                    where each number # is the zone id number -->
+                <item>bus100_audio_zone_1</item>
+                <item>bus200_audio_zone_2</item>
+                <item>Built-In Mic</item>
+                <item>Built-In Back Mic</item>
+                <item>Echo-Reference Mic</item>
+                <item>FM Tuner</item>
+                <item>Tone Generator 0</item>
+                <item>Tone Generator 1</item>
+            </attachedDevices>
+            <defaultOutputDevice>bus0_media_out</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="mixport_bus0_media_out" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus0_media_out_atmos" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_DIRECT">
+                    <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_5POINT1"/>
+                </mixPort>
+                <mixPort name="mixport_bus0_media_out_atmos_pcm" role="source"
+                        flags="AUDIO_OUTPUT_FLAG_DIRECT">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
+                </mixPort>
+                <mixPort name="mixport_bus1_navigation_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus2_voice_command_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus3_call_ring_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus4_call_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus5_alarm_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus6_notification_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus7_system_sound_out" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus100_audio_zone_1" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_bus200_audio_zone_2" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </mixPort>
+                <mixPort name="mixport_tuner0" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_input_bus_tone_zone_0" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mixport_input_bus_tone_zone_1" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="bus0_media_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus0_media_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+                            samplingRates="48000"
+                            channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_5POINT1"/>
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus1_navigation_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus1_navigation_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus2_voice_command_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus2_voice_command_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus3_call_ring_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus3_call_ring_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus4_call_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus4_call_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus5_alarm_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus5_alarm_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus6_notification_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus6_notification_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus7_system_sound_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus7_system_sound_out">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus100_audio_zone_1" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                        address="bus100_audio_zone_1">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="bus200_audio_zone_2" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+                            address="bus200_audio_zone_2">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                              minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                              stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC"
+                        role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="Echo-Reference Mic" type="AUDIO_DEVICE_IN_ECHO_REFERENCE"
+                        role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+                            channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+                </devicePort>
+                <devicePort tagName="FM Tuner" type="AUDIO_DEVICE_IN_FM_TUNER" role="source"
+                        address="tuner0">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                            samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                                minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                                stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="Tone Generator 0" type="AUDIO_DEVICE_IN_BUS" role="source"
+                            address="input_bus_tone_zone_0">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                              minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                              stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+                <devicePort tagName="Tone Generator 1" type="AUDIO_DEVICE_IN_BUS" role="source"
+                            address="input_bus_tone_zone_1">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                    <gains>
+                        <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+                              minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+                              stepValueMB="100"/>
+                    </gains>
+                </devicePort>
+            </devicePorts>
+            <!-- route declaration, i.e. list all available sources for a given sink -->
+            <routes>
+                <route type="mix" sink="bus0_media_out"
+                        sources="mixport_bus0_media_out,mixport_bus0_media_out_atmos,mixport_bus0_media_out_atmos_pcm"/>
+                <route type="mix" sink="bus1_navigation_out" sources="mixport_bus1_navigation_out"/>
+                <route type="mix" sink="bus2_voice_command_out"
+                        sources="mixport_bus2_voice_command_out"/>
+                <route type="mix" sink="bus3_call_ring_out" sources="mixport_bus3_call_ring_out"/>
+                <route type="mix" sink="bus4_call_out" sources="mixport_bus4_call_out"/>
+                <route type="mix" sink="bus5_alarm_out" sources="mixport_bus5_alarm_out"/>
+                <route type="mix" sink="bus6_notification_out"
+                        sources="mixport_bus6_notification_out"/>
+                <route type="mix" sink="bus7_system_sound_out"
+                        sources="mixport_bus7_system_sound_out"/>
+                <route type="mix" sink="bus100_audio_zone_1" sources="mixport_bus100_audio_zone_1"/>
+                <route type="mix" sink="bus200_audio_zone_2" sources="mixport_bus200_audio_zone_2"/>
+                <route type="mix" sink="primary input"
+                        sources="Built-In Mic,Built-In Back Mic,Echo-Reference Mic"/>
+                <route type="mix" sink="mixport_tuner0" sources="FM Tuner"/>
+                <route type="mix" sink="mixport_input_bus_tone_zone_0" sources="Tone Generator 0"/>
+                <route type="mix" sink="mixport_input_bus_tone_zone_1" sources="Tone Generator 1"/>
+            </routes>
+        </module>
+    </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..a45365a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -57,7 +57,6 @@
         "api1/client2/StreamingProcessor.cpp",
         "api1/client2/JpegProcessor.cpp",
         "api1/client2/CallbackProcessor.cpp",
-        "api1/client2/JpegCompressor.cpp",
         "api1/client2/CaptureSequencer.cpp",
         "api1/client2/ZslProcessor.cpp",
         "api2/CameraDeviceClient.cpp",
@@ -66,6 +65,7 @@
         "api2/DepthCompositeStream.cpp",
         "api2/HeicEncoderInfoManager.cpp",
         "api2/HeicCompositeStream.cpp",
+        "api2/JpegRCompositeStream.cpp",
         "device3/BufferUtils.cpp",
         "device3/Camera3Device.cpp",
         "device3/Camera3OfflineSession.cpp",
@@ -95,6 +95,12 @@
         "hidl/HidlCameraDeviceUser.cpp",
         "hidl/HidlCameraService.cpp",
         "hidl/Utils.cpp",
+        "aidl/AidlCameraDeviceCallbacks.cpp",
+        "aidl/AidlCameraDeviceUser.cpp",
+        "aidl/AidlCameraService.cpp",
+        "aidl/AidlCameraServiceListener.cpp",
+        "aidl/AidlUtils.cpp",
+        "aidl/DeathPipe.cpp",
         "utils/CameraServiceProxyWrapper.cpp",
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
@@ -114,6 +120,7 @@
     ],
 
     shared_libs: [
+        "libactivitymanager_aidl",
         "libbase",
         "libdl",
         "libexif",
@@ -137,6 +144,7 @@
         "libhidlbase",
         "libimage_io",
         "libjpeg",
+        "libultrahdr",
         "libmedia_codeclist",
         "libmedia_omx",
         "libmemunreachable",
@@ -151,19 +159,22 @@
         "android.frameworks.cameraservice.service@2.2",
         "android.frameworks.cameraservice.device@2.0",
         "android.frameworks.cameraservice.device@2.1",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
         "media_permission-aidl-cpp",
     ],
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 80410ab..668a51a 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -33,9 +33,11 @@
 #include <android/hardware/ICamera.h>
 #include <android/hardware/ICameraClient.h>
 
+#include <aidl/AidlCameraService.h>
 #include <android-base/macros.h>
 #include <android-base/parseint.h>
 #include <android-base/stringprintf.h>
+#include <android/permission/PermissionChecker.h>
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
@@ -67,6 +69,8 @@
 #include <private/android_filesystem_config.h>
 #include <system/camera_vendor_tags.h>
 #include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 
 #include <system/camera.h>
 
@@ -80,6 +84,9 @@
 
 namespace {
     const char* kPermissionServiceName = "permission";
+    const char* kActivityServiceName = "activity";
+    const char* kSensorPrivacyServiceName = "sensor_privacy";
+    const char* kAppopsServiceName = "appops";
 }; // namespace anonymous
 
 namespace android {
@@ -88,6 +95,7 @@
 using binder::Status;
 using namespace camera3;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
+using frameworks::cameraservice::service::implementation::AidlCameraService;
 using hardware::ICamera;
 using hardware::ICameraClient;
 using hardware::ICameraServiceListener;
@@ -131,6 +139,8 @@
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 static const String16
         sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS logging, used to denote the API type for the logger
+static const int LOG_FGS_CAMERA_API = 1;
 const char *sFileName = "lastOpenSessionDumpFile";
 static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
 static constexpr int32_t kSystemNativeClientState =
@@ -142,7 +152,10 @@
 // Set to keep track of logged service error events.
 static std::set<String8> sServiceErrorEventSet;
 
-CameraService::CameraService() :
+CameraService::CameraService(
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
+                std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
         mNumberOfCameras(0),
         mNumberOfCamerasWithoutSystemCamera(0),
@@ -162,6 +175,20 @@
     return (CameraThreadState::getCallingUid() < AID_APP_START);
 }
 
+// Enable processes with isolated AID to request the binder
+void CameraService::instantiate() {
+    CameraService::publish(true);
+}
+
+void CameraService::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+    if (name != String16(kAppopsServiceName)) {
+        return;
+    }
+
+    ALOGV("appops service registered. setting camera audio restriction");
+    mAppOps.setCameraAudioRestriction(mAudioRestriction);
+}
+
 void CameraService::onFirstRef()
 {
 
@@ -186,16 +213,33 @@
     mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
     mSensorPrivacyPolicy->registerSelf();
     mInjectionStatusListener = new InjectionStatusListener(this);
-    mAppOps.setCameraAudioRestriction(mAudioRestriction);
+
+    // The appops function setCameraAudioRestriction uses getService, which
+    // blocks until the appops service is ready. To enable early boot
+    // availability for cameraservice, use the non-blocking checkService and
+    // register for a notification if the service is not yet available.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->checkService(String16(kAppopsServiceName));
+    if (!binder) {
+        sm->registerForNotifications(String16(kAppopsServiceName), this);
+    } else {
+        mAppOps.setCameraAudioRestriction(mAudioRestriction);
+    }
+
     sp<HidlCameraService> hcs = HidlCameraService::getInstance(this);
     if (hcs->registerAsService() != android::OK) {
-        ALOGE("%s: Failed to register default android.frameworks.cameraservice.service@1.0",
+        // Deprecated, so it will fail to register on newer devices
+        ALOGW("%s: Did not register default android.frameworks.cameraservice.service@2.2",
               __FUNCTION__);
     }
 
+    if (!AidlCameraService::registerService(this)) {
+        ALOGE("%s: Failed to register default AIDL VNDK CameraService", __FUNCTION__);
+    }
+
     // This needs to be the last call in this function, so that it's as close to
     // ServiceManager::addService() as possible.
-    CameraServiceProxyWrapper::pingCameraServiceProxy();
+    mCameraServiceProxyWrapper->pingCameraServiceProxy();
     ALOGI("CameraService pinged cameraservice proxy");
 }
 
@@ -203,6 +247,7 @@
     status_t res;
 
     std::vector<std::string> deviceIds;
+    std::unordered_map<std::string, std::set<std::string>> unavailPhysicalIds;
     {
         Mutex::Autolock l(mServiceLock);
 
@@ -233,7 +278,7 @@
             ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
         }
 
-        deviceIds = mCameraProviderManager->getCameraDeviceIds();
+        deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
     }
 
 
@@ -242,6 +287,12 @@
         if (getCameraState(id8) == nullptr) {
             onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
         }
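+        // Mark any unavailable physical cameras behind this logical camera as NOT_PRESENT.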
+        if (unavailPhysicalIds.count(cameraId) > 0) {
+            for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
+                String8 physicalId8 = String8(physicalId.c_str());
+                onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+            }
+        }
     }
 
     // Derive primary rear/front cameras, and filter their characteristics.
@@ -269,7 +320,10 @@
                     cameraId.c_str());
             continue;
         }
-        i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
+        auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+                String16{cameraId});
+        i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -335,7 +389,9 @@
         int facing = -1;
         int orientation = 0;
         String8 cameraId8(cameraId.c_str());
-        getDeviceVersion(cameraId8, /*out*/&facing, /*out*/&orientation);
+        int portraitRotation;
+        getDeviceVersion(cameraId8, /*overrideToPortrait*/false, /*out*/&portraitRotation,
+                /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -495,7 +551,7 @@
 
     if (state == nullptr) {
         ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
-                __FUNCTION__, id.string(), physicalId.string());
+                __FUNCTION__, physicalId.string(), id.string());
         return;
     }
 
@@ -538,8 +594,12 @@
                         id.c_str());
                 continue;
             }
-            listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
-                    id16, physicalId16);
+            auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
+                    mapToInterface(newStatus), id16, physicalId16);
+            listener->handleBinderStatus(ret,
+                    "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+                    __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                    ret.exceptionCode());
         }
     }
 }
@@ -580,8 +640,11 @@
         int32_t newStrengthLevel) {
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
-        i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
+        auto ret = i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
                 newStrengthLevel);
+        i->handleBinderStatus(ret,
+                "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
+                i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -638,11 +701,18 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid,
-        bool logPermissionFailure = false) {
-    return checkPermission(sSystemCameraPermission, callingPid, callingUid,
-            logPermissionFailure) &&
-            checkPermission(sCameraPermission, callingPid, callingUid);
+static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
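+    // Anything other than a hard denial from the PermissionChecker preflight counts as granted.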
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    return checkPermissionForSystemCamera && checkPermissionForCamera;
 }
 
 Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
@@ -675,7 +745,7 @@
     return Status::ok();
 }
 
-Status CameraService::getCameraInfo(int cameraId,
+Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -703,8 +773,9 @@
     }
 
     Status ret = Status::ok();
+    int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr.c_str(), cameraInfo);
+            cameraIdStr.c_str(), overrideToPortrait, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -720,8 +791,14 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    if (checkPermission(sSystemCameraPermission, callingPid, callingUid,
-            /*logPermissionFailure*/false) || getpid() == callingPid) {
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    if (checkPermissionForSystemCamera || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
     if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
@@ -739,7 +816,7 @@
 }
 
 Status CameraService::getCameraCharacteristics(const String16& cameraId,
-        int targetSdkVersion, CameraMetadata* cameraInfo) {
+        int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
@@ -766,7 +843,7 @@
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraIdStr, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraIdStr, overrideForPerfClass, cameraInfo);
+            cameraIdStr, overrideForPerfClass, cameraInfo, overrideToPortrait);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -792,9 +869,16 @@
     // If it's not calling from cameraserver, check the permission only if
     // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
     // it would've already been checked in shouldRejectSystemCameraConnection.
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
     if ((callingPid != getpid()) &&
             (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
-            !checkPermission(sCameraPermission, callingPid, callingUid)) {
+            !checkPermissionForCamera) {
         res = cameraInfo->removePermissionEntries(
                 mCameraProviderManager->getProviderTagIdLocked(String8(cameraId).string()),
                 &tagsRemoved);
@@ -887,8 +971,8 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
-        int* orientation) {
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId,
+        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -907,7 +991,8 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
+        res = mCameraProviderManager->getCameraInfo(cameraId.string(), overrideToPortrait,
+                portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
         }
@@ -942,7 +1027,8 @@
         const std::optional<String16>& featureId,  const String8& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -973,15 +1059,20 @@
     }
     if (effectiveApiLevel == API_1) { // Camera1 API route
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-        *client = new Camera2Client(cameraService, tmp, packageName, featureId,
-                cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
-                servicePid, overrideForPerfClass);
+        *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+                packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                forceSlowJpegMode);
+        ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
+                __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
-        *client = new CameraDeviceClient(cameraService, tmp, packageName,
-                systemNativeClient, featureId, cameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass);
+        *client = new CameraDeviceClient(cameraService, tmp,
+                cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+                featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+                overrideForPerfClass, overrideToPortrait);
+        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
 }
@@ -1071,7 +1162,8 @@
             sp<ICameraClient>{nullptr}, id, cameraId,
             internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
+            /*forceSlowJpegMode*/false, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1222,6 +1314,9 @@
 Status CameraService::validateClientPermissionsLocked(const String8& cameraId,
         const String8& clientName8, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
 
@@ -1268,9 +1363,14 @@
     // If it's not calling from cameraserver, check the permission if the
     // device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
     // android.permission.SYSTEM_CAMERA for system only camera devices).
+    attributionSource.pid = clientPid;
+    attributionSource.uid = clientUid;
+    attributionSource.packageName = clientName8;
+    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
     if (callingPid != getpid() &&
-                (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
-                !checkPermission(sCameraPermission, clientPid, clientUid)) {
+                (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
@@ -1587,6 +1687,8 @@
         int clientUid,
         int clientPid,
         int targetSdkVersion,
+        bool overrideToPortrait,
+        bool forceSlowJpegMode,
         /*out*/
         sp<ICamera>* device) {
 
@@ -1597,7 +1699,8 @@
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
-            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
+            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
+            overrideToPortrait, forceSlowJpegMode, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1606,6 +1709,15 @@
     }
 
     *device = client;
+
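+    // Record the start of camera use with the activity manager's FGS API logger.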
+    const sp<IServiceManager> sm(defaultServiceManager());
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return ret;
 }
 
@@ -1657,7 +1769,7 @@
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
     if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid, /*logPermissionFailure*/true)) {
+            !hasPermissionsForSystemCamera(cPid, cUid)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
@@ -1672,6 +1784,7 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
+        bool overrideToPortrait,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
 
@@ -1698,7 +1811,13 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    if (CameraServiceProxyWrapper::isCameraDisabled()) {
+    userid_t clientUserId = multiuser_get_user_id(clientUid);
+    int callingUid = CameraThreadState::getCallingUid();
+    if (clientUid == USE_CALLING_UID) {
+        clientUserId = multiuser_get_user_id(callingUid);
+    }
+
+    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
         String8 msg =
                 String8::format("Camera disabled by device policy");
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -1707,7 +1826,8 @@
 
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+            !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
         String8 msg =
                 String8::format("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
@@ -1719,7 +1839,8 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
             /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
+            /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1743,23 +1864,79 @@
             ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
         }
     }
+    const sp<IServiceManager> sm(defaultServiceManager());
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
     return ret;
 }
 
+String16 CameraService::getPackageNameFromUid(int clientUid) {
+    String16 packageName("");
+
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16(kPermissionServiceName));
+    if (binder == 0) {
+        ALOGE("Cannot get permission service");
+        // Return an empty package name; the subsequent interaction with the
+        // camera will likely fail.
+        return packageName;
+    }
+
+    sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
+    Vector<String16> packages;
+
+    permCtrl->getPackagesForUid(clientUid, packages);
+
+    if (packages.isEmpty()) {
+        ALOGE("No packages for calling UID %d", clientUid);
+        // Return an empty package name; the subsequent interaction with the
+        // camera will likely fail.
+        return packageName;
+    }
+
+    // Arbitrarily pick the first name in the list
+    packageName = packages[0];
+
+    return packageName;
+}
+
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
+        int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+        bool overrideToPortrait, bool forceSlowJpegMode,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
+    bool isNonSystemNdk = false;
+    String16 clientPackageName;
+    if (clientPackageNameMaybe.size() <= 0) {
+        // NDK calls don't come with package names, but we need one for various cases.
+        // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
+        // do exist. For all authentication cases, all packages under the same UID get the
+        // same permissions, so picking any associated package name is sufficient. For some
+        // other cases, this may give inaccurate names for clients in logs.
+        isNonSystemNdk = true;
+        int packageUid = (clientUid == USE_CALLING_UID) ?
+            CameraThreadState::getCallingUid() : clientUid;
+        clientPackageName = getPackageNameFromUid(packageUid);
+    } else {
+        clientPackageName = clientPackageNameMaybe;
+    }
+
     String8 clientName8(clientPackageName);
 
     int originalClientPid = 0;
 
+    int packagePid = (clientPid == USE_CALLING_PID) ?
+        CameraThreadState::getCallingPid() : clientPid;
     ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
-            "Camera API version %d", clientPid, clientName8.string(), cameraId.string(),
+            "Camera API version %d", packagePid, clientName8.string(), cameraId.string(),
             static_cast<int>(effectiveApiLevel));
 
     nsecs_t openTimeNs = systemTime();
@@ -1767,7 +1944,7 @@
     sp<CLIENT> client = nullptr;
     int facing = -1;
     int orientation = 0;
-    bool isNonSystemNdk = (clientPackageName.size() == 0);
+
     {
         // Acquire mServiceLock and prevent other clients from connecting
         std::unique_ptr<AutoConditionLock> lock =
@@ -1832,8 +2009,10 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
+        int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                        /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1847,6 +2026,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
+                overrideToPortrait, forceSlowJpegMode,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -1901,13 +2081,42 @@
             }
         }
 
+        // Enable/disable camera service watchdog
+        client->setCameraServiceWatchdog(mCameraServiceWatchdogEnabled);
+
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+        } else if (overrideToPortrait && portraitRotation != 0) {
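+            // Map the camera's portrait rotation to the equivalent rotate-and-crop override.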
+            uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+            switch (portraitRotation) {
+                case 90:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+                    break;
+                case 180:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+                    break;
+                case 270:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+                    break;
+                default:
+                    ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+                    break;
+            }
+            client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
-          client->setRotateAndCropOverride(
-              CameraServiceProxyWrapper::getRotateAndCropOverride(
-                  clientPackageName, facing, multiuser_get_user_id(clientUid)));
+            client->setRotateAndCropOverride(
+                mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                    clientPackageName, facing, multiuser_get_user_id(clientUid)));
+        }
+
+        // Set autoframing override behaviour
+        if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+            client->setAutoframingOverride(mOverrideAutoframingMode);
+        } else {
+            client->setAutoframingOverride(
+                mCameraServiceProxyWrapper->getAutoframingOverride(
+                    clientPackageName));
         }
 
         // Set camera muting behavior
@@ -1949,6 +2158,8 @@
         }
 
         client->setImageDumpMask(mImageDumpMask);
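+        // Propagate any service-wide stream use case and zoom overrides to the new client.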
+        client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
+        client->setZoomOverride(mZoomOverrideValue);
     } // lock is destroyed, allow further connect calls
 
     // Important: release the mutex here so the client can call back into the service from its
@@ -1956,7 +2167,7 @@
     device = client;
 
     int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
-    CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+    mCameraServiceProxyWrapper->logOpen(cameraId, facing, clientPackageName,
             effectiveApiLevel, isNonSystemNdk, openLatencyMs);
 
     {
@@ -2022,6 +2233,10 @@
                 onlineClientDesc->getOwnerId(), onlinePriority.getState(),
                 // native clients don't have offline processing support.
                 /*oomScoreOffset*/ 0, /*systemNativeClient*/false);
+        if (offlineClientDesc == nullptr) {
+            ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
+            return BAD_VALUE;
+        }
 
         // Allow only one offline device per camera
         auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -2382,9 +2597,20 @@
 
     for (const auto& it : mListenerList) {
         auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
-        if (!ret.isOk()) {
-            ALOGE("%s: Failed to trigger permission callback: %d", __FUNCTION__,
-                    ret.exceptionCode());
+        it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+                __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
+    }
+}
+
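+// Notify only the registered status listeners whose UIDs appear in notifyUidSet that
+// camera access priorities have changed.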
+void CameraService::notifyMonitoredUids(const std::unordered_set<uid_t> &notifyUidSet) {
+    Mutex::Autolock lock(mStatusListenerLock);
+
+    for (const auto& it : mListenerList) {
+        if (notifyUidSet.find(it->getListenerUid()) != notifyUidSet.end()) {
+            ALOGV("%s: notifying uid %d", __FUNCTION__, it->getListenerUid());
+            auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
+            it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+                    __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
         }
     }
 }
@@ -2409,6 +2635,11 @@
 
     ATRACE_CALL();
 
+    {
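+        // Remember the most recent device state before forwarding it to the provider manager.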
+        Mutex::Autolock lock(mServiceLock);
+        mDeviceState = newState;
+    }
+
     mCameraProviderManager->notifyDeviceStateChange(newState);
 
     return Status::ok();
@@ -2442,12 +2673,12 @@
     for (auto& current : clients) {
         if (current != nullptr) {
             const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr) {
-              basicClient->setRotateAndCropOverride(
-                  CameraServiceProxyWrapper::getRotateAndCropOverride(
-                      basicClient->getPackageName(),
-                      basicClient->getCameraFacing(),
-                      multiuser_get_user_id(basicClient->getClientUid())));
+            if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
+                basicClient->setRotateAndCropOverride(
+                        mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                                basicClient->getPackageName(),
+                                basicClient->getCameraFacing(),
+                                multiuser_get_user_id(basicClient->getClientUid())));
             }
         }
     }
@@ -2516,7 +2747,14 @@
     // Check for camera permissions
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
-    if ((callingPid != getpid()) && !checkPermission(sCameraPermission, callingPid, callingUid)) {
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+    attributionSource.pid = callingPid;
+    attributionSource.uid = callingUid;
+    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
+                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    if ((callingPid != getpid()) && !checkPermissionForCamera) {
         ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
@@ -2563,8 +2801,13 @@
 
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
-    bool openCloseCallbackAllowed = checkPermission(sCameraOpenCloseListenerPermission,
-            clientPid, clientUid, /*logPermissionFailure*/false);
+    permission::PermissionChecker permissionChecker;
+    AttributionSourceState attributionSource{};
+    attributionSource.uid = clientUid;
+    attributionSource.pid = clientPid;
+    bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
+            sCameraOpenCloseListenerPermission, attributionSource, String16(),
+            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -2593,7 +2836,7 @@
         // permissions the listener process has / whether it is a vendor listener. Since it might be
         // eligible to listen to other camera ids.
         mListenerList.emplace_back(serviceListener);
-        mUidPolicy->registerMonitorUid(clientUid);
+        mUidPolicy->registerMonitorUid(clientUid, /*openCamera*/false);
     }
 
     /* Collect current devices and status */
@@ -2661,7 +2904,7 @@
         Mutex::Autolock lock(mStatusListenerLock);
         for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) {
             if (IInterface::asBinder((*it)->getListener()) == IInterface::asBinder(listener)) {
-                mUidPolicy->unregisterMonitorUid((*it)->getListenerUid());
+                mUidPolicy->unregisterMonitorUid((*it)->getListenerUid(), /*closeCamera*/false);
                 IInterface::asBinder(listener)->unlinkToDeath(*it);
                 mListenerList.erase(it);
                 return Status::ok();
@@ -2719,7 +2962,8 @@
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    auto deviceVersionAndTransport = getDeviceVersion(id);
+    int portraitRotation;
+    auto deviceVersionAndTransport = getDeviceVersion(id, false, &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         String8 msg = String8::format("Unknown camera ID %s", id.string());
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -2830,6 +3074,13 @@
     return binder::Status::ok();
 }
 
+Status CameraService::reportExtensionSessionStats(
+        const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/) {
+    ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
+    *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
+    return Status::ok();
+}
+
 void CameraService::removeByClient(const BasicClient* client) {
     Mutex::Autolock lock(mServiceLock);
     for (auto& i : mActiveClientManager.getAll()) {
@@ -3204,13 +3455,13 @@
         const String8& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid) :
+        int servicePid, bool overrideToPortrait) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid),
+                servicePid, overrideToPortrait),
         mCameraId(api1CameraId)
 {
     int callingPid = CameraThreadState::getCallingPid();
@@ -3240,7 +3491,7 @@
         const String16& clientPackageName, bool nativeClient,
         const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid):
+        int servicePid, bool overrideToPortrait):
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -3248,6 +3499,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
+        mOverrideToPortrait(overrideToPortrait),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
@@ -3257,37 +3509,6 @@
         sCameraService = cameraService;
     }
 
-    // In some cases the calling code has no access to the package it runs under.
-    // For example, NDK camera API.
-    // In this case we will get the packages for the calling UID and pick the first one
-    // for attributing the app op. This will work correctly for runtime permissions
-    // as for legacy apps we will toggle the app op for all packages in the UID.
-    // The caveat is that the operation may be attributed to the wrong package and
-    // stats based on app ops may be slightly off.
-    if (mClientPackageName.size() <= 0) {
-        sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder = sm->getService(String16(kPermissionServiceName));
-        if (binder == 0) {
-            ALOGE("Cannot get permission service");
-            // Leave mClientPackageName unchanged (empty) and the further interaction
-            // with camera will fail in BasicClient::startCameraOps
-            return;
-        }
-
-        sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
-        Vector<String16> packages;
-
-        permCtrl->getPackagesForUid(mClientUid, packages);
-
-        if (packages.isEmpty()) {
-            ALOGE("No packages for calling UID");
-            // Leave mClientPackageName unchanged (empty) and the further interaction
-            // with camera will fail in BasicClient::startCameraOps
-            return;
-        }
-        mClientPackageName = packages[0];
-    }
-
     // There are 2 scenarios in which a client won't have AppOps operations
     // (both scenarios : native clients)
     //    1) It's an system native client*, the package name will be empty
@@ -3337,6 +3558,13 @@
     // client shouldn't be able to call into us anymore
     mClientPid = 0;
 
+    const auto& mActivityManager = getActivityManager();
+    if (mActivityManager) {
+        mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+            CameraThreadState::getCallingUid(),
+            CameraThreadState::getCallingPid());
+    }
+
     return res;
 }
 
@@ -3429,6 +3657,11 @@
                 mClientPackageName);
         bool isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+        // We don't want to return EACCES if camera privacy is enabled. We
+        // prefer to open the camera successfully and perform camera muting or
+        // blocking in connectHelper, since handleAppOpMode can be called before
+        // the connection has been fully established, at which point the camera
+        // muting capabilities are still unknown.
         if (!isUidActive || !isCameraPrivacyEnabled) {
             ALOGI("Camera %s: Access for \"%s\" has been restricted",
                     mCameraIdStr.string(), String8(mClientPackageName).string());
@@ -3467,7 +3700,7 @@
     // Transition device availability listeners from PRESENT -> NOT_AVAILABLE
     sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
 
-    sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
 
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
@@ -3575,7 +3808,7 @@
     }
     mOpsCallback.clear();
 
-    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
 
     // Notify listeners of camera open/close status
     sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
@@ -3644,8 +3877,7 @@
 // ----------------------------------------------------------------------------
 
 void CameraService::Client::notifyError(int32_t errorCode,
-        const CaptureResultExtras& resultExtras) {
-    (void) resultExtras;
+        [[maybe_unused]] const CaptureResultExtras& resultExtras) {
     if (mRemoteCallback != NULL) {
         int32_t api1ErrorCode = CAMERA_ERROR_RELEASED;
         if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED) {
@@ -3683,20 +3915,44 @@
 //                  UidPolicy
 // ----------------------------------------------------------------------------
 
-void CameraService::UidPolicy::registerSelf() {
+void CameraService::UidPolicy::registerWithActivityManager() {
     Mutex::Autolock _l(mUidLock);
+    int32_t emptyUidArray[] = { };
 
     if (mRegistered) return;
     status_t res = mAm.linkToDeath(this);
-    mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
+    mAm.registerUidObserverForUids(this, ActivityManager::UID_OBSERVER_GONE
             | ActivityManager::UID_OBSERVER_IDLE
             | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
             | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
-            String16("cameraserver"));
+            String16("cameraserver"), emptyUidArray, 0, mObserverToken);
     if (res == OK) {
         mRegistered = true;
         ALOGV("UidPolicy: Registered with ActivityManager");
+    } else {
+        ALOGE("UidPolicy: Failed to register with ActivityManager: 0x%08x", res);
+    }
+}
+
+void CameraService::UidPolicy::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+    if (name != String16(kActivityServiceName)) {
+        return;
+    }
+
+    registerWithActivityManager();
+}
+
+void CameraService::UidPolicy::registerSelf() {
+    // Use checkService to see if the activity service is available. If it is
+    // not, register for notifications instead of blocking until the service
+    // is ready.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+    if (!binder) {
+        sm->registerForNotifications(String16(kActivityServiceName), this);
+    } else {
+        registerWithActivityManager();
     }
 }
 
@@ -3755,24 +4011,54 @@
     }
 }
 
-void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
-    bool procAdjChange = false;
+/**
+ * When the OOM adj of the uid owning the camera changes, a different uid waiting on camera
+ * privileges may take precedence if the owner's new OOM adj is greater than the waiting package.
+ * Here, we track which monitoredUid has the camera, and track its adj relative to other
+ * monitoredUids. If it is revised above some other monitoredUid, signal
+ * onCameraAccessPrioritiesChanged. This only needs to capture the case where there are two
+ * foreground apps in split screen - state changes will capture all other cases.
+ */
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid, int32_t adj) {
+    std::unordered_set<uid_t> notifyUidSet;
     {
         Mutex::Autolock _l(mUidLock);
-        if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
-            procAdjChange = true;
+        auto it = mMonitoredUids.find(uid);
+
+        if (it != mMonitoredUids.end()) {
+            if (it->second.hasCamera) {
+                for (auto &monitoredUid : mMonitoredUids) {
+                    if (monitoredUid.first != uid && adj > monitoredUid.second.procAdj) {
+                        ALOGV("%s: notify uid %d", __FUNCTION__, monitoredUid.first);
+                        notifyUidSet.emplace(monitoredUid.first);
+                    }
+                }
+                ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+                notifyUidSet.emplace(uid);
+            } else {
+                for (auto &monitoredUid : mMonitoredUids) {
+                    if (monitoredUid.second.hasCamera && adj < monitoredUid.second.procAdj) {
+                        ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+                        notifyUidSet.emplace(uid);
+                    }
+                }
+            }
+            it->second.procAdj = adj;
         }
     }
 
-    if (procAdjChange) {
+    if (notifyUidSet.size() > 0) {
         sp<CameraService> service = mService.promote();
         if (service != nullptr) {
-            service->notifyMonitoredUids();
+            service->notifyMonitoredUids(notifyUidSet);
         }
     }
 }
 
-void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
+/**
+ * Register a uid for monitoring, and note whether it owns a camera.
+ */
+void CameraService::UidPolicy::registerMonitorUid(uid_t uid, bool openCamera) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
@@ -3780,18 +4066,36 @@
     } else {
         MonitoredUid monitoredUid;
         monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+        monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
         monitoredUid.refCount = 1;
-        mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
+        it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
+        status_t res = mAm.addUidToObserver(mObserverToken, String16("cameraserver"), uid);
+        if (res != OK) {
+            ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
+        }
+    }
+
+    if (openCamera) {
+        it->second.hasCamera = true;
     }
 }
 
-void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid) {
+/**
+ * Unregister a uid for monitoring, and note whether it lost ownership of a camera.
+ */
+void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid, bool closeCamera) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
         it->second.refCount--;
         if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
+            status_t res = mAm.removeUidFromObserver(mObserverToken, String16("cameraserver"), uid);
+            if (res != OK) {
+                ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
+            }
+        } else if (closeCamera) {
+            it->second.hasCamera = false;
         }
     } else {
         ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
@@ -3913,7 +4217,9 @@
 // ----------------------------------------------------------------------------
 //                  SensorPrivacyPolicy
 // ----------------------------------------------------------------------------
-void CameraService::SensorPrivacyPolicy::registerSelf() {
+
+void CameraService::SensorPrivacyPolicy::registerWithSensorPrivacyManager()
+{
     Mutex::Autolock _l(mSensorPrivacyLock);
     if (mRegistered) {
         return;
@@ -3928,6 +4234,27 @@
     }
 }
 
+void CameraService::SensorPrivacyPolicy::onServiceRegistration(const String16& name,
+                                                               const sp<IBinder>&) {
+    if (name != String16(kSensorPrivacyServiceName)) {
+        return;
+    }
+
+    registerWithSensorPrivacyManager();
+}
+
+void CameraService::SensorPrivacyPolicy::registerSelf() {
+    // Use checkService to see if the sensor_privacy service is available.
+    // If it is not available yet, register for a notification instead of blocking.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->checkService(String16(kSensorPrivacyServiceName));
+    if (!binder) {
+        sm->registerForNotifications(String16(kSensorPrivacyServiceName), this);
+    } else {
+        registerWithSensorPrivacyManager();
+    }
+}
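
Both UidPolicy::registerSelf and SensorPrivacyPolicy::registerSelf follow the same check-then-wait pattern. A generic sketch of the pattern is shown below; registerWhenAvailable and doRegister are illustrative names only, not part of this patch:

    #include <binder/IServiceManager.h>
    #include <functional>

    // Sketch: look the service up without blocking; if it is not up yet, ask the
    // service manager to invoke the listener's onServiceRegistration() later.
    void registerWhenAvailable(const android::sp<android::IServiceManager>& sm,
            const android::String16& serviceName,
            const android::sp<android::IServiceManager::LocalRegistrationCallback>& listener,
            const std::function<void()>& doRegister) {
        android::sp<android::IBinder> binder = sm->checkService(serviceName);
        if (binder != nullptr) {
            doRegister();  // service already available, register immediately
        } else {
            sm->registerForNotifications(serviceName, listener);
        }
    }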
+
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
@@ -3937,6 +4264,10 @@
 }
 
 bool CameraService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
+    if (!mRegistered) {
+        registerWithSensorPrivacyManager();
+    }
+
     Mutex::Autolock _l(mSensorPrivacyLock);
     return mSensorPrivacyEnabled;
 }
@@ -3952,7 +4283,7 @@
     int toggleType __unused, int sensor __unused, bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
-        mSensorPrivacyEnabled = mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+        mSensorPrivacyEnabled = enabled;
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
     if (enabled) {
@@ -4349,6 +4680,13 @@
     String8 activeClientString = mActiveClientManager.toString();
     dprintf(fd, "Active Camera Clients:\n%s", activeClientString.string());
     dprintf(fd, "Allowed user IDs: %s\n", toString(mAllowedUsers).string());
+    if (mStreamUseCaseOverrides.size() > 0) {
+        dprintf(fd, "Active stream use case overrides:");
+        for (int64_t useCaseOverride : mStreamUseCaseOverrides) {
+            dprintf(fd, " %" PRId64, useCaseOverride);
+        }
+        dprintf(fd, "\n");
+    }
 
     dumpEventLog(fd);
 
@@ -4647,8 +4985,12 @@
                             cameraId.c_str());
                     continue;
                 }
-                listener->getListener()->onStatusChanged(mapToInterface(status),
+                auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
                         String16(cameraId));
+                listener->handleBinderStatus(ret,
+                        "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                        __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                        ret.exceptionCode());
             }
         });
 }
@@ -4681,10 +5023,10 @@
         } else {
             ret = it->getListener()->onCameraClosed(cameraId64);
         }
-        if (!ret.isOk()) {
-            ALOGE("%s: Failed to trigger onCameraOpened/onCameraClosed callback: %d", __FUNCTION__,
-                    ret.exceptionCode());
-        }
+
+        it->handleBinderStatus(ret,
+                "%s: Failed to trigger onCameraOpened/onCameraClosed callback for %d:%d: %d",
+                __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
     }
 }
 
@@ -4785,8 +5127,12 @@
                         String8(physicalCameraId).c_str());
                 continue;
             }
-            listener->getListener()->onPhysicalCameraStatusChanged(status,
+            auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
                     logicalCameraId, physicalCameraId);
+            listener->handleBinderStatus(ret,
+                    "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+                    __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                    ret.exceptionCode());
         }
     }
 }
@@ -4834,14 +5180,27 @@
         return handleSetRotateAndCrop(args);
     } else if (args.size() >= 1 && args[0] == String16("get-rotate-and-crop")) {
         return handleGetRotateAndCrop(out);
+    } else if (args.size() >= 2 && args[0] == String16("set-autoframing")) {
+        return handleSetAutoframing(args);
+    } else if (args.size() >= 1 && args[0] == String16("get-autoframing")) {
+        return handleGetAutoframing(out);
     } else if (args.size() >= 2 && args[0] == String16("set-image-dump-mask")) {
         return handleSetImageDumpMask(args);
     } else if (args.size() >= 1 && args[0] == String16("get-image-dump-mask")) {
         return handleGetImageDumpMask(out);
     } else if (args.size() >= 2 && args[0] == String16("set-camera-mute")) {
         return handleSetCameraMute(args);
+    } else if (args.size() >= 2 && args[0] == String16("set-stream-use-case-override")) {
+        return handleSetStreamUseCaseOverrides(args);
+    } else if (args.size() >= 1 && args[0] == String16("clear-stream-use-case-override")) {
+        handleClearStreamUseCaseOverrides();
+        return OK;
+    } else if (args.size() >= 1 && args[0] == String16("set-zoom-override")) {
+        return handleSetZoomOverride(args);
     } else if (args.size() >= 2 && args[0] == String16("watch")) {
         return handleWatchCommand(args, in, out);
+    } else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
+        return handleSetCameraServiceWatchdog(args);
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
         return OK;
@@ -4935,12 +5294,68 @@
     return OK;
 }
 
+status_t CameraService::handleSetAutoframing(const Vector<String16>& args) {
+    char* end;
+    int autoframingValue = (int) strtol(String8(args[1]), &end, /*base=*/10);
+    if ((*end != '\0') ||
+            (autoframingValue != ANDROID_CONTROL_AUTOFRAMING_OFF &&
+             autoframingValue != ANDROID_CONTROL_AUTOFRAMING_ON &&
+             autoframingValue != ANDROID_CONTROL_AUTOFRAMING_AUTO)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+    mOverrideAutoframingMode = autoframingValue;
+
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) return OK;
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                basicClient->setAutoframingOverride(autoframingValue);
+            }
+        }
+    }
+
+    return OK;
+}
+
+status_t CameraService::handleSetCameraServiceWatchdog(const Vector<String16>& args) {
+    int enableWatchdog = atoi(String8(args[1]));
+
+    if (enableWatchdog < 0 || enableWatchdog > 1) return BAD_VALUE;
+
+    Mutex::Autolock lock(mServiceLock);
+
+    mCameraServiceWatchdogEnabled = enableWatchdog;
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                basicClient->setCameraServiceWatchdog(enableWatchdog);
+            }
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraService::handleGetRotateAndCrop(int out) {
     Mutex::Autolock lock(mServiceLock);
 
     return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
 }
 
+status_t CameraService::handleGetAutoframing(int out) {
+    Mutex::Autolock lock(mServiceLock);
+
+    return dprintf(out, "autoframing override: %d\n", mOverrideAutoframingMode);
+}
+
 status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
     char *endPtr;
     errno = 0;
@@ -4988,6 +5403,71 @@
     return OK;
 }
 
+status_t CameraService::handleSetStreamUseCaseOverrides(const Vector<String16>& args) {
+    std::vector<int64_t> useCasesOverride;
+    for (size_t i = 1; i < args.size(); i++) {
+        int64_t useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+        String8 arg8 = String8(args[i]);
+        if (arg8 == "DEFAULT") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+        } else if (arg8 == "PREVIEW") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW;
+        } else if (arg8 == "STILL_CAPTURE") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE;
+        } else if (arg8 == "VIDEO_RECORD") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD;
+        } else if (arg8 == "PREVIEW_VIDEO_STILL") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
+        } else if (arg8 == "VIDEO_CALL") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
+        } else if (arg8 == "CROPPED_RAW") {
+            useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
+        } else {
+            ALOGE("%s: Invalid stream use case %s", __FUNCTION__, arg8.c_str());
+            return BAD_VALUE;
+        }
+        useCasesOverride.push_back(useCase);
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+    mStreamUseCaseOverrides = std::move(useCasesOverride);
+
+    return OK;
+}
+
+void CameraService::handleClearStreamUseCaseOverrides() {
+    Mutex::Autolock lock(mServiceLock);
+    mStreamUseCaseOverrides.clear();
+}
+
+status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
+    char* end;
+    int zoomOverrideValue = strtol(String8(args[1]), &end, /*base=*/10);
+    if ((*end != '\0') ||
+            (zoomOverrideValue != -1 &&
+             zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
+             zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mServiceLock);
+    mZoomOverrideValue = zoomOverrideValue;
+
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                if (basicClient->supportsZoomOverride()) {
+                    basicClient->setZoomOverride(mZoomOverrideValue);
+                }
+            }
+        }
+    }
+
+    return OK;
+}
+
 status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
     if (args.size() >= 3 && args[1] == String16("start")) {
         return startWatchingTags(args, outFd);
@@ -5338,10 +5818,24 @@
         "  set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
         "      Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
         "  get-rotate-and-crop returns the current override rotate-and-crop value\n"
+        "  set-autoframing <VALUE> overrides the autoframing value for AUTO\n"
+        "      Valid values 0=false, 1=true, 2=auto\n"
+        "  get-autoframing returns the current override autoframing value\n"
         "  set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
         "      Valid values 0=OFF, 1=ON for JPEG\n"
         "  get-image-dump-mask returns the current image-dump-mask value\n"
         "  set-camera-mute <0/1> enable or disable camera muting\n"
+        "  set-stream-use-case-override <usecase1> <usecase2> ... override stream use cases\n"
+        "      Use cases are applied in descending order of resolution: usecase1 is assigned\n"
+        "      to the largest resolution, usecase2 to the 2nd largest, and so on. If fewer use\n"
+        "      cases than streams are specified, the last use case is assigned to all remaining\n"
+        "      streams. For multiple streams with the same resolution, ties are broken in the\n"
+        "      order (JPEG, RAW, YUV, PRIV)\n"
+        "      Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
+        "      PREVIEW_VIDEO_STILL, VIDEO_CALL, CROPPED_RAW\n"
+        "  clear-stream-use-case-override clear the stream use case override\n"
+        "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
+        "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
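
For readers of the help text above, the descending-resolution assignment rule can be illustrated with the following standalone sketch. StreamInfo and applyUseCaseOverrides are hypothetical names, the format tie-break (JPEG, RAW, YUV, PRIV) is omitted for brevity, and this is not the service's actual implementation:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct StreamInfo { int32_t width; int32_t height; int64_t useCase; };

    // Assign overrides to streams sorted by pixel count, largest first; when there are
    // more streams than overrides, the last override is reused for the remaining streams.
    void applyUseCaseOverrides(std::vector<StreamInfo>& streams,
                               const std::vector<int64_t>& overrides) {
        if (overrides.empty()) return;
        std::sort(streams.begin(), streams.end(), [](const StreamInfo& a, const StreamInfo& b) {
            return int64_t{a.width} * a.height > int64_t{b.width} * b.height;
        });
        for (size_t i = 0; i < streams.size(); i++) {
            streams[i].useCase = overrides[std::min(i, overrides.size() - 1)];
        }
    }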
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index d96ea00..3214d4c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,6 +29,8 @@
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
@@ -48,6 +50,7 @@
 #include "utils/AutoConditionLock.h"
 #include "utils/ClientManager.h"
 #include "utils/IPCTransport.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 #include <set>
 #include <string>
@@ -70,7 +73,8 @@
     public BinderService<CameraService>,
     public virtual ::android::hardware::BnCameraService,
     public virtual IBinder::DeathRecipient,
-    public virtual CameraProviderManager::StatusListener
+    public virtual CameraProviderManager::StatusListener,
+    public virtual IServiceManager::LocalRegistrationCallback
 {
     friend class BinderService<CameraService>;
     friend class CameraOfflineSessionClient;
@@ -97,10 +101,19 @@
     // Event log ID
     static const int SN_EVENT_LOG_ID = 0x534e4554;
 
+    // Register camera service
+    static void instantiate();
+
     // Implementation of BinderService<T>
     static char const* getServiceName() { return "media.camera"; }
 
-                        CameraService();
+    // Implementation of IServiceManager::LocalRegistrationCallback
+    virtual void onServiceRegistration(const String16& name, const sp<IBinder>& binder) override;
+
+                        // Non-null arguments for cameraServiceProxyWrapper should be provided for
+                        // testing purposes only.
+                        CameraService(std::shared_ptr<CameraServiceProxyWrapper>
+                                cameraServiceProxyWrapper = nullptr);
     virtual             ~CameraService();
 
     /////////////////////////////////////////////////////////////////////
@@ -127,10 +140,10 @@
     // ICameraService
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId,
-            hardware::CameraInfo* cameraInfo);
+    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
+            hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const String16& cameraId,
-            int targetSdkVersion, CameraMetadata* cameraInfo);
+            int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -141,13 +154,14 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const String16& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
+            bool overrideToPortrait, bool forceSlowJpegMode,
             /*out*/
-            sp<hardware::ICamera>* device);
+            sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
             const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion,
+            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -204,6 +218,9 @@
             /*out*/
             sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
 
+    virtual binder::Status reportExtensionSessionStats(
+            const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -219,6 +236,7 @@
 
     // Monitored UIDs availability notification
     void                notifyMonitoredUids();
+    void                notifyMonitoredUids(const std::unordered_set<uid_t> &notifyUidSet);
 
     // Stores current open session device info in temp file.
     void cacheDump();
@@ -243,8 +261,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId, int* facing = nullptr,
-            int* orientation = nullptr);
+    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId,
+            bool overrideToPortrait, int* portraitRotation,
+            int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
     // Methods to be used in CameraService class tests only
@@ -282,6 +301,10 @@
             return mRemoteBinder;
         }
 
+        bool getOverrideToPortrait() const {
+            return mOverrideToPortrait;
+        }
+
         // Disallows dumping over binder interface
         virtual status_t dump(int fd, const Vector<String16>& args);
         // Internal dump method to be called by CameraService
@@ -333,12 +356,31 @@
         // Override rotate-and-crop AUTO behavior
         virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
 
+        // Override autoframing AUTO behaviour
+        virtual status_t setAutoframingOverride(uint8_t autoframingValue) = 0;
+
         // Whether the client supports camera muting (black only output)
         virtual bool supportsCameraMute() = 0;
 
         // Set/reset camera mute
         virtual status_t setCameraMute(bool enabled) = 0;
 
+        // Set Camera service watchdog
+        virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
+
+        // Set stream use case overrides
+        virtual void setStreamUseCaseOverrides(
+                const std::vector<int64_t>& useCaseOverrides) = 0;
+
+        // Clear stream use case overrides
+        virtual void clearStreamUseCaseOverrides() = 0;
+
+        // Whether the client supports camera zoom override
+        virtual bool supportsZoomOverride() = 0;
+
+        // Set/reset zoom override
+        virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
         // The injection camera session to replace the internal camera
         // session.
         virtual status_t injectCamera(const String8& injectedCamId,
@@ -358,7 +400,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
 
         virtual ~BasicClient();
 
@@ -381,6 +424,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
+        bool                            mOverrideToPortrait;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -470,7 +514,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
         ~Client();
 
         // return our camera client
@@ -563,6 +608,20 @@
 
 private:
 
+    // TODO: b/263304156 update this to make use of a death callback for more
+    // robust/fault tolerant logging
+    static const sp<IActivityManager>& getActivityManager() {
+        static const char* kActivityService = "activity";
+        static const auto activityManager = []() -> sp<IActivityManager> {
+            const sp<IServiceManager> sm(defaultServiceManager());
+            if (sm != nullptr) {
+                return interface_cast<IActivityManager>(
+                        sm->checkService(String16(kActivityService)));
+            }
+            return nullptr;
+        }();
+        return activityManager;
+    }
+
     /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -688,7 +747,10 @@
 
     // Observer for UID lifecycle enforcing that UIDs in idle
     // state cannot use the camera to protect user privacy.
-    class UidPolicy : public BnUidObserver, public virtual IBinder::DeathRecipient {
+    class UidPolicy :
+        public BnUidObserver,
+        public virtual IBinder::DeathRecipient,
+        public virtual IServiceManager::LocalRegistrationCallback {
     public:
         explicit UidPolicy(sp<CameraService> service)
                 : mRegistered(false), mService(service) {}
@@ -705,23 +767,29 @@
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
                 int32_t capability) override;
-        void onUidProcAdjChanged(uid_t uid) override;
+        void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
         void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
         void removeOverrideUid(uid_t uid, String16 callingPackage);
 
-        void registerMonitorUid(uid_t uid);
-        void unregisterMonitorUid(uid_t uid);
+        void registerMonitorUid(uid_t uid, bool openCamera);
+        void unregisterMonitorUid(uid_t uid, bool closeCamera);
 
+        // Implementation of IServiceManager::LocalRegistrationCallback
+        virtual void onServiceRegistration(const String16& name,
+                        const sp<IBinder>& binder) override;
         // IBinder::DeathRecipient implementation
         virtual void binderDied(const wp<IBinder> &who);
     private:
         bool isUidActiveLocked(uid_t uid, String16 callingPackage);
         int32_t getProcStateLocked(uid_t uid);
         void updateOverrideUid(uid_t uid, String16 callingPackage, bool active, bool insert);
+        void registerWithActivityManager();
 
         struct MonitoredUid {
             int32_t procState;
+            int32_t procAdj;
+            bool hasCamera = false;
             size_t refCount;
         };
 
@@ -733,12 +801,14 @@
         // Monitored uid map
         std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
         std::unordered_map<uid_t, bool> mOverrideUids;
+        sp<IBinder> mObserverToken;
     }; // class UidPolicy
 
     // If sensor privacy is enabled then all apps, including those that are active, should be
     // prevented from accessing the camera.
     class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
-            public virtual IBinder::DeathRecipient {
+            public virtual IBinder::DeathRecipient,
+            public virtual IServiceManager::LocalRegistrationCallback {
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service)
                     : mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
@@ -752,6 +822,9 @@
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
 
+            // Implementation of IServiceManager::LocalRegistrationCallback
+            virtual void onServiceRegistration(const String16& name,
+                                               const sp<IBinder>& binder) override;
             // IBinder::DeathRecipient implementation
             virtual void binderDied(const wp<IBinder> &who);
 
@@ -763,12 +836,15 @@
             bool mRegistered;
 
             bool hasCameraPrivacyFeature();
+            void registerWithSensorPrivacyManager();
     };
 
     sp<UidPolicy> mUidPolicy;
 
     sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // Delay-load the Camera HAL module
     virtual void onFirstRef();
 
@@ -825,12 +901,22 @@
     // sorted in alpha-numeric order.
     void filterAPI1SystemCameraLocked(const std::vector<std::string> &normalDeviceIds);
 
+    // In some cases the calling code has no access to the package it runs under.
+    // For example, NDK camera API.
+    // In this case we will get the packages for the calling UID and pick the first one
+    // for attributing the app op. This will work correctly for runtime permissions
+    // as for legacy apps we will toggle the app op for all packages in the UID.
+    // The caveat is that the operation may be attributed to the wrong package and
+    // stats based on app ops may be slightly off.
+    String16 getPackageNameFromUid(int clientUid);
+
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
+            int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
+            bool overrideToPortrait, bool forceSlowJpegMode,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -1055,6 +1141,29 @@
                 return IInterface::asBinder(mListener)->linkToDeath(this);
             }
 
+            template<typename... args_t>
+            void handleBinderStatus(const binder::Status &ret, const char *logOnError,
+                    args_t... args) {
+                if (!ret.isOk() &&
+                        (ret.exceptionCode() != binder::Status::Exception::EX_TRANSACTION_FAILED
+                        || !mLastTransactFailed)) {
+                    ALOGE(logOnError, args...);
+                }
+
+                // If the transaction failed, the process may have died (or other things, see
+                // b/28321379). Mute consecutive errors from this listener to avoid log spam.
+                if (ret.exceptionCode() == binder::Status::Exception::EX_TRANSACTION_FAILED) {
+                    if (!mLastTransactFailed) {
+                        ALOGE("%s: Muting similar errors from listener %d:%d", __FUNCTION__,
+                                mListenerUid, mListenerPid);
+                    }
+                    mLastTransactFailed = true;
+                } else {
+                    // Reset mLastTransactFailed when binder becomes healthy again.
+                    mLastTransactFailed = false;
+                }
+            }
+
             virtual void binderDied(const wp<IBinder> &/*who*/) {
                 auto parent = mParent.promote();
                 if (parent.get() != nullptr) {
@@ -1075,6 +1184,9 @@
             int mListenerPid = -1;
             bool mIsVendorListener = false;
             bool mOpenCloseCallbackAllowed = false;
+
+            // Flag for preventing log spam when binder becomes unhealthy
+            bool mLastTransactFailed = false;
     };
 
     // Guarded by mStatusListenerMutex
@@ -1186,6 +1298,12 @@
     // Get the rotate-and-crop AUTO override behavior
     status_t handleGetRotateAndCrop(int out);
 
+    // Set the autoframing AUTO override behaviour.
+    status_t handleSetAutoframing(const Vector<String16>& args);
+
+    // Get the autoframing AUTO override behaviour
+    status_t handleGetAutoframing(int out);
+
     // Set the mask for image dump to disk
     status_t handleSetImageDumpMask(const Vector<String16>& args);
 
@@ -1195,9 +1313,21 @@
     // Set the camera mute state
     status_t handleSetCameraMute(const Vector<String16>& args);
 
+    // Set the stream use case overrides
+    status_t handleSetStreamUseCaseOverrides(const Vector<String16>& args);
+
+    // Clear the stream use case overrides
+    void handleClearStreamUseCaseOverrides();
+
+    // Set or clear the zoom override flag
+    status_t handleSetZoomOverride(const Vector<String16>& args);
+
     // Handle 'watch' command as passed through 'cmd'
     status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
 
+    // Set the camera service watchdog
+    status_t handleSetCameraServiceWatchdog(const Vector<String16>& args);
+
     // Enable tag monitoring of the given tags in provided clients
     status_t startWatchingTags(const Vector<String16> &args, int outFd);
 
@@ -1243,7 +1373,8 @@
             const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
+            bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+            /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
@@ -1278,12 +1409,24 @@
     // Current override cmd rotate-and-crop mode; AUTO means no override
     uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
 
+    // Current autoframing mode
+    uint8_t mOverrideAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_AUTO;
+
     // Current image dump mask
     uint8_t mImageDumpMask = 0;
 
     // Current camera mute mode
     bool mOverrideCameraMuteMode = false;
 
+    // Camera Service watchdog flag
+    bool mCameraServiceWatchdogEnabled = true;
+
+    // Current stream use case overrides
+    std::vector<int64_t> mStreamUseCaseOverrides;
+
+    // Current zoom override value
+    int32_t mZoomOverrideValue = -1;
+
     /**
      * A listener class that implements the IBinder::DeathRecipient interface
      * for use to call back the error state injected by the external camera, and
@@ -1337,6 +1480,9 @@
     // Guard mInjectionInternalCamId and mInjectionInitPending.
     Mutex mInjectionParametersLock;
 
+    // Track the folded/unfolded device state. 0 == UNFOLDED, 4 == FOLDED
+    int64_t mDeviceState;
+
     void updateTorchUidMapLocked(const String16& cameraId, int uid);
 };
 
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index fcd6ebe..1c1bd24 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "CameraServiceWatchdog"
 
 #include "CameraServiceWatchdog.h"
+#include "android/set_abort_message.h"
+#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 
@@ -35,14 +37,21 @@
     {
         AutoMutex _l(mWatchdogLock);
 
-        for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+        for (auto it = mTidMap.begin(); it != mTidMap.end(); it++) {
             uint32_t currentThreadId = it->first;
 
-            tidToCycleCounterMap[currentThreadId]++;
+            mTidMap[currentThreadId].cycles++;
 
-            if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
-                ALOGW("CameraServiceWatchdog triggering kill for pid: %d", getpid());
-                kill(getpid(), SIGKILL);
+            if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
+                std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
+                android_set_abort_message(abortMessage.c_str());
+                ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
+                        currentThreadId);
+                mCameraServiceProxyWrapper->logClose(mCameraId, 0 /*latencyMs*/,
+                        true /*deviceError*/);
+                // We use abort here so we can get a tombstone for better
+                // debugging.
+                abort();
             }
         }
     }
@@ -50,13 +59,19 @@
     return true;
 }
 
+std::string CameraServiceWatchdog::getAbortMessage(const std::string& functionName) {
+    std::string res = "CameraServiceWatchdog triggering abort during "
+            + functionName;
+    return res;
+}
+
 void CameraServiceWatchdog::requestExit()
 {
     Thread::requestExit();
 
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap.clear();
+    mTidMap.clear();
 
     if (mPause) {
         mPause = false;
@@ -64,22 +79,36 @@
     }
 }
 
+void CameraServiceWatchdog::setEnabled(bool enable)
+{
+    AutoMutex _l(mEnabledLock);
+
+    mEnabled = enable;
+}
+
 void CameraServiceWatchdog::stop(uint32_t tid)
 {
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap.erase(tid);
+    mTidMap.erase(tid);
 
-    if (tidToCycleCounterMap.empty()) {
+    if (mTidMap.empty()) {
         mPause = true;
     }
 }
 
-void CameraServiceWatchdog::start(uint32_t tid)
+void CameraServiceWatchdog::start(uint32_t tid, const char* functionName)
 {
     AutoMutex _l(mWatchdogLock);
 
-    tidToCycleCounterMap[tid] = 0;
+    MonitoredFunction monitoredFunction = {};
+    monitoredFunction.cycles = 0;
+    monitoredFunction.functionName = functionName;
+    mTidMap[tid] = monitoredFunction;
 
     if (mPause) {
         mPause = false;
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index f4955e2..de6ac9e 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -21,23 +21,28 @@
  * expected duration has exceeded.
  * Notes on multi-threaded behaviors:
  *    - The threadloop is blocked/paused when there are no calls being
- *   monitored.
+ *   monitored (when the TID cycle to counter map is empty).
  *   - The start and stop functions handle simultaneous call monitoring
  *   and single call monitoring differently. See function documentation for
  *   more details.
+ * To disable/enable:
+ *   - adb shell cmd media.camera set-watchdog [0/1]
  */
-
+#pragma once
 #include <chrono>
 #include <thread>
 #include <time.h>
+#include <utils/String8.h>
 #include <utils/Thread.h>
 #include <utils/Log.h>
 #include <unordered_map>
 
+#include "utils/CameraServiceProxyWrapper.h"
+
 // Used to wrap the call of interest in start and stop calls
-#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__)
 #define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
-        watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
+        watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
 
 // Default cycles and cycle length values used to calculate permitted elapsed time
 const static size_t   kMaxCycles     = 100;
@@ -47,35 +52,62 @@
 
 class CameraServiceWatchdog : public Thread {
 
-public:
-    explicit CameraServiceWatchdog() : mPause(true), mMaxCycles(kMaxCycles),
-            mCycleLengthMs(kCycleLengthMs) {};
+struct MonitoredFunction {
+    uint32_t cycles;
+    std::string functionName;
+};
 
-    explicit CameraServiceWatchdog (size_t maxCycles, uint32_t cycleLengthMs) :
-            mPause(true), mMaxCycles(maxCycles), mCycleLengthMs(cycleLengthMs) {};
+public:
+    explicit CameraServiceWatchdog(const String8 &cameraId,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+                    mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
+                    mCycleLengthMs(kCycleLengthMs), mEnabled(true),
+                    mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
+
+    explicit CameraServiceWatchdog (const String8 &cameraId, size_t maxCycles,
+            uint32_t cycleLengthMs, bool enabled,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+                    mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
+                    mCycleLengthMs(cycleLengthMs), mEnabled(enabled),
+                    mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
 
     virtual ~CameraServiceWatchdog() {};
 
     virtual void requestExit();
 
+    /** Enables/disables the watchdog */
+    void setEnabled(bool enable);
+
     /** Used to wrap monitored calls in start and stop functions using custom timer values */
     template<typename T>
-    auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
-        auto res = NULL;
+    auto watchThread(T func, uint32_t tid, const char* functionName, uint32_t cycles,
+            uint32_t cycleLength) {
+        decltype(func()) res;
 
         if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
             // Create another instance of the watchdog to prevent disruption
             // of timer for current monitored calls
-            sp<CameraServiceWatchdog> tempWatchdog =
-                    new CameraServiceWatchdog(cycles, cycleLength);
-            tempWatchdog->run("CameraServiceWatchdog");
-            res = tempWatchdog->watchThread(func, tid);
+
+            // Lock for mEnabled
+            mEnabledLock.lock();
+            sp<CameraServiceWatchdog> tempWatchdog = new CameraServiceWatchdog(
+                    mCameraId, cycles, cycleLength, mEnabled, mCameraServiceProxyWrapper);
+            mEnabledLock.unlock();
+
+            status_t status = tempWatchdog->run("CameraServiceWatchdog");
+            if (status != OK) {
+                ALOGE("Unable to watch thread: %s (%d)", strerror(-status), status);
+                res = watchThread(func, tid, functionName);
+                return res;
+            }
+
+            res = tempWatchdog->watchThread(func, tid, functionName);
             tempWatchdog->requestExit();
             tempWatchdog.clear();
         } else {
             // If custom timer values are equivalent to set class timer values, use
             // current thread
-            res = watchThread(func, tid);
+            res = watchThread(func, tid, functionName);
         }
 
         return res;
@@ -83,12 +115,17 @@
 
     /** Used to wrap monitored calls in start and stop functions using class timer values */
     template<typename T>
-    auto watchThread(T func, uint32_t tid) {
-        auto res = NULL;
+    auto watchThread(T func, uint32_t tid, const char* functionName) {
+        decltype(func()) res;
+        AutoMutex _l(mEnabledLock);
 
-        start(tid);
-        res = func();
-        stop(tid);
+        if (mEnabled) {
+            start(tid, functionName);
+            res = func();
+            stop(tid);
+        } else {
+            res = func();
+        }
 
         return res;
     }
@@ -99,7 +136,7 @@
      * Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
      * start() unblocks and starts the watchdog
      */
-    void start(uint32_t tid);
+    void start(uint32_t tid, const char* functionName);
 
     /**
      * If there are no calls left to be monitored, stop blocks/pauses threadloop
@@ -107,15 +144,24 @@
      */
     void stop(uint32_t tid);
 
+    std::string getAbortMessage(const std::string& functionName);
+
     virtual bool    threadLoop();
 
-    Mutex           mWatchdogLock;        // Lock for condition variable
-    Condition       mWatchdogCondition;   // Condition variable for stop/start
-    bool            mPause;               // True if thread is currently paused
-    uint32_t        mMaxCycles;           // Max cycles
-    uint32_t        mCycleLengthMs;       // Length of time elapsed per cycle
+    Mutex           mWatchdogLock;      // Lock for condition variable
+    Mutex           mEnabledLock;       // Lock for enabled status
+    Condition       mWatchdogCondition; // Condition variable for stop/start
+    String8         mCameraId;          // Camera Id the watchdog belongs to
+    bool            mPause;             // True if tid map is empty
+    uint32_t        mMaxCycles;         // Max cycles
+    uint32_t        mCycleLengthMs;     // Length of time elapsed per cycle
+    bool            mEnabled;           // True if watchdog is enabled
 
-    std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
+    std::unordered_map<uint32_t, MonitoredFunction> mTidMap; // Thread Id to MonitoredFunction type
+                                                             // which retrieves the num of cycles
+                                                             // and name of the function
 };
 
 }   // namespace android
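
Given the new __FUNCTION__ argument, a caller-side sketch of how the WATCH macro is meant to be used follows. SomeHalInterface and flushWithWatchdog are hypothetical stand-ins, not from this patch; the real call sites live elsewhere in cameraserver:

    #include <unistd.h>
    #include "CameraServiceWatchdog.h"

    struct SomeHalInterface : public android::RefBase {   // hypothetical HAL wrapper
        android::status_t flush() { return android::OK; }
    };

    // WATCH(x) expands to watchThread([&] { return x; }, gettid(), __FUNCTION__), so it
    // must be invoked on a CameraServiceWatchdog instance; the current function's name is
    // recorded and ends up in the abort message if the call exceeds the cycle budget.
    android::status_t flushWithWatchdog(
            const android::sp<android::CameraServiceWatchdog>& watchdog,
            const android::sp<SomeHalInterface>& hal) {
        return watchdog->WATCH(hal->flush());
    }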
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
new file mode 100644
index 0000000..e648a36
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceCallbacks"
+
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <hidl/Utils.h>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureResultExtras =
+        ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SPhysicalCaptureResultInfo =
+        ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+
+const char *AidlCameraDeviceCallbacks::kResultKey = "CaptureResult";
+
+
+bool AidlCameraDeviceCallbacks::initializeLooper(int vndkVersion) {
+    mCbLooper = new ALooper;
+    mCbLooper->setName("cs-looper");
+    status_t err = mCbLooper->start(/*runOnCallingThread*/ false, /*canCallJava*/ false,
+                                    PRIORITY_DEFAULT);
+    if (err != OK) {
+        ALOGE("Unable to start camera device callback looper");
+        return false;
+    }
+    mHandler = new CallbackHandler(this, vndkVersion);
+    mCbLooper->registerHandler(mHandler);
+    return true;
+}
+
+AidlCameraDeviceCallbacks::AidlCameraDeviceCallbacks(
+        const std::shared_ptr<SICameraDeviceCallback>& base):
+      mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+AidlCameraDeviceCallbacks::~AidlCameraDeviceCallbacks() {
+    if (mCbLooper != nullptr) {
+        if (mHandler != nullptr) {
+            mCbLooper->unregisterHandler(mHandler->id());
+        }
+        mCbLooper->stop();
+    }
+    mCbLooper.clear();
+    mHandler.clear();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceError(
+    int32_t errorCode, const CaptureResultExtras& resultExtras) {
+    using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+    SCaptureResultExtras cre = convertToAidl(resultExtras);
+    auto ret = mBase->onDeviceError(convertToAidl(errorCode), cre);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceError")
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceIdle() {
+    auto ret = mBase->onDeviceIdle();
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceIdle")
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onCaptureStarted(
+        const CaptureResultExtras& resultExtras, int64_t timestamp) {
+    using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+    SCaptureResultExtras hCaptureResultExtras = convertToAidl(resultExtras);
+    auto ret = mBase->onCaptureStarted(hCaptureResultExtras, timestamp);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onCaptureStarted")
+    return binder::Status::ok();
+}
+
+void AidlCameraDeviceCallbacks::convertResultMetadataToAidl(const camera_metadata_t* src,
+                                                            SCaptureMetadataInfo* dst) {
+    // First try writing to fmq.
+    size_t metadata_size = get_camera_metadata_size(src);
+    if ((metadata_size > 0) &&
+        (mCaptureResultMetadataQueue->availableToWrite() > 0)) {
+        if (mCaptureResultMetadataQueue->write((int8_t *)src, metadata_size)) {
+            dst->set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+        } else {
+            ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+            SCameraMetadata metadata;
+            hardware::cameraservice::utils::conversion::aidl::cloneToAidl(src, &metadata);
+            dst->set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+        }
+    }
+}
+
+void AidlCameraDeviceCallbacks::CallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
+    sp<RefBase> obj = nullptr;
+    sp<ResultWrapper> resultWrapper = nullptr;
+    bool found = false;
+    switch (msg->what()) {
+        case kWhatResultReceived:
+            found = msg->findObject(kResultKey, &obj);
+            if (!found || obj == nullptr) {
+                ALOGE("Cannot find result object in callback message");
+                return;
+            }
+            resultWrapper = static_cast<ResultWrapper*>(obj.get());
+            processResultMessage(resultWrapper);
+            break;
+        default:
+            ALOGE("Unknown callback sent");
+            break;
+    }
+}
+
+void AidlCameraDeviceCallbacks::CallbackHandler::processResultMessage(
+    sp<ResultWrapper> &resultWrapper) {
+    sp<AidlCameraDeviceCallbacks> converter = mConverter.promote();
+    if (converter == nullptr) {
+        ALOGE("Callback wrapper has died, result callback cannot be made");
+        return;
+    }
+    CameraMetadataNative &result = resultWrapper->mResult;
+    auto resultExtras = resultWrapper->mResultExtras;
+    SCaptureResultExtras convResultExtras =
+            hardware::cameraservice::utils::conversion::aidl::convertToAidl(resultExtras);
+
+    // Convert the result metadata into its stable (AIDL) representation.
+    SCaptureMetadataInfo captureMetadataInfo;
+    if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
+        ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
+                __FUNCTION__);
+        return;
+    }
+    const camera_metadata_t *rawMetadata = result.getAndLock();
+    converter->convertResultMetadataToAidl(rawMetadata, &captureMetadataInfo);
+    result.unlock(rawMetadata);
+
+    auto &physicalCaptureResultInfos = resultWrapper->mPhysicalCaptureResultInfos;
+    std::vector<SPhysicalCaptureResultInfo> stableCaptureResInfo =
+            convertToAidl(physicalCaptureResultInfos, converter->mCaptureResultMetadataQueue);
+    auto ret = converter->mBase->onResultReceived(captureMetadataInfo,
+                                                  convResultExtras,
+                                                  stableCaptureResInfo);
+
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "OnResultReceived")
+}
+
+binder::Status AidlCameraDeviceCallbacks::onResultReceived(
+    const CameraMetadataNative& result,
+    const UCaptureResultExtras& resultExtras,
+    const ::std::vector<UPhysicalCaptureResultInfo>& physicalCaptureResultInfos) {
+    // Wrap CameraMetadata, resultExtras and physicalCaptureResultInfos in an
+    // sp<RefBase>-able structure and post it.
+    sp<ResultWrapper> resultWrapper = new ResultWrapper(const_cast<CameraMetadataNative &>(result),
+                                                        resultExtras, physicalCaptureResultInfos);
+    sp<AMessage> msg = new AMessage(kWhatResultReceived, mHandler);
+    msg->setObject(kResultKey, resultWrapper);
+    msg->post();
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onPrepared(int32_t streamId) {
+    auto ret = mBase->onPrepared(streamId);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPrepared")
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRepeatingRequestError(
+    int64_t lastFrameNumber,
+    int32_t repeatingRequestId) {
+    auto ret =
+        mBase->onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onRepeatingRequestError")
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRequestQueueEmpty() {
+    // not implemented
+    return binder::Status::ok();
+}
+
+status_t AidlCameraDeviceCallbacks::linkToDeath(const sp<DeathRecipient>& recipient,
+                                                void* cookie, uint32_t flags) {
+    return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraDeviceCallbacks::unlinkToDeath(const wp<DeathRecipient>& recipient,
+                                                  void* cookie,
+                                                  uint32_t flags,
+                                                  wp<DeathRecipient>* outRecipient) {
+    return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
new file mode 100644
index 0000000..5cff5b3
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+
+#include <CameraService.h>
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceCallback.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <mutex>
+#include <thread>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SICameraDeviceCallback =
+        ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+// NDK classes
+using UBnCameraDeviceCallbacks = ::android::hardware::camera2::BnCameraDeviceCallbacks;
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+using ::android::hardware::camera2::impl::CameraMetadataNative;
+
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+class AidlCameraDeviceCallbacks : public UBnCameraDeviceCallbacks {
+  public:
+    explicit AidlCameraDeviceCallbacks(const std::shared_ptr<SICameraDeviceCallback>& base);
+
+    ~AidlCameraDeviceCallbacks() override;
+
+    bool initializeLooper(int vndkVersion);
+
+    binder::Status onDeviceError(int32_t errorCode,
+                                 const CaptureResultExtras& resultExtras) override;
+
+    binder::Status onDeviceIdle() override;
+
+    binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
+                                    int64_t timestamp) override;
+
+    binder::Status onResultReceived(
+            const CameraMetadataNative& result, const CaptureResultExtras& resultExtras,
+            const std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) override;
+
+    binder::Status onPrepared(int32_t streamId) override;
+
+    binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
+                                           int32_t repeatingRequestId) override;
+
+    binder::Status onRequestQueueEmpty() override;
+
+    status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+                         uint32_t flags) override;
+    status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+                           wp<DeathRecipient>* outRecipient) override;
+
+    void setCaptureResultMetadataQueue(std::shared_ptr<CaptureResultMetadataQueue> metadataQueue) {
+        mCaptureResultMetadataQueue = std::move(metadataQueue);
+    }
+
+  private:
+    // Wrapper struct so that parameters to onResultReceived callback may be
+    // sent through an AMessage.
+    struct ResultWrapper : public RefBase {
+        CameraMetadataNative mResult;
+        CaptureResultExtras mResultExtras;
+        std::vector<PhysicalCaptureResultInfo> mPhysicalCaptureResultInfos;
+
+        ResultWrapper(CameraMetadataNative &result,
+                      CaptureResultExtras  resultExtras,
+                      std::vector<PhysicalCaptureResultInfo> physicalCaptureResultInfos) :
+              // TODO: make this std::movable
+              mResult(result),
+              mResultExtras(std::move(resultExtras)),
+              mPhysicalCaptureResultInfos(std::move(physicalCaptureResultInfos)) { }
+    };
+
+    struct CallbackHandler : public AHandler {
+        public:
+            void onMessageReceived(const sp<AMessage> &msg) override;
+            CallbackHandler(AidlCameraDeviceCallbacks *converter, int vndkVersion) :
+                    mConverter(converter), mVndkVersion(vndkVersion) { }
+        private:
+            void processResultMessage(sp<ResultWrapper> &resultWrapper);
+            wp<AidlCameraDeviceCallbacks> mConverter = nullptr;
+            int mVndkVersion = -1;
+    };
+
+    void convertResultMetadataToAidl(const camera_metadata * src, SCaptureMetadataInfo * dst);
+    enum {
+        kWhatResultReceived,
+    };
+
+    static const char *kResultKey;
+
+  private:
+    std::shared_ptr<SICameraDeviceCallback> mBase;
+    std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+    sp<CallbackHandler> mHandler = nullptr;
+    sp<ALooper> mCbLooper = nullptr;
+
+    // Pipes death subscription from current NDK interface to VNDK mBase.
+    // Should consume calls to linkToDeath and unlinkToDeath.
+    DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
new file mode 100644
index 0000000..402f8a2
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceUser"
+
+#include "AidlCameraDeviceUser.h"
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <android-base/properties.h>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+// NDK classes
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+using UStatus = ::android::binder::Status;
+using USubmitInfo = ::android::hardware::camera2::utils::SubmitInfo;
+
+using ::android::CameraMetadata;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+namespace {
+constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+
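+// Helpers translating stable AIDL status values and binder::Status into ScopedAStatus returns.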
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+    return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+                                  : ScopedAStatus::fromServiceSpecificError(
+                                            static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& status) {
+    return status.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(status));
+}
+} // anonymous namespace
+
+AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
+      mDeviceRemote(deviceRemote) {
+    mInitSuccess = initDevice();
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+
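+// Creates the non-blocking capture request and capture result metadata FMQs.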
+bool AidlCameraDeviceUser::initDevice() {
+    // TODO: Get request and result metadata queue size from a system property.
+    int32_t reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE;
+
+    mCaptureRequestMetadataQueue =
+        std::make_unique<CaptureRequestMetadataQueue>(static_cast<size_t>(reqFMQSize),
+                                                      false /* non blocking */);
+    if (!mCaptureRequestMetadataQueue->isValid()) {
+        ALOGE("%s: invalid request fmq", __FUNCTION__);
+        return false;
+    }
+
+    int32_t resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE;
+    mCaptureResultMetadataQueue =
+        std::make_shared<CaptureResultMetadataQueue>(static_cast<size_t>(resFMQSize),
+                                                     false /* non blocking */);
+    if (!mCaptureResultMetadataQueue->isValid()) {
+        ALOGE("%s: invalid result fmq", __FUNCTION__);
+        return false;
+    }
+    return true;
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureRequestMetadataQueue(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+    if (mInitSuccess) {
+        *_aidl_return = mCaptureRequestMetadataQueue->dupeDesc();
+    }
+    return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureResultMetadataQueue(
+        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+    if (mInitSuccess) {
+        *_aidl_return = mCaptureResultMetadataQueue->dupeDesc();
+    }
+    return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::prepare(int32_t in_streamId) {
+    UStatus ret = mDeviceRemote->prepare(in_streamId);
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::submitRequestList(
+        const std::vector<SCaptureRequest>& in_requestList, bool in_isRepeating,
+        SSubmitInfo* _aidl_return) {
+    USubmitInfo submitInfo;
+    std::vector<UCaptureRequest> requests;
+    for (const auto& req: in_requestList) {
+        requests.emplace_back();
+        if (!convertRequestFromAidl(req, &requests.back())) {
+            ALOGE("%s: Failed to convert AIDL CaptureRequest.", __FUNCTION__);
+            return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+        }
+    }
+    UStatus ret = mDeviceRemote->submitRequestList(requests,
+                                                   in_isRepeating, &submitInfo);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed submitRequestList to cameraservice: %s",
+              __FUNCTION__, ret.toString8().string());
+        return fromUStatus(ret);
+    }
+    mRequestId = submitInfo.mRequestId;
+    convertToAidl(submitInfo, _aidl_return);
+    return ScopedAStatus::ok();
+}
+
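+// Cancels the repeating request most recently recorded by submitRequestList() (mRequestId).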
+ndk::ScopedAStatus AidlCameraDeviceUser::cancelRepeatingRequest(int64_t* _aidl_return) {
+    UStatus ret = mDeviceRemote->cancelRequest(mRequestId, _aidl_return);
+    return fromUStatus(ret);
+}
+
+ScopedAStatus AidlCameraDeviceUser::beginConfigure() {
+    UStatus ret = mDeviceRemote->beginConfigure();
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::endConfigure(SStreamConfigurationMode in_operatingMode,
+                                                      const SCameraMetadata& in_sessionParams,
+                                                      int64_t in_startTimeNs) {
+    CameraMetadata metadata;
+    if (!cloneFromAidl(in_sessionParams, &metadata)) {
+        return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+    }
+
+    std::vector<int32_t> offlineStreamIds;
+    UStatus ret = mDeviceRemote->endConfigure(convertFromAidl(in_operatingMode),
+                                              metadata, in_startTimeNs,
+                                              &offlineStreamIds);
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createStream(
+        const SOutputConfiguration& in_outputConfiguration, int32_t* _aidl_return) {
+    UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+    int32_t newStreamId;
+    UStatus ret = mDeviceRemote->createStream(outputConfig, &newStreamId);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed to create stream: %s", __FUNCTION__, ret.toString8().string());
+    }
+    *_aidl_return = newStreamId;
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createDefaultRequest(STemplateId in_templateId,
+                                                              SCameraMetadata* _aidl_return) {
+    CameraMetadata metadata;
+    UStatus ret = mDeviceRemote->createDefaultRequest(convertFromAidl(in_templateId),
+                                                      &metadata);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed to create default request: %s", __FUNCTION__, ret.toString8().string());
+        return fromUStatus(ret);
+    }
+
+    if (filterVndkKeys(mVndkVersion, metadata, /*isStatic*/false) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+              __FUNCTION__, mVndkVersion);
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+
+    const camera_metadata_t* rawMetadata = metadata.getAndLock();
+    cloneToAidl(rawMetadata, _aidl_return);
+    metadata.unlock(rawMetadata);
+    return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::waitUntilIdle() {
+    UStatus ret = mDeviceRemote->waitUntilIdle();
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
+    UStatus ret = mDeviceRemote->flush(_aidl_return);
+    return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::updateOutputConfiguration(
+        int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) {
+    UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+    UStatus ret = mDeviceRemote->updateOutputConfiguration(in_streamId, outputConfig);
+    if (!ret.isOk()) {
+        ALOGE("%s: Failed to update output config for stream id: %d: %s",
+              __FUNCTION__, in_streamId, ret.toString8().string());
+    }
+    return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::isSessionConfigurationSupported(
+        const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) {
+    USessionConfiguration sessionConfig = convertFromAidl(in_sessionConfiguration);
+    UStatus ret = mDeviceRemote->isSessionConfigurationSupported(sessionConfig,
+                                                                 _aidl_return);
+    return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::deleteStream(int32_t in_streamId) {
+    UStatus ret = mDeviceRemote->deleteStream(in_streamId);
+    return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::disconnect() {
+    UStatus ret = mDeviceRemote->disconnect();
+    return fromUStatus(ret);
+}
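+// Converts a stable AIDL CaptureRequest into the NDK CaptureRequest consumed by CameraService,
+// copying stream/window ids and per-physical-camera settings.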
+bool AidlCameraDeviceUser::convertRequestFromAidl(
+        const SCaptureRequest& src, UCaptureRequest* dst) {
+    dst->mIsReprocess = false;
+    for (const auto& streamAndWindowId : src.streamAndWindowIds) {
+        dst->mStreamIdxList.push_back(streamAndWindowId.streamId);
+        dst->mSurfaceIdxList.push_back(streamAndWindowId.windowId);
+    }
+
+    return copyPhysicalCameraSettings(src.physicalCameraSettings,
+                                      &(dst->mPhysicalCameraSettings));
+}
+bool AidlCameraDeviceUser::copyPhysicalCameraSettings(
+        const std::vector<SPhysicalCameraSettings>& src,
+        std::vector<UCaptureRequest::PhysicalCameraSettings>* dst) {
+    bool converted = false;
+    for (auto &e : src) {
+        dst->emplace_back();
+        CaptureRequest::PhysicalCameraSettings &physicalCameraSetting =
+            dst->back();
+        physicalCameraSetting.id = e.id;
+
+        // Read the settings either from the fmq or straightaway from the
+        // request. We don't need any synchronization, since submitRequestList
+        // is guaranteed to be called serially by the client if it decides to
+        // use fmq.
+        if (e.settings.getTag() == SCaptureMetadataInfo::fmqMetadataSize) {
+            /**
+             * Get settings from the fmq.
+             */
+            SCameraMetadata settingsFmq;
+            int64_t metadataSize = e.settings.get<SCaptureMetadataInfo::fmqMetadataSize>();
+            settingsFmq.metadata.resize(metadataSize);
+            int8_t* metadataPtr = (int8_t*) settingsFmq.metadata.data();
+            bool read = mCaptureRequestMetadataQueue->read(metadataPtr,
+                                                           metadataSize);
+            if (!read) {
+                ALOGE("%s: capture request settings couldn't be read from fmq", __FUNCTION__);
+                converted = false;
+            } else {
+                converted = cloneFromAidl(settingsFmq, &physicalCameraSetting.settings);
+            }
+        } else {
+            /**
+             * The settings metadata is contained in request settings field.
+             */
+            converted = cloneFromAidl(e.settings.get<SCaptureMetadataInfo::metadata>(),
+                    &physicalCameraSetting.settings);
+        }
+        if (!converted) {
+            ALOGE("%s: Unable to convert physicalCameraSettings from AIDL to NDK.", __FUNCTION__);
+            return false;
+        }
+    }
+    return true;
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
new file mode 100644
index 0000000..8014951
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/hardware/common/fmq/MQDescriptor.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <memory>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using CaptureRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// Stable NDK classes
+using SBnCameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::BnCameraDeviceUser;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
+using SOutputConfiguration =
+        ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCameraSettings =
+        ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using SSessionConfiguration =
+        ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+        ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// Unstable NDK classes
+using UCaptureRequest = ::android::hardware::camera2::CaptureRequest;
+using UICameraDeviceUser = ::android::hardware::camera2::ICameraDeviceUser;
+
+static constexpr int32_t REQUEST_ID_NONE = -1;
+
+class AidlCameraDeviceUser final : public SBnCameraDeviceUser {
+  public:
+    explicit AidlCameraDeviceUser(const sp<UICameraDeviceUser> &deviceRemote);
+    ~AidlCameraDeviceUser() override = default;
+
+    ndk::ScopedAStatus beginConfigure() override;
+    ndk::ScopedAStatus cancelRepeatingRequest(int64_t* _aidl_return) override;
+    ndk::ScopedAStatus createDefaultRequest(STemplateId in_templateId,
+                                            SCameraMetadata* _aidl_return) override;
+    ndk::ScopedAStatus createStream(const SOutputConfiguration& in_outputConfiguration,
+                                    int32_t* _aidl_return) override;
+    ndk::ScopedAStatus deleteStream(int32_t in_streamId) override;
+    ndk::ScopedAStatus disconnect() override;
+    ndk::ScopedAStatus endConfigure(SStreamConfigurationMode in_operatingMode,
+                                    const SCameraMetadata& in_sessionParams,
+                                    int64_t in_startTimeNs) override;
+    ndk::ScopedAStatus flush(int64_t* _aidl_return) override;
+    ndk::ScopedAStatus getCaptureRequestMetadataQueue(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+    ndk::ScopedAStatus getCaptureResultMetadataQueue(
+            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+    ndk::ScopedAStatus isSessionConfigurationSupported(
+            const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) override;
+    ndk::ScopedAStatus prepare(int32_t in_streamId) override;
+    ndk::ScopedAStatus submitRequestList(const std::vector<SCaptureRequest>& in_requestList,
+                                         bool in_isRepeating, SSubmitInfo* _aidl_return) override;
+    ndk::ScopedAStatus updateOutputConfiguration(
+            int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) override;
+    ndk::ScopedAStatus waitUntilIdle() override;
+
+    [[nodiscard]] bool initStatus() const { return mInitSuccess; }
+
+    std::shared_ptr<CaptureResultMetadataQueue> getCaptureResultMetadataQueue() {
+        return mCaptureResultMetadataQueue;
+    }
+
+  private:
+    bool initDevice();
+
+    bool convertRequestFromAidl(const SCaptureRequest &src, UCaptureRequest *dst);
+    bool copyPhysicalCameraSettings(const std::vector<SPhysicalCameraSettings> &src,
+                                    std::vector<CaptureRequest::PhysicalCameraSettings> *dst);
+
+    const sp<UICameraDeviceUser> mDeviceRemote;
+    std::unique_ptr<CaptureRequestMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
+    std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+    bool mInitSuccess = false;
+    int32_t mRequestId = REQUEST_ID_NONE;
+    int mVndkVersion = -1;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+
+#endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
new file mode 100644
index 0000000..4232a81
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -0,0 +1,328 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraService"
+
+#include "AidlCameraService.h"
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlCameraDeviceUser.h>
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/CameraMetadataType.h>
+#include <android-base/properties.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_manager.h>
+#include <binder/Status.h>
+#include <hidl/HidlTransportSupport.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceCallbacks;
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceUser;
+using ::android::hardware::cameraservice::utils::conversion::aidl::areBindersEqual;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+// VNDK classes
+using SCameraMetadataType = ::aidl::android::frameworks::cameraservice::common::CameraMetadataType;
+using SVendorTag = ::aidl::android::frameworks::cameraservice::common::VendorTag;
+using SVendorTagSection = ::aidl::android::frameworks::cameraservice::common::VendorTagSection;
+// NDK classes
+using UICameraService = ::android::hardware::ICameraService;
+using UStatus = ::android::binder::Status;
+
+namespace {
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+    return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+                                  : ScopedAStatus::fromServiceSpecificError(
+                                            static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& s) {
+    return s.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(s));
+}
+} // anonymous namespace
+
+std::shared_ptr<AidlCameraService> kCameraService;
+
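+// Registers the stable AIDL ICameraService as "<descriptor>/default" if the interface is declared on the device.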
+bool AidlCameraService::registerService(::android::CameraService* cameraService) {
+    kCameraService = SharedRefBase::make<AidlCameraService>(cameraService);
+    std::string serviceName = SBnCameraService::descriptor;
+    serviceName += "/default";
+    bool isDeclared = AServiceManager_isDeclared(serviceName.c_str());
+    if (!isDeclared) {
+        ALOGI("%s: AIDL vndk not declared.", __FUNCTION__);
+        return false;
+    }
+
+    binder_exception_t registered = AServiceManager_addService(
+            kCameraService->asBinder().get(), serviceName.c_str());
+    ALOGE_IF(registered != EX_NONE,
+             "%s: AIDL VNDK declared, but failed to register service: %d",
+             __FUNCTION__, registered);
+    return registered == EX_NONE;
+}
+
+AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
+      mCameraService(cameraService) {
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
+                                                          SCameraMetadata* _aidl_return) {
+    if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
+
+    ::android::CameraMetadata cameraMetadata;
+    UStatus ret = mCameraService->getCameraCharacteristics(String16(in_cameraId.c_str()),
+                                                           mVndkVersion,
+                                                           /* overrideToPortrait= */ false,
+                                                           &cameraMetadata);
+    if (!ret.isOk()) {
+        if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
+            ALOGE("%s: Transaction error when getting camera characteristics"
+                  " from camera service: %d.",
+                  __FUNCTION__ , ret.exceptionCode());
+            return fromUStatus(ret);
+        }
+        switch (ret.serviceSpecificErrorCode()) {
+            case UICameraService::ERROR_ILLEGAL_ARGUMENT:
+                ALOGE("%s: Camera ID %s does not exist!", __FUNCTION__, in_cameraId.c_str());
+                return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+            default:
+                ALOGE("Get camera characteristics from camera service failed: %s",
+                      ret.toString8().string());
+                return fromUStatus(ret);
+        }
+    }
+
+    if (filterVndkKeys(mVndkVersion, cameraMetadata) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+              __FUNCTION__, mVndkVersion);
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+
+    const camera_metadata_t* rawMetadata = cameraMetadata.getAndLock();
+    cloneToAidl(rawMetadata, _aidl_return);
+    cameraMetadata.unlock(rawMetadata);
+
+    return ScopedAStatus::ok();
+}
+ndk::ScopedAStatus AidlCameraService::connectDevice(
+        const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+        const std::string& in_cameraId,
+        std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+    // Here, we first get NDK ICameraDeviceUser from mCameraService, then save
+    // that interface in the newly created AidlCameraDeviceUser impl class.
+    if (mCameraService == nullptr) {
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+    sp<hardware::camera2::ICameraDeviceUser> unstableDevice = nullptr;
+    // Create a hardware::camera2::ICameraDeviceCallback object which internally
+    // calls callback functions passed through hCallback.
+    sp<AidlCameraDeviceCallbacks> hybridCallbacks = new AidlCameraDeviceCallbacks(in_callback);
+    if (!hybridCallbacks->initializeLooper(mVndkVersion)) {
+        ALOGE("Unable to handle callbacks on device, cannot connect");
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+    sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    binder::Status serviceRet = mCameraService->connectDevice(
+            callbacks,
+            String16(in_cameraId.c_str()),
+            String16(""),
+            /* clientFeatureId= */{},
+            hardware::ICameraService::USE_CALLING_UID,
+            /* scoreOffset= */ 0,
+            /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
+            /* overrideToPortrait= */ false,
+            &unstableDevice);
+    if (!serviceRet.isOk()) {
+        ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
+              serviceRet.toString8().c_str());
+        return fromUStatus(serviceRet);
+    }
+
+    // Now we create a AidlCameraDeviceUser class, store the unstableDevice in it,
+    // and return that back. All calls on that interface will be forwarded to
+    // the NDK AIDL interface.
+    std::shared_ptr<AidlCameraDeviceUser> stableDevice =
+            ndk::SharedRefBase::make<AidlCameraDeviceUser>(unstableDevice);
+    if (!stableDevice->initStatus()) {
+        ALOGE("%s: Unable to initialize camera device AIDL wrapper", __FUNCTION__);
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+    hybridCallbacks->setCaptureResultMetadataQueue(
+            stableDevice->getCaptureResultMetadataQueue());
+    *_aidl_return = stableDevice;
+    return ScopedAStatus::ok();
+}
+void AidlCameraService::addToListenerCacheLocked(
+        std::shared_ptr<SICameraServiceListener> stableCsListener,
+        sp<UICameraServiceListener> csListener) {
+    mListeners.emplace_back(std::make_pair(stableCsListener, csListener));
+}
+sp<UICameraServiceListener> AidlCameraService::searchListenerCacheLocked(
+        const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound) {
+    // Go through the mListeners list and compare the listener with the VNDK AIDL
+    // listener registered.
+    if (listener == nullptr) {
+        return nullptr;
+    }
+
+    auto it = mListeners.begin();
+    sp<UICameraServiceListener> csListener = nullptr;
+    for (;it != mListeners.end(); it++) {
+        if (areBindersEqual(listener->asBinder(), it->first->asBinder())) {
+            break;
+        }
+    }
+    if (it != mListeners.end()) {
+        csListener = it->second;
+        if (removeIfFound) {
+            mListeners.erase(it);
+        }
+    }
+    return csListener;
+}
+ndk::ScopedAStatus AidlCameraService::addListener(
+        const std::shared_ptr<SICameraServiceListener>& in_listener,
+        std::vector<SCameraStatusAndId>* _aidl_return) {
+    std::vector<hardware::CameraStatus> cameraStatusAndIds{};
+    SStatus status = addListenerInternal(
+            in_listener, &cameraStatusAndIds);
+    if (status != SStatus::NO_ERROR) {
+        return fromSStatus(status);
+    }
+
+    // Convert cameraStatusAndIds to VNDK AIDL
+    convertToAidl(cameraStatusAndIds, _aidl_return);
+    return ScopedAStatus::ok();
+}
+SStatus AidlCameraService::addListenerInternal(
+        const std::shared_ptr<SICameraServiceListener>& listener,
+        std::vector<hardware::CameraStatus>* cameraStatusAndIds) {
+    if (mCameraService == nullptr) {
+        return SStatus::UNKNOWN_ERROR;
+    }
+    if (listener == nullptr || cameraStatusAndIds == nullptr) {
+        ALOGE("%s listener and cameraStatusAndIds must not be NULL", __FUNCTION__);
+        return SStatus::ILLEGAL_ARGUMENT;
+    }
+    sp<UICameraServiceListener> csListener = nullptr;
+    // Check the cache for previously registered callbacks
+    {
+        Mutex::Autolock l(mListenerListLock);
+        csListener = searchListenerCacheLocked(listener);
+        if (csListener == nullptr) {
+            // Wrap a listener with AidlCameraServiceListener and pass it to
+            // CameraService.
+            csListener = sp<AidlCameraServiceListener>::make(listener);
+            // Add to cache
+            addToListenerCacheLocked(listener, csListener);
+        } else {
+            ALOGE("%s: Trying to add a listener %p already registered",
+                  __FUNCTION__, listener.get());
+            return SStatus::ILLEGAL_ARGUMENT;
+        }
+    }
+    binder::Status serviceRet =
+            mCameraService->addListenerHelper(csListener, cameraStatusAndIds, true);
+    if (!serviceRet.isOk()) {
+        ALOGE("%s: Unable to add camera device status listener", __FUNCTION__);
+        return convertToAidl(serviceRet);
+    }
+
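+    // Only report cameras that support the camera2 API to the stable AIDL client.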
+    cameraStatusAndIds->erase(std::remove_if(cameraStatusAndIds->begin(),
+                                             cameraStatusAndIds->end(),
+            [this](const hardware::CameraStatus& s) {
+                bool supportsHAL3 = false;
+                binder::Status sRet =
+                            mCameraService->supportsCameraApi(String16(s.cameraId),
+                                    UICameraService::API_VERSION_2, &supportsHAL3);
+                return !sRet.isOk() || !supportsHAL3;
+            }), cameraStatusAndIds->end());
+
+    return SStatus::NO_ERROR;
+}
+ndk::ScopedAStatus AidlCameraService::removeListener(
+        const std::shared_ptr<SICameraServiceListener>& in_listener) {
+    if (in_listener == nullptr) {
+        ALOGE("%s listener must not be NULL", __FUNCTION__);
+        return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+    }
+    sp<UICameraServiceListener> csListener = nullptr;
+    {
+        Mutex::Autolock l(mListenerListLock);
+        csListener = searchListenerCacheLocked(in_listener, /*removeIfFound*/true);
+    }
+    if (csListener != nullptr) {
+        mCameraService->removeListener(csListener);
+    } else {
+        ALOGE("%s Removing unregistered listener %p", __FUNCTION__, in_listener.get());
+        return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+    }
+    return ScopedAStatus::ok();
+}
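+// Builds the per-provider vendor tag sections from the global VendorTagDescriptorCache.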
+ndk::ScopedAStatus AidlCameraService::getCameraVendorTagSections(
+        std::vector<SProviderIdAndVendorTagSections>* _aidl_return) {
+    sp<VendorTagDescriptorCache> gCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+    if (gCache == nullptr) {
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+
+    const std::unordered_map<metadata_vendor_id_t, sp<android::VendorTagDescriptor>>
+            &vendorIdsAndTagDescs = gCache->getVendorIdsAndTagDescriptors();
+    if (vendorIdsAndTagDescs.empty()) {
+        return fromSStatus(SStatus::UNKNOWN_ERROR);
+    }
+
+    std::vector<SProviderIdAndVendorTagSections>& tagIdAndVendorTagSections = *_aidl_return;
+    tagIdAndVendorTagSections.resize(vendorIdsAndTagDescs.size());
+    size_t j = 0;
+    for (auto &vendorIdAndTagDescs : vendorIdsAndTagDescs) {
+        std::vector<SVendorTagSection> vendorTagSections;
+        sp<VendorTagDescriptor> desc = vendorIdAndTagDescs.second;
+        const SortedVector<String8>* sectionNames = desc->getAllSectionNames();
+        size_t numSections = sectionNames->size();
+        std::vector<std::vector<SVendorTag>> tagsBySection(numSections);
+        int tagCount = desc->getTagCount();
+        if (tagCount <= 0) {
+            continue;
+        }
+        std::vector<uint32_t> tags(tagCount);
+        desc->getTagArray(tags.data());
+        for (int i = 0; i < tagCount; i++) {
+            SVendorTag vt;
+            vt.tagId = tags[i];
+            vt.tagName = desc->getTagName(tags[i]);
+            vt.tagType = (SCameraMetadataType) desc->getTagType(tags[i]);
+            ssize_t sectionIdx = desc->getSectionIndex(tags[i]);
+            tagsBySection[sectionIdx].push_back(vt);
+        }
+        vendorTagSections.resize(numSections);
+        for (size_t s = 0; s < numSections; s++) {
+            vendorTagSections[s].sectionName = (*sectionNames)[s].string();
+            vendorTagSections[s].tags = tagsBySection[s];
+        }
+        SProviderIdAndVendorTagSections & prvdrIdAndVendorTagSection =
+                tagIdAndVendorTagSections[j];
+        prvdrIdAndVendorTagSection.providerId = vendorIdAndTagDescs.first;
+        prvdrIdAndVendorTagSection.vendorTagSections = std::move(vendorTagSections);
+        j++;
+    }
+    return ScopedAStatus::ok();
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.h b/services/camera/libcameraservice/aidl/AidlCameraService.h
new file mode 100644
index 0000000..4c67ac7
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/BnCameraService.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+// VNDK classes
+using SBnCameraService = ::aidl::android::frameworks::cameraservice::service::BnCameraService;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SICameraDeviceCallback =
+        ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+using SICameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser;
+using SICameraServiceListener =
+        ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+using SProviderIdAndVendorTagSections =
+        ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UICameraServiceListener = ::android::hardware::ICameraServiceListener;
+
+class AidlCameraService: public SBnCameraService {
+  public:
+    static bool registerService(::android::CameraService* cameraService);
+
+    explicit AidlCameraService(::android::CameraService* cameraService);
+    ~AidlCameraService() override = default;
+    ndk::ScopedAStatus getCameraCharacteristics(const std::string& in_cameraId,
+                                                SCameraMetadata* _aidl_return) override;
+
+    ndk::ScopedAStatus connectDevice(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+                                     const std::string& in_cameraId,
+                                     std::shared_ptr<SICameraDeviceUser>* _aidl_return) override;
+
+    ndk::ScopedAStatus addListener(const std::shared_ptr<SICameraServiceListener>& in_listener,
+                                   std::vector<SCameraStatusAndId>* _aidl_return) override;
+
+    ndk::ScopedAStatus getCameraVendorTagSections(
+            std::vector<SProviderIdAndVendorTagSections>* _aidl_return) override;
+
+    ndk::ScopedAStatus removeListener(
+            const std::shared_ptr<SICameraServiceListener>& in_listener) override;
+
+  private:
+    void addToListenerCacheLocked(std::shared_ptr<SICameraServiceListener> stableCsListener,
+                                  sp<hardware::ICameraServiceListener> csListener);
+
+    sp<UICameraServiceListener> searchListenerCacheLocked(
+            const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound = false);
+
+    SStatus addListenerInternal(const std::shared_ptr<SICameraServiceListener>& listener,
+                                std::vector<hardware::CameraStatus>* cameraStatusAndIds);
+
+
+    ::android::CameraService* mCameraService;
+
+    Mutex mListenerListLock;
+    std::list<std::pair<std::shared_ptr<SICameraServiceListener>,
+                        sp<UICameraServiceListener>>> mListeners;
+    int mVndkVersion = -1;
+
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
new file mode 100644
index 0000000..e183063
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertCameraStatusToAidl;
+// VNDK classes
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+
+binder::Status AidlCameraServiceListener::onStatusChanged(
+        int32_t status, const ::android::String16& cameraId) {
+    SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+    std::string sCameraId = String8(cameraId).string();
+    auto ret = mBase->onStatusChanged(sStatus, sCameraId);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
+    return binder::Status::ok();
+}
+
+binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
+        int32_t status, const ::android::String16& cameraId,
+        const ::android::String16& physicalCameraId) {
+    SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+    std::string sCameraId = String8(cameraId).string();
+    std::string sPhysicalCameraId = String8(physicalCameraId).string();
+
+    auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, sCameraId, sPhysicalCameraId);
+    LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPhysicalCameraStatusChanged")
+    return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
+    int32_t, const ::android::String16&) {
+    // We don't implement onTorchStatusChanged
+    return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
+    const ::android::String16&, int32_t) {
+    // We don't implement onTorchStrengthLevelChanged
+    return binder::Status::ok();
+}
+status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+                                                uint32_t flags) {
+    return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
+                                                  uint32_t flags,
+                                                  wp<DeathRecipient>* outRecipient) {
+    return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
new file mode 100644
index 0000000..906dd8e
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+
+
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraServiceListener.h>
+#include <android/hardware/BnCameraServiceListener.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SICameraServiceListener =
+        ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+// NDK classes
+using UBnCameraServiceListener = ::android::hardware::BnCameraServiceListener;
+
+/**
+ * A simple shim to pass calls from CameraService to VNDK client.
+ */
+class AidlCameraServiceListener : public UBnCameraServiceListener {
+  public:
+    AidlCameraServiceListener(const std::shared_ptr<SICameraServiceListener>& base):
+          mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+    ~AidlCameraServiceListener() = default;
+
+    ::android::binder::Status onStatusChanged(int32_t status,
+            const ::android::String16& cameraId) override;
+    ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
+            const ::android::String16& cameraId,
+            const ::android::String16& physicalCameraId) override;
+
+    ::android::binder::Status onTorchStatusChanged(
+            int32_t status, const ::android::String16& cameraId) override;
+    ::android::binder::Status onTorchStrengthLevelChanged(
+            const ::android::String16& cameraId, int32_t newStrengthLevel) override;
+    binder::Status onCameraAccessPrioritiesChanged() override {
+        // TODO: no implementation yet.
+        return binder::Status::ok();
+    }
+    binder::Status onCameraOpened(const ::android::String16& /*cameraId*/,
+            const ::android::String16& /*clientPackageId*/) override {
+        // empty implementation
+        return binder::Status::ok();
+    }
+    binder::Status onCameraClosed(const ::android::String16& /*cameraId*/) override {
+        // empty implementation
+        return binder::Status::ok();
+    }
+
+    status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+                         uint32_t flags) override;
+    status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+                           wp<DeathRecipient>* outRecipient) override;
+
+  private:
+    std::shared_ptr<SICameraServiceListener> mBase;
+
+    // Pipes death subscription to current NDK AIDL interface to VNDK mBase.
+    // Should consume calls to linkToDeath and unlinkToDeath.
+    DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
new file mode 100644
index 0000000..1b8e53b
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -0,0 +1,305 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlUtils"
+
+#include <aidl/AidlUtils.h>
+#include <aidl/VndkVersionMetadataTags.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <device3/Camera3StreamInterface.h>
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <mediautils/AImageReaderUtils.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using aimg::AImageReader_getHGBPFromHandle;
+using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
+void cloneToAidl(const camera_metadata_t* src, SCameraMetadata* dst) {
+    if (src == nullptr) {
+        ALOGW("%s: attempt to convert null metadata to AIDL", __FUNCTION__);
+        return;
+    }
+    size_t size = get_camera_metadata_size(src);
+    uint8_t* startPtr = (uint8_t*)src;
+    uint8_t* endPtr = startPtr + size;
+    dst->metadata.assign(startPtr, endPtr);
+}
+
+// The camera metadata here is cloned. Since we're reading metadata over
+// the binder we would need to clone it in order to avoid alignment issues.
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst) {
+    const camera_metadata_t *buffer =
+            reinterpret_cast<const camera_metadata_t*>(src.metadata.data());
+    size_t expectedSize = src.metadata.size();
+    if (buffer != nullptr) {
+        int res = validate_camera_metadata_structure(buffer, &expectedSize);
+        if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+            *dst = buffer;
+        } else {
+            ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+            return false;
+        }
+    }
+    return true;
+}
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode) {
+    switch (streamConfigurationMode) {
+        case SStreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE:
+            return camera2::ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE;
+        case SStreamConfigurationMode::NORMAL_MODE:
+            return camera2::ICameraDeviceUser::NORMAL_MODE;
+        default:
+            // TODO: Fix this
+            return camera2::ICameraDeviceUser::VENDOR_MODE_START;
+    }
+}
+
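+// Rebuilds an NDK OutputConfiguration from the AIDL one, converting each window handle into an
+// IGraphicBufferProducer via H2BGraphicBufferProducer.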
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src) {
+    std::vector<sp<IGraphicBufferProducer>> iGBPs;
+    auto &windowHandles = src.windowHandles;
+    iGBPs.reserve(windowHandles.size());
+
+    for (auto &handle : windowHandles) {
+        native_handle_t* nh = makeFromAidl(handle);
+        iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(nh)));
+        native_handle_delete(nh);
+    }
+    String16 physicalCameraId16(src.physicalCameraId.c_str());
+    UOutputConfiguration outputConfiguration(
+        iGBPs, convertFromAidl(src.rotation), physicalCameraId16,
+        src.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
+        (windowHandles.size() > 1));
+    return outputConfiguration;
+}
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src) {
+    USessionConfiguration sessionConfig(src.inputWidth, src.inputHeight,
+                                        src.inputFormat, static_cast<int>(src.operationMode));
+
+    for (const auto& os : src.outputStreams) {
+        UOutputConfiguration config = convertFromAidl(os);
+        sessionConfig.addOutputConfiguration(config);
+    }
+
+    return sessionConfig;
+}
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation) {
+    switch(rotation) {
+        case SOutputConfiguration::Rotation::R270:
+            return android::camera3::CAMERA_STREAM_ROTATION_270;
+        case SOutputConfiguration::Rotation::R180:
+            return android::camera3::CAMERA_STREAM_ROTATION_180;
+        case SOutputConfiguration::Rotation::R90:
+            return android::camera3::CAMERA_STREAM_ROTATION_90;
+        case SOutputConfiguration::Rotation::R0:
+        default:
+            return android::camera3::CAMERA_STREAM_ROTATION_0;
+    }
+}
+
+int32_t convertFromAidl(STemplateId templateId) {
+    switch(templateId) {
+        case STemplateId::PREVIEW:
+            return camera2::ICameraDeviceUser::TEMPLATE_PREVIEW;
+        case STemplateId::STILL_CAPTURE:
+            return camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE;
+        case STemplateId::RECORD:
+            return camera2::ICameraDeviceUser::TEMPLATE_RECORD;
+        case STemplateId::VIDEO_SNAPSHOT:
+            return camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT;
+        case STemplateId::ZERO_SHUTTER_LAG:
+            return camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG;
+        case STemplateId::MANUAL:
+            return camera2::ICameraDeviceUser::TEMPLATE_MANUAL;
+    }
+}
+
+void convertToAidl(const camera2::utils::SubmitInfo& submitInfo, SSubmitInfo* hSubmitInfo) {
+    hSubmitInfo->requestId = submitInfo.mRequestId;
+    hSubmitInfo->lastFrameNumber = submitInfo.mLastFrameNumber;
+}
+
+
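+// Maps ICameraService service-specific error codes carried in binder::Status onto the stable AIDL Status enum.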
+SStatus convertToAidl(const binder::Status &status) {
+    if (status.isOk()) {
+        return SStatus::NO_ERROR;
+    }
+    if (status.exceptionCode() != EX_SERVICE_SPECIFIC) {
+        return SStatus::UNKNOWN_ERROR;
+    }
+
+    switch (status.serviceSpecificErrorCode()) {
+        case hardware::ICameraService::ERROR_DISCONNECTED:
+            return SStatus::DISCONNECTED;
+        case hardware::ICameraService::ERROR_CAMERA_IN_USE:
+            return SStatus::CAMERA_IN_USE;
+        case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
+            return SStatus::MAX_CAMERAS_IN_USE;
+        case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+            return SStatus::ILLEGAL_ARGUMENT;
+        case hardware::ICameraService::ERROR_DEPRECATED_HAL:
+            // Should not reach here since we filtered legacy HALs earlier
+            return SStatus::DEPRECATED_HAL;
+        case hardware::ICameraService::ERROR_DISABLED:
+            return SStatus::DISABLED;
+        case hardware::ICameraService::ERROR_PERMISSION_DENIED:
+            return SStatus::PERMISSION_DENIED;
+        case hardware::ICameraService::ERROR_INVALID_OPERATION:
+            return SStatus::INVALID_OPERATION;
+        default:
+            return SStatus::UNKNOWN_ERROR;
+    }
+}
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &src) {
+    SCaptureResultExtras dst;
+    dst.requestId = src.requestId;
+    dst.burstId = src.burstId;
+    dst.frameNumber = src.frameNumber;
+    dst.partialResultCount = src.partialResultCount;
+    dst.errorStreamId = src.errorStreamId;
+    dst.errorPhysicalCameraId = String8(src.errorPhysicalCameraId).string();
+    return dst;
+}
+
+SErrorCode convertToAidl(int32_t errorCode) {
+    switch(errorCode) {
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
+            return SErrorCode::CAMERA_DISCONNECTED;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
+            return SErrorCode::CAMERA_DEVICE;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
+            return SErrorCode::CAMERA_SERVICE;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+            return SErrorCode::CAMERA_REQUEST;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+            return SErrorCode::CAMERA_RESULT;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+            return SErrorCode::CAMERA_BUFFER;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED:
+            return SErrorCode::CAMERA_DISABLED;
+        case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR:
+            return SErrorCode::CAMERA_INVALID_ERROR;
+        default:
+            return SErrorCode::CAMERA_UNKNOWN_ERROR;
+    }
+}
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+        const std::vector<UPhysicalCaptureResultInfo>& src,
+        std::shared_ptr<CaptureResultMetadataQueue>& fmq) {
+    std::vector<SPhysicalCaptureResultInfo> dst;
+    dst.resize(src.size());
+    size_t i = 0;
+    for (auto &physicalCaptureResultInfo : src) {
+        dst[i++] = convertToAidl(physicalCaptureResultInfo, fmq);
+    }
+    return dst;
+}
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo & src,
+                                         std::shared_ptr<CaptureResultMetadataQueue> & fmq) {
+    SPhysicalCaptureResultInfo dst;
+    dst.physicalCameraId = String8(src.mPhysicalCameraId).string();
+
+    const camera_metadata_t *rawMetadata = src.mPhysicalCameraMetadata.getAndLock();
+    // Try using the fmq first.
+    size_t metadata_size = get_camera_metadata_size(rawMetadata);
+    if ((metadata_size > 0) && (fmq->availableToWrite() > 0)) {
+        if (fmq->write((int8_t *)rawMetadata, metadata_size)) {
+            dst.physicalCameraMetadata.set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+        } else {
+            ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+            SCameraMetadata metadata;
+            cloneToAidl(rawMetadata, &metadata);
+            dst.physicalCameraMetadata.set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+        }
+    }
+    src.mPhysicalCameraMetadata.unlock(rawMetadata);
+    return dst;
+}
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+                   std::vector<SCameraStatusAndId>* dst) {
+    dst->resize(src.size());
+    size_t i = 0;
+    for (const auto &statusAndId : src) {
+        auto &a = (*dst)[i++];
+        a.cameraId = statusAndId.cameraId.c_str();
+        a.deviceStatus = convertCameraStatusToAidl(statusAndId.status);
+        size_t numUnavailPhysicalCameras = statusAndId.unavailablePhysicalIds.size();
+        a.unavailPhysicalCameraIds.resize(numUnavailPhysicalCameras);
+        for (size_t j = 0; j < numUnavailPhysicalCameras; j++) {
+            a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j].c_str();
+        }
+    }
+}
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src) {
+    SCameraDeviceStatus deviceStatus = SCameraDeviceStatus::STATUS_UNKNOWN;
+    switch(src) {
+        case hardware::ICameraServiceListener::STATUS_NOT_PRESENT:
+            deviceStatus = SCameraDeviceStatus::STATUS_NOT_PRESENT;
+            break;
+        case hardware::ICameraServiceListener::STATUS_PRESENT:
+            deviceStatus = SCameraDeviceStatus::STATUS_PRESENT;
+            break;
+        case hardware::ICameraServiceListener::STATUS_ENUMERATING:
+            deviceStatus = SCameraDeviceStatus::STATUS_ENUMERATING;
+            break;
+        case hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE:
+            deviceStatus = SCameraDeviceStatus::STATUS_NOT_AVAILABLE;
+            break;
+        default:
+            break;
+    }
+    return deviceStatus;
+}
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2) {
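+    // The two handles are considered equal when neither orders before the other under
+    // AIBinder_lt.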
+    return !AIBinder_lt(b1.get(), b2.get()) && !AIBinder_lt(b2.get(), b1.get());
+}
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
+    if (vndkVersion == __ANDROID_API_FUTURE__) {
+        // The VNDK version in ro.vndk.version is a codename, meaning the device runs the
+        // current in-development release, so no keys need to be filtered.
+        return OK;
+    }
+    const auto &apiLevelToKeys =
+            isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
+    // Find the vndk versions above the given vndk version. All the vndk
+    // versions above the given one need to have their keys filtered from the
+    // metadata in order to avoid metadata invalidation.
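+    // Illustration: with vndkVersion = 31, upper_bound() lands on the first entry keyed
+    // above 31, so the keys listed for API levels 32 and above are erased while the 30/31
+    // keys are kept.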
+    auto it = apiLevelToKeys.upper_bound(vndkVersion);
+    while (it != apiLevelToKeys.end()) {
+        for (const auto &key : it->second) {
+            status_t res = metadata.erase(key);
+            if (res != OK) {
+                ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+                return res;
+            }
+        }
+        it++;
+    }
+    return OK;
+}
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
new file mode 100644
index 0000000..c89d7ff
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureResultExtras.h>
+#include <aidl/android/frameworks/cameraservice/device/ErrorCode.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <android/hardware/ICameraService.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <camera/CameraMetadata.h>
+#include <fmq/AidlMessageQueue.h>
+#include <hardware/camera.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::CameraMetadata;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SCaptureResultExtras =
+        ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SErrorCode = ::aidl::android::frameworks::cameraservice::device::ErrorCode;
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SOutputConfiguration =
+        ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCaptureResultInfo =
+        ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SSessionConfiguration =
+        ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+        ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+
+// Common macro to log errors returned from stable AIDL calls
+#define LOG_STATUS_ERROR_IF_NOT_OK(status, callName)                                        \
+    if (!(status).isOk()) {                                                                 \
+        if ((status).getExceptionCode() == EX_SERVICE_SPECIFIC) {                           \
+            SStatus errStatus = static_cast<SStatus>((status).getServiceSpecificError());   \
+            ALOGE("%s: %s callback failed: %s", __FUNCTION__, callName,                     \
+                  toString(errStatus).c_str());                                             \
+        } else {                                                                            \
+            ALOGE("%s: Transaction failed during %s: %d", __FUNCTION__, callName,           \
+                  (status).getExceptionCode());                                             \
+        }                                                                                   \
+    }
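+// Illustrative call site (the callback object and method below are hypothetical):
+//     ndk::ScopedAStatus ret = callback->notifyClient(...);
+//     LOG_STATUS_ERROR_IF_NOT_OK(ret, "notifyClient")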
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
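+// (See convertToAidl(const UPhysicalCaptureResultInfo&, ...) in AidlUtils.cpp for a call site
+// that locks the source CameraMetadata, clones it, and unlocks it again.)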
+void cloneToAidl(const camera_metadata_t *src, SCameraMetadata* dst);
+
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst);
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode);
+
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src);
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src);
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation);
+
+int32_t convertFromAidl(STemplateId templateId);
+
+void convertToAidl(const hardware::camera2::utils::SubmitInfo &submitInfo,
+                   SSubmitInfo *hSubmitInfo);
+
+SStatus convertToAidl(const binder::Status &status);
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &captureResultExtras);
+
+SErrorCode convertToAidl(int32_t errorCode);
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+        const std::vector<UPhysicalCaptureResultInfo>& src,
+        std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo& src,
+                                         std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+                   std::vector<SCameraStatusAndId>* dst);
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src);
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2);
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
+
+#endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.cpp b/services/camera/libcameraservice/aidl/DeathPipe.cpp
new file mode 100644
index 0000000..de46411
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DeathPipe"
+
+#include "DeathPipe.h"
+
+namespace android::frameworks::cameraservice::utils {
+
+DeathPipe::DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder):
+      mParent(parent), mAIBinder(binder) {
+    mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
+            AIBinder_DeathRecipient_new(DeathPipe::onDeathCallback));
+    // Set an unlinked callback that allows Obituaries to be deallocated
+    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(),
+                                          DeathPipe::onUnlinkedCallback);
+}
+
+status_t DeathPipe::linkToDeath(const sp<IBinder::DeathRecipient>& recipient,
+                                void* cookie, uint32_t flags) {
+    LOG_ALWAYS_FATAL_IF(recipient == nullptr, "%s: recipient must be non-nullptr", __FUNCTION__);
+    std::lock_guard<std::mutex> _l(mLock);
+
+    // Create and immortalize an obituary before linking it to death.
+    // The created Obituary can now only be garbage collected if it is unlinked from death
+    std::shared_ptr<Obituary> obituary = std::make_shared<Obituary>(recipient, cookie,
+                                                                    flags, /* who= */ mParent);
+    obituary->immortalize();
+
+    // Ensure that "cookie" is a pointer to an immortal obituary.
+    // AIBinder_linkToDeath calls DeathPipe::onUnlinkedCallback if linking to death fails, marking
+    // it for garbage collection
+    binder_status_t ret = AIBinder_linkToDeath(mAIBinder.get(),
+                                               mDeathRecipient.get(),
+                                               /* cookie= */ obituary.get());
+    if (ret != STATUS_OK) {
+        return DEAD_OBJECT;
+    }
+    mObituaries.emplace_back(obituary);
+    return NO_ERROR;
+}
+
+status_t DeathPipe::unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+                                  void* cookie, uint32_t flags,
+                                  wp<IBinder::DeathRecipient>* outRecipient) {
+    std::lock_guard<std::mutex> _l(mLock);
+    // Temporary Obituary for checking equality
+    std::shared_ptr<Obituary> inObituary = std::make_shared<Obituary>(recipient, cookie,
+                                                                      flags, mParent);
+    for (auto it = mObituaries.begin(); it != mObituaries.end(); it++) {
+        if ((*inObituary) == (**it)) {
+            if (outRecipient != nullptr) {
+                *outRecipient = (*it)->recipient;
+            }
+            // Unlink the found Obituary from death. AIBinder_unlinkToDeath calls
+            // DeathPipe::onUnlinkedCallback with the given cookie when unlinking is done
+            binder_status_t ret = AIBinder_unlinkToDeath(mAIBinder.get(),
+                                                         mDeathRecipient.get(),
+                                                         /* cookie= */ (*it).get());
+            mObituaries.erase(it);
+            return ret == STATUS_OK ? NO_ERROR : DEAD_OBJECT;
+        }
+    }
+    return NAME_NOT_FOUND;
+}
+
+DeathPipe::~DeathPipe() = default;
+
+
+void DeathPipe::onDeathCallback(void* cookie) {
+    // Cookie will always be a pointer to a valid immortal Obituary
+    Obituary* obituary = static_cast<Obituary*>(cookie);
+    obituary->onDeath();
+    // Don't call Obituary::clear() because VNDK Binder will call DeathPipe::onUnlinkedCallback()
+    // when it is ready
+}
+
+void DeathPipe::onUnlinkedCallback(void* cookie) {
+    // Cookie will always be a pointer to a valid immortal Obituary.
+    Obituary* obituary = static_cast<Obituary*>(cookie);
+    // Mark obituary to be garbage collected if needed. onDeathCallback won't be called with
+    // this particular cookie after this.
+    obituary->clear();
+}
+
+} // namespace android::frameworks::cameraservice::utils
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.h b/services/camera/libcameraservice/aidl/DeathPipe.h
new file mode 100644
index 0000000..a816dd0
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <binder/Parcel.h>
+#include <list>
+
+namespace android::frameworks::cameraservice::utils {
+
+/**
+ * This is a helper class to pipe death notifications from a VNDK {@code AIBinder} to
+ * an S/NDK {@code IBinder}.
+ *
+ * To use this class, create a DeathPipe member object as a field of the NDK interface
+ * implementation, and forward {@code BBinder::linkToDeath} and
+ * {@code BBinder::unlinkToDeath} to the corresponding DeathPipe functions.
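+ *
+ * A minimal usage sketch (the adapter class below is hypothetical and only illustrates the
+ * forwarding described above):
+ *
+ *   class MyNdkService : public BBinder {
+ *     public:
+ *       explicit MyNdkService(const ::ndk::SpAIBinder& vndkBinder)
+ *             : mDeathPipe(this, vndkBinder) {}
+ *       status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ *                            uint32_t flags) override {
+ *           return mDeathPipe.linkToDeath(recipient, cookie, flags);
+ *       }
+ *       status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+ *                              wp<DeathRecipient>* outRecipient) override {
+ *           return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+ *       }
+ *     private:
+ *       DeathPipe mDeathPipe;
+ *   };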
+ */
+class DeathPipe {
+  public:
+    /**
+     * @param parent the NDK Binder object. Assumed to live longer than the DeathPipe
+     *               object
+     * @param binder the VNDK Binder object which DeathPipe will subscribe to.
+     */
+    explicit DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder);
+    ~DeathPipe();
+
+    status_t linkToDeath(const sp<IBinder::DeathRecipient>& recipient, void* cookie,
+                         uint32_t flags);
+    status_t unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+                           void* cookie, uint32_t flags, wp<IBinder::DeathRecipient>* outRecipient);
+
+    // Static functions that will be called by VNDK binder upon death or unlinking
+    static void onDeathCallback(void* cookie);
+    static void onUnlinkedCallback(void* cookie);
+
+  private:
+    /**
+     * {@code Obituary} is a small container holding the metadata needed to pass the VNDK
+     * binder's death notification on to the NDK binder. A pointer to the Obituary is used as
+     * the {@code cookie} in the VNDK binder's death notification.
+     *
+     * Theoretically, the VNDK binder might send out a death notification after the DeathPipe
+     * object is destroyed, so care must be taken to ensure that Obituaries aren't accidentally
+     * destroyed before the VNDK binder stops using its cookies.
+     *
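+     * Illustrative lifecycle, as implemented in DeathPipe.cpp: linkToDeath() creates an
+     * Obituary, calls immortalize() on it, and passes the raw pointer as the
+     * AIBinder_linkToDeath cookie; onDeathCallback()/onUnlinkedCallback() later receive that
+     * same cookie, and onUnlinkedCallback() calls clear() so the Obituary can finally be
+     * destroyed.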
+     */
+    struct Obituary: public std::enable_shared_from_this<Obituary> {
+        wp<IBinder::DeathRecipient> recipient; // NDK death recipient
+        void *cookie; // cookie sent by the NDK recipient
+        uint32_t flags; // flags sent by the NDK recipient
+        wp<IBinder> who; // NDK binder whose death 'recipient' subscribed to
+
+        // Self ptr to ensure we don't destroy this obituary while it can still be notified by the
+        // VNDK Binder. When populated with Obituary::immortalize, this Obituary won't be
+        // garbage collected until Obituary::clear is called.
+        std::shared_ptr<Obituary> mSelfPtr;
+
+        Obituary(const wp<IBinder::DeathRecipient>& recipient, void* cookie,
+                 uint32_t flags, IBinder* who) :
+              recipient(recipient), cookie(cookie), flags(flags),
+              who(who), mSelfPtr(nullptr) {}
+
+        // Function to be called when the VNDK Binder dies. Pipes the notification to the relevant
+        // NDK recipient if it still exists
+        void onDeath() const {
+            sp<IBinder::DeathRecipient> r = recipient.promote();
+            if (r == nullptr) { return; }
+            r->binderDied(who);
+        }
+
+        // Should be called before calling AIBinder_linkToDeath. Once this function returns this
+        // Obituary won't be garbage collected until Obituary::clear is called.
+        void immortalize() {
+            mSelfPtr = shared_from_this();
+        }
+
+        // Should be called when this Obituary can be garbage collected.
+        // Typically, after the Obituary is no longer linked to a VNDK DeathRecipient
+        void clear() {
+            mSelfPtr = nullptr;
+        }
+
+        bool operator==(const Obituary& rhs) const {
+            return recipient == rhs.recipient &&
+                   cookie == rhs.cookie &&
+                   flags == rhs.flags &&
+                   who == rhs.who;
+        }
+    };
+
+    // Parent to which the cameraservice wants to subscribe for death notifications
+    IBinder* mParent;
+
+    // VNDK Binder object to which the death notification will be bound. If it dies,
+    // the cameraservice will be notified as if mParent died.
+    ::ndk::SpAIBinder mAIBinder;
+
+    // Owning VNDK's deathRecipient ensures that all linked death notifications are cleaned up
+    // when this class destructs.
+    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+    // Lock to protect access to fields below.
+    std::mutex mLock;
+    // List of all obituaries created by DeathPipe, used to unlink death subscription
+    std::list<std::shared_ptr<Obituary>> mObituaries;
+
+};
+
+} // namespace android::frameworks::cameraservice::utils
+
+#endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
similarity index 79%
rename from services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
rename to services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index d3377f4..48c804d 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -21,7 +21,7 @@
  * ! Do not edit this file directly !
  *
  * Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
- * only by hidl/Utils.cpp.
+ * only by aidl/AidlUtils.cpp.
  */
 
 /**
@@ -31,47 +31,59 @@
 std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
       {30, {
           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
           ANDROID_CONTROL_ZOOM_RATIO_RANGE,
           ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
-          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
         } },
       {31, {
-          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_SENSOR_INFO_BINNING_FACTOR,
+          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
         } },
       {32, {
           ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
         } },
       {33, {
-          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
           ANDROID_AUTOMOTIVE_LENS_FACING,
           ANDROID_AUTOMOTIVE_LOCATION,
+          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
           ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
           ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
-          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
-          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_SENSOR_READOUT_TIMESTAMP,
+        } },
+      {34, {
+          ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+          ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS,
+          ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
         } },
 };
 
@@ -81,12 +93,21 @@
  */
 std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
       {30, {
+          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
           ANDROID_CONTROL_ZOOM_RATIO,
           ANDROID_SCALER_ROTATE_AND_CROP,
-          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
         }  },
       {31, {
           ANDROID_SENSOR_PIXEL_MODE,
           ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
         }  },
+      {34, {
+          ANDROID_CONTROL_AUTOFRAMING,
+          ANDROID_CONTROL_AUTOFRAMING_STATE,
+          ANDROID_CONTROL_SETTINGS_OVERRIDE,
+          ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+          ANDROID_EXTENSION_CURRENT_TYPE,
+          ANDROID_EXTENSION_STRENGTH,
+          ANDROID_SCALER_RAW_CROP_REGION,
+        }  },
 };
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 68dc7f8..8348cd9 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -52,6 +52,7 @@
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraDeviceId,
@@ -61,11 +62,14 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass):
-        Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+        bool overrideForPerfClass,
+        bool overrideToPortrait,
+        bool forceSlowJpegMode):
+        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
+                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
@@ -76,6 +80,7 @@
 
     SharedParameters::Lock l(mParameters);
     l.mParameters.state = Parameters::DISCONNECTED;
+    l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
 }
 
 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
@@ -139,28 +144,53 @@
     mFrameProcessor = new FrameProcessor(mDevice, this);
     threadName = String8::format("C2-%d-FrameProc",
             mCameraId);
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mCaptureSequencer = new CaptureSequencer(this);
     threadName = String8::format("C2-%d-CaptureSeq",
             mCameraId);
-    mCaptureSequencer->run(threadName.string());
+    res = mCaptureSequencer->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
     threadName = String8::format("C2-%d-JpegProc",
             mCameraId);
-    mJpegProcessor->run(threadName.string());
+    res = mJpegProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
 
     threadName = String8::format("C2-%d-ZslProc",
             mCameraId);
-    mZslProcessor->run(threadName.string());
+    res = mZslProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mCallbackProcessor = new CallbackProcessor(this);
     threadName = String8::format("C2-%d-CallbkProc",
             mCameraId);
-    mCallbackProcessor->run(threadName.string());
+    res = mCallbackProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start callback processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     if (gLogLevel >= 1) {
         SharedParameters::Lock l(mParameters);
@@ -473,12 +503,13 @@
 
     ALOGV("Camera %d: Disconnecting device", mCameraId);
 
+    bool hasDeviceError = mDevice->hasDeviceError();
     mDevice->disconnect();
 
     CameraService::Client::disconnect();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
 
     return res;
 }
@@ -1330,21 +1361,18 @@
             || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
 }
 
-void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
-    (void)mem;
+void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
-void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
-    (void)handle;
+void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
 void Camera2Client::releaseRecordingFrameHandleBatch(
-        const std::vector<native_handle_t*>& handles) {
-    (void)handles;
+        [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
     ATRACE_CALL();
     ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
@@ -2316,6 +2344,10 @@
     return INVALID_OPERATION;
 }
 
+status_t Camera2Client::setCameraServiceWatchdog(bool enabled) {
+    return mDevice->setCameraServiceWatchdog(enabled);
+}
+
 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop) {
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
@@ -2333,6 +2365,13 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
+    if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+    return mDevice->setAutoframingAutoBehavior(
+        static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
 bool Camera2Client::supportsCameraMute() {
     return mDevice->supportsCameraMute();
 }
@@ -2341,6 +2380,23 @@
     return mDevice->setCameraMute(enabled);
 }
 
+void Camera2Client::setStreamUseCaseOverrides(
+        const std::vector<int64_t>& useCaseOverrides) {
+    mDevice->setStreamUseCaseOverrides(useCaseOverrides);
+}
+
+void Camera2Client::clearStreamUseCaseOverrides() {
+    mDevice->clearStreamUseCaseOverrides();
+}
+
+bool Camera2Client::supportsZoomOverride() {
+    return mDevice->supportsZoomOverride();
+}
+
+status_t Camera2Client::setZoomOverride(int zoomOverride) {
+    return mDevice->setZoomOverride(zoomOverride);
+}
+
 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
     int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
     if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index c8dfc46..6bdb644 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -86,16 +86,27 @@
     virtual status_t        setAudioRestriction(int mode);
     virtual int32_t         getGlobalAudioRestriction();
     virtual status_t        setRotateAndCropOverride(uint8_t rotateAndCrop);
+    virtual status_t        setAutoframingOverride(uint8_t autoframingMode);
 
     virtual bool            supportsCameraMute();
     virtual status_t        setCameraMute(bool enabled);
 
+    virtual status_t        setCameraServiceWatchdog(bool enabled);
+
+    virtual void            setStreamUseCaseOverrides(
+                                    const std::vector<int64_t>& useCaseOverrides);
+    virtual void            clearStreamUseCaseOverrides();
+
+    virtual bool            supportsZoomOverride();
+    virtual status_t        setZoomOverride(int32_t zoomOverride);
+
     /**
      * Interface used by CameraService
      */
 
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             const std::optional<String16>& clientFeatureId,
             const String8& cameraDeviceId,
@@ -105,7 +116,9 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait,
+            bool forceSlowJpegMode);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 2daacd1..74423e5 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -59,6 +59,8 @@
         m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
         m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
         m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+
+        mLastFocalLength = l.mParameters.params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
     }
 }
 
@@ -92,9 +94,32 @@
         client->notifyRequestId(mCurrentRequestId);
     }
 
+    processLensState(frame.mMetadata, client);
+
     return FrameProcessorBase::processSingleFrame(frame, device);
 }
 
+void FrameProcessor::processLensState(const CameraMetadata &frame,
+        const sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    camera_metadata_ro_entry_t entry;
+
+    entry = frame.find(ANDROID_LENS_FOCAL_LENGTH);
+    if (entry.count == 0) {
+        return;
+    }
+
+    if (fabs(entry.data.f[0] - mLastFocalLength) > 0.001f) {
+        SharedParameters::Lock l(client->getParameters());
+        l.mParameters.params.setFloat(
+                CameraParameters::KEY_FOCAL_LENGTH,
+                entry.data.f[0]);
+        l.mParameters.paramsFlattened = l.mParameters.params.flatten();
+
+        mLastFocalLength = entry.data.f[0];
+    }
+}
+
 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
         const sp<Camera2Client> &client) {
     status_t res = BAD_VALUE;
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index bb985f6..6c8d221 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -57,6 +57,9 @@
     virtual bool processSingleFrame(CaptureResult &frame,
                                     const sp<FrameProducer> &device);
 
+    void processLensState(const CameraMetadata &frame,
+            const sp<Camera2Client> &client);
+
     status_t processFaceDetect(const CameraMetadata &frame,
             const sp<Camera2Client> &client);
 
@@ -110,6 +113,9 @@
     // Emit FaceDetection event to java if faces changed
     void callbackFaceDetection(const sp<Camera2Client>& client,
                                const camera_frame_metadata &metadata);
+
+    // Track most recent focal length sent by the camera device
+    float mLastFocalLength;
 };
 
 
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
deleted file mode 100644
index 01951a0..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Camera2-JpegCompressor"
-
-#include <utils/Log.h>
-#include <ui/GraphicBufferMapper.h>
-
-#include "JpegCompressor.h"
-
-namespace android {
-namespace camera2 {
-
-JpegCompressor::JpegCompressor():
-        Thread(false),
-        mIsBusy(false),
-        mCaptureTime(0) {
-}
-
-JpegCompressor::~JpegCompressor() {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mMutex);
-}
-
-status_t JpegCompressor::start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
-        nsecs_t captureTime) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock busyLock(mBusyMutex);
-
-    if (mIsBusy) {
-        ALOGE("%s: Already processing a buffer!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    mIsBusy = true;
-
-    mBuffers = buffers;
-    mCaptureTime = captureTime;
-
-    status_t res;
-    res = run("JpegCompressor");
-    if (res != OK) {
-        ALOGE("%s: Unable to start up compression thread: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        //delete mBuffers;  // necessary?
-    }
-    return res;
-}
-
-status_t JpegCompressor::cancel() {
-    ALOGV("%s", __FUNCTION__);
-    requestExitAndWait();
-    return OK;
-}
-
-status_t JpegCompressor::readyToRun() {
-    ALOGV("%s", __FUNCTION__);
-    return OK;
-}
-
-bool JpegCompressor::threadLoop() {
-    ALOGV("%s", __FUNCTION__);
-
-    mAuxBuffer = mBuffers[0];    // input
-    mJpegBuffer = mBuffers[1];    // output
-
-    // Set up error management
-    mJpegErrorInfo = NULL;
-    JpegError error;
-    error.parent = this;
-
-    mCInfo.err = jpeg_std_error(&error);
-    mCInfo.err->error_exit = jpegErrorHandler;
-
-    jpeg_create_compress(&mCInfo);
-    if (checkError("Error initializing compression")) return false;
-
-    // Route compressed data straight to output stream buffer
-    JpegDestination jpegDestMgr;
-    jpegDestMgr.parent = this;
-    jpegDestMgr.init_destination = jpegInitDestination;
-    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
-    jpegDestMgr.term_destination = jpegTermDestination;
-
-    mCInfo.dest = &jpegDestMgr;
-
-    // Set up compression parameters
-    mCInfo.image_width = mAuxBuffer->width;
-    mCInfo.image_height = mAuxBuffer->height;
-    mCInfo.input_components = 1; // 3;
-    mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB
-
-    ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height);
-
-    jpeg_set_defaults(&mCInfo);
-    if (checkError("Error configuring defaults")) return false;
-
-    // Do compression
-    jpeg_start_compress(&mCInfo, TRUE);
-    if (checkError("Error starting compression")) return false;
-
-    size_t rowStride = mAuxBuffer->stride;// * 3;
-    const size_t kChunkSize = 32;
-    while (mCInfo.next_scanline < mCInfo.image_height) {
-        JSAMPROW chunk[kChunkSize];
-        for (size_t i = 0 ; i < kChunkSize; i++) {
-            chunk[i] = (JSAMPROW)
-                    (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride);
-        }
-        jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
-        if (checkError("Error while compressing")) return false;
-        if (exitPending()) {
-            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
-            cleanUp();
-            return false;
-        }
-    }
-
-    jpeg_finish_compress(&mCInfo);
-    if (checkError("Error while finishing compression")) return false;
-
-    cleanUp();
-    return false;
-}
-
-bool JpegCompressor::isBusy() {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock busyLock(mBusyMutex);
-    return mIsBusy;
-}
-
-// old function -- TODO: update for new buffer type
-bool JpegCompressor::isStreamInUse(uint32_t /*id*/) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mBusyMutex);
-
-    if (mBuffers.size() && mIsBusy) {
-        for (size_t i = 0; i < mBuffers.size(); i++) {
-//            if ( mBuffers[i].streamId == (int)id ) return true;
-        }
-    }
-    return false;
-}
-
-bool JpegCompressor::waitForDone(nsecs_t timeout) {
-    ALOGV("%s", __FUNCTION__);
-    Mutex::Autolock lock(mBusyMutex);
-    status_t res = OK;
-    if (mIsBusy) {
-        res = mDone.waitRelative(mBusyMutex, timeout);
-    }
-    return (res == OK);
-}
-
-bool JpegCompressor::checkError(const char *msg) {
-    ALOGV("%s", __FUNCTION__);
-    if (mJpegErrorInfo) {
-        char errBuffer[JMSG_LENGTH_MAX];
-        mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
-        ALOGE("%s: %s: %s",
-                __FUNCTION__, msg, errBuffer);
-        cleanUp();
-        mJpegErrorInfo = NULL;
-        return true;
-    }
-    return false;
-}
-
-void JpegCompressor::cleanUp() {
-    ALOGV("%s", __FUNCTION__);
-    jpeg_destroy_compress(&mCInfo);
-    Mutex::Autolock lock(mBusyMutex);
-    mIsBusy = false;
-    mDone.signal();
-}
-
-void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
-    ALOGV("%s", __FUNCTION__);
-    JpegError *error = static_cast<JpegError*>(cinfo->err);
-    error->parent->mJpegErrorInfo = cinfo;
-}
-
-void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
-    ALOGV("%s", __FUNCTION__);
-    JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
-    ALOGV("%s: Setting destination to %p, size %zu",
-            __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize);
-    dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data);
-    dest->free_in_buffer = kMaxJpegSize;
-}
-
-boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) {
-    ALOGV("%s", __FUNCTION__);
-    ALOGE("%s: JPEG destination buffer overflow!",
-            __FUNCTION__);
-    return true;
-}
-
-void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
-    (void) cinfo; // TODO: clean up
-    ALOGV("%s", __FUNCTION__);
-    ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
-            __FUNCTION__, cinfo->dest->free_in_buffer);
-}
-
-}; // namespace camera2
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
deleted file mode 100644
index 589a2fd..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * This class simulates a hardware JPEG compressor.  It receives image buffers
- * in RGBA_8888 format, processes them in a worker thread, and then pushes them
- * out to their destination stream.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-
-#include "utils/Thread.h"
-#include "utils/Mutex.h"
-#include "utils/Timers.h"
-#include "utils/Vector.h"
-//#include "Base.h"
-#include <stdio.h>
-#include <gui/CpuConsumer.h>
-
-extern "C" {
-#include <jpeglib.h>
-}
-
-
-namespace android {
-namespace camera2 {
-
-class JpegCompressor: private Thread, public virtual RefBase {
-  public:
-
-    JpegCompressor();
-    ~JpegCompressor();
-
-    // Start compressing COMPRESSED format buffers; JpegCompressor takes
-    // ownership of the Buffers vector.
-    status_t start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
-            nsecs_t captureTime);
-
-    status_t cancel();
-
-    bool isBusy();
-    bool isStreamInUse(uint32_t id);
-
-    bool waitForDone(nsecs_t timeout);
-
-    // TODO: Measure this
-    static const size_t kMaxJpegSize = 300000;
-
-  private:
-    Mutex mBusyMutex;
-    Mutex mMutex;
-    bool mIsBusy;
-    Condition mDone;
-    nsecs_t mCaptureTime;
-
-    Vector<CpuConsumer::LockedBuffer*> mBuffers;
-    CpuConsumer::LockedBuffer *mJpegBuffer;
-    CpuConsumer::LockedBuffer *mAuxBuffer;
-
-    jpeg_compress_struct mCInfo;
-
-    struct JpegError : public jpeg_error_mgr {
-        JpegCompressor *parent;
-    };
-    j_common_ptr mJpegErrorInfo;
-
-    struct JpegDestination : public jpeg_destination_mgr {
-        JpegCompressor *parent;
-    };
-
-    static void jpegErrorHandler(j_common_ptr cinfo);
-
-    static void jpegInitDestination(j_compress_ptr cinfo);
-    static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
-    static void jpegTermDestination(j_compress_ptr cinfo);
-
-    bool checkError(const char *msg);
-    void cleanUp();
-
-    /**
-     * Inherited Thread virtual overrides
-     */
-  private:
-    virtual status_t readyToRun();
-    virtual bool threadLoop();
-};
-
-}; // namespace camera2
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 123cd75..13dcbaa 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -43,6 +43,7 @@
         int cameraFacing) :
         cameraId(cameraId),
         cameraFacing(cameraFacing),
+        isSlowJpegModeForced(false),
         info(NULL),
         mDefaultSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED) {
 }
@@ -953,6 +954,12 @@
                 false);
 
     if (availableVideoStabilizationModes.count > 1) {
+        for (size_t i = 0; i < availableVideoStabilizationModes.count; i++) {
+            if (availableVideoStabilizationModes.data.u8[i] ==
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) {
+                videoStabilizationOnSupported = true;
+            }
+        }
         params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED,
                 CameraParameters::TRUE);
     } else {
@@ -984,9 +991,8 @@
     Size maxJpegSize = getMaxSize(getAvailableJpegSizes());
     int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(maxJpegSize);
 
-    slowJpegMode = false;
-    if (minFrameDurationNs > kSlowJpegModeThreshold) {
-        slowJpegMode = true;
+    slowJpegMode = isSlowJpegModeForced || minFrameDurationNs > kSlowJpegModeThreshold;
+    if (slowJpegMode) {
         // Slow jpeg devices does not support video snapshot without
         // slowing down preview.
         // TODO: support video size video snapshot only?
@@ -2083,7 +2089,7 @@
     paramsFlattened = newParams.flatten();
     params = newParams;
 
-    slowJpegMode = false;
+    slowJpegMode = isSlowJpegModeForced;
     Size pictureSize = { pictureWidth, pictureHeight };
     bool zslFrameRateSupported = false;
     int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
@@ -2373,9 +2379,11 @@
             reqCropRegion, 4);
     if (res != OK) return res;
 
-    uint8_t reqVstabMode = videoStabilization ?
+    uint8_t reqVstabMode = videoStabilization ? videoStabilizationOnSupported ?
             ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON :
+                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION :
             ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+
     res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
             &reqVstabMode, 1);
     if (res != OK) return res;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index cbe62a7..2bd3d43 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -147,6 +147,7 @@
 
     bool recordingHint;
     bool videoStabilization;
+    bool videoStabilizationOnSupported = false;
 
     CameraParameters2 params;
     String8 paramsFlattened;
@@ -182,6 +183,8 @@
     bool isDeviceZslSupported;
     // Whether the device supports geometric distortion correction
     bool isDistortionCorrectionSupported;
+    // Whether slowJpegMode is forced regardless of jpeg stream FPS
+    bool isSlowJpegModeForced;
 
     // Overall camera state
     enum State {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 5e91501..38c615d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -31,12 +31,12 @@
 #include "device3/Camera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <camera_metadata_hidden.h>
 
 #include "DepthCompositeStream.h"
 #include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
 
 // Convenience methods for constructing binder::Status objects for error returns
 
@@ -61,12 +61,13 @@
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
-        int api1CameraId,
+        [[maybe_unused]] int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid) :
+        int servicePid,
+        bool overrideToPortrait) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             clientPackageName,
@@ -77,16 +78,16 @@
             sensorOrientation,
             clientPid,
             clientUid,
-            servicePid),
+            servicePid,
+            overrideToPortrait),
     mRemoteCallback(remoteCallback) {
-    // We don't need it for API2 clients, but Camera2ClientBase requires it.
-    (void) api1CameraId;
 }
 
 // Interface used by CameraService
 
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -96,10 +97,12 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass) :
-    Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass),
+        bool overrideForPerfClass,
+        bool overrideToPortrait) :
+    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+            systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+            sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
+            overrideToPortrait),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
@@ -127,7 +130,12 @@
     String8 threadName;
     mFrameProcessor = new FrameProcessorBase(mDevice);
     threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                                       camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -181,11 +189,11 @@
     // Cache physical camera ids corresponding to this device and also the high
     // resolution sensors in this device + physical camera ids
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
-    if (isUltraHighResolutionSensor(mCameraIdStr)) {
+    if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
         mHighResolutionSensors.insert(mCameraIdStr.string());
     }
     for (auto &physicalId : mPhysicalCameraIds) {
-        if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
+        if (supportsUltraHighResolutionCapture(String8(physicalId.c_str()))) {
             mHighResolutionSensors.insert(physicalId.c_str());
         }
     }
@@ -692,7 +700,7 @@
 
         nsecs_t configureEnd = systemTime();
         int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
-        CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+        mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
                 false /*internalReconfig*/, configureDurationMs);
     }
 
@@ -879,6 +887,8 @@
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
+    bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
@@ -923,7 +933,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
 
         if (!res.isOk())
             return res;
@@ -944,20 +954,27 @@
     std::vector<int> surfaceIds;
     bool isDepthCompositeStream =
             camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
-    bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
-    if (isDepthCompositeStream || isHeicCompisiteStream) {
+    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
+    bool isJpegRCompositeStream =
+        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+        !mDevice->isCompositeJpegRDisabled();
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
         sp<CompositeStream> compositeStream;
         if (isDepthCompositeStream) {
             compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
-        } else {
+        } else if (isHeicCompositeStream) {
             compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+        } else {
+            compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
         }
 
         err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
                 streamInfo.height, streamInfo.format,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
-                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+                streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+                useReadoutTimestamp);
         if (err == OK) {
             Mutex::Autolock l(mCompositeLock);
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -970,7 +987,8 @@
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                 outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
                 /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
-                streamInfo.timestampBase, streamInfo.mirrorMode);
+                streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
+                useReadoutTimestamp);
     }
 
     if (err != OK) {
@@ -1022,6 +1040,7 @@
     int width, height, format, surfaceType;
     uint64_t consumerUsage;
     android_dataspace dataSpace;
+    int32_t colorSpace;
     status_t err;
     binder::Status res;
 
@@ -1035,6 +1054,7 @@
     surfaceType = outputConfiguration.getSurfaceType();
     format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+    colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
     // Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
     consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
     if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1052,7 +1072,7 @@
             outputConfiguration.getSensorPixelModesUsed();
     if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
             sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
-            /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+            &overriddenSensorPixelModesUsed) != OK) {
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "sensor pixel modes used not valid for deferred stream");
     }
@@ -1067,7 +1087,8 @@
             outputConfiguration.isMultiResolution(), consumerUsage,
             outputConfiguration.getDynamicRangeProfile(),
             outputConfiguration.getStreamUseCase(),
-            outputConfiguration.getMirrorMode());
+            outputConfiguration.getMirrorMode(),
+            outputConfiguration.useReadoutTimestamp());
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1084,7 +1105,8 @@
                         outputConfiguration.getDynamicRangeProfile(),
                         outputConfiguration.getStreamUseCase(),
                         outputConfiguration.getTimestampBase(),
-                        outputConfiguration.getMirrorMode()));
+                        outputConfiguration.getMirrorMode(),
+                        colorSpace));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
@@ -1275,6 +1297,7 @@
     int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
     int mirrorMode = outputConfiguration.getMirrorMode();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1283,7 +1306,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
         if (!res.isOk())
             return res;
 
@@ -1457,7 +1480,7 @@
 
 binder::Status CameraDeviceClient::prepare(int streamId) {
     ATRACE_CALL();
-    ALOGV("%s", __FUNCTION__);
+    ALOGV("%s stream id %d", __FUNCTION__, streamId);
 
     binder::Status res;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1497,7 +1520,7 @@
 
 binder::Status CameraDeviceClient::prepare2(int maxCount, int streamId) {
     ATRACE_CALL();
-    ALOGV("%s", __FUNCTION__);
+    ALOGV("%s stream id %d", __FUNCTION__, streamId);
 
     binder::Status res;
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1641,7 +1664,8 @@
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
-    int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+    int32_t colorSpace = outputConfiguration.getColorSpace();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
     int timestampBase = outputConfiguration.getTimestampBase();
     int mirrorMode = outputConfiguration.getMirrorMode();
     for (auto& bufferProducer : bufferProducers) {
@@ -1657,7 +1681,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode);
+                streamUseCase, timestampBase, mirrorMode, colorSpace);
 
         if (!res.isOk())
             return res;
@@ -1730,6 +1754,10 @@
     return binder::Status::ok();
 }
 
+status_t CameraDeviceClient::setCameraServiceWatchdog(bool enabled) {
+    return mDevice->setCameraServiceWatchdog(enabled);
+}
+
 status_t CameraDeviceClient::setRotateAndCropOverride(uint8_t rotateAndCrop) {
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
@@ -1737,6 +1765,13 @@
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
 
+status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
+    if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+    return mDevice->setAutoframingAutoBehavior(
+        static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
 bool CameraDeviceClient::supportsCameraMute() {
     return mDevice->supportsCameraMute();
 }
@@ -1745,6 +1780,23 @@
     return mDevice->setCameraMute(enabled);
 }
 
+void CameraDeviceClient::setStreamUseCaseOverrides(
+        const std::vector<int64_t>& useCaseOverrides) {
+    mDevice->setStreamUseCaseOverrides(useCaseOverrides);
+}
+
+void CameraDeviceClient::clearStreamUseCaseOverrides() {
+    mDevice->clearStreamUseCaseOverrides();
+}
+
+bool CameraDeviceClient::supportsZoomOverride() {
+    return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+    return mDevice->setZoomOverride(zoomOverride);
+}
+
 binder::Status CameraDeviceClient::switchToOffline(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::vector<int>& offlineOutputIds,
@@ -1797,7 +1849,9 @@
         for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
             isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
-                camera3::HeicCompositeStream::isHeicCompositeStream(s);
+                camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+                (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+                 !mDevice->isCompositeJpegRDisabled());
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
                 if (compositeIdx == NAME_NOT_FOUND) {
@@ -1847,6 +1901,10 @@
         mCompositeStreamMap.clear();
         mInputStream = {false, 0, 0, 0, 0};
     } else {
+        // In case we failed to register the offline client, ensure that it is still initialized
+        // so that all failing requests can be returned correctly once the object is released.
+        offlineClient->initialize(nullptr /*cameraProviderManager*/, String8()/*monitorTags*/);
+
         switch(ret) {
             case BAD_VALUE:
                 return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -1976,8 +2034,20 @@
     if (remoteCb != 0) {
         remoteCb->onDeviceIdle();
     }
+
+    std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+    {
+        Mutex::Autolock l(mCompositeLock);
+        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+            hardware::CameraStreamStats compositeStats;
+            mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+            if (compositeStats.mWidth > 0) {
+                fullStreamStats.push_back(compositeStats);
+            }
+        }
+    }
     Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
-            streamStats, mUserTag, mVideoStabilizationMode);
+            fullStreamStats, mUserTag, mVideoStabilizationMode);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2000,6 +2070,7 @@
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
     if (remoteCb != 0) {
+        ALOGV("%s: stream id %d", __FUNCTION__, streamId);
         remoteCb->onPrepared(streamId);
     }
 }
@@ -2054,10 +2125,11 @@
         mCompositeStreamMap.clear();
     }
 
+    bool hasDeviceError = mDevice->hasDeviceError();
     Camera2ClientBase::detachDevice();
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
-    CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+    mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
 }
 
 /** Device-related methods */
@@ -2195,9 +2267,9 @@
     return mDevice->infoPhysical(cameraId);
 }
 
-bool CameraDeviceClient::isUltraHighResolutionSensor(const String8 &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const String8 &cameraId) {
     const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
-    return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+    return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
 }
 
 bool CameraDeviceClient::isSensorPixelModeConsistent(
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c5aad6b..1533cf5 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
 #include "common/FrameProcessorBase.h"
 #include "common/Camera2ClientBase.h"
 #include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include "utils/SessionConfigurationUtils.h"
 
 using android::camera3::OutputStreamInfo;
@@ -58,7 +59,8 @@
             int sensorOrientation,
             int clientPid,
             uid_t clientUid,
-            int servicePid);
+            int servicePid,
+            bool overrideToPortrait);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -178,6 +180,7 @@
 
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
             const String16& clientPackageName,
             bool clientPackageOverride,
             const std::optional<String16>& clientFeatureId,
@@ -187,7 +190,8 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -195,9 +199,14 @@
 
     virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    virtual status_t      setAutoframingOverride(uint8_t autoframingValue) override;
+
     virtual bool          supportsCameraMute();
     virtual status_t      setCameraMute(bool enabled);
 
+    virtual bool          supportsZoomOverride() override;
+    virtual status_t      setZoomOverride(int32_t zoomOverride) override;
+
     virtual status_t      dump(int fd, const Vector<String16>& args);
 
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
@@ -206,6 +215,11 @@
     virtual status_t      stopWatchingTags(int out);
     virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out);
 
+    virtual status_t      setCameraServiceWatchdog(bool enabled);
+
+    virtual void          setStreamUseCaseOverrides(const std::vector<int64_t>& useCaseOverrides);
+    virtual void          clearStreamUseCaseOverrides() override;
+
     /**
      * Device listener interface
      */
@@ -231,7 +245,7 @@
     // Calculate the ANativeWindow transform from android.sensor.orientation
     status_t              getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
 
-    bool isUltraHighResolutionSensor(const String8 &cameraId);
+    bool supportsUltraHighResolutionCapture(const String8 &cameraId);
 
     bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
             const CameraMetadata &settings);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 9303fd2..86a0ebc 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -29,6 +29,11 @@
 status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const String8&) {
     ATRACE_CALL();
 
+    if (mFrameProcessor.get() != nullptr) {
+        // Already initialized
+        return OK;
+    }
+
     // Verify ops permissions
     auto res = startCameraOps();
     if (res != OK) {
@@ -44,7 +49,12 @@
     String8 threadName;
     mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
     threadName = String8::format("Offline-%s-FrameProc", mCameraIdStr.string());
-    mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run(threadName.string());
+    if (res != OK) {
+        ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                                       camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -66,12 +76,20 @@
     return OK;
 }
 
+status_t CameraOfflineSessionClient::setCameraServiceWatchdog(bool) {
+    return OK;
+}
+
 status_t CameraOfflineSessionClient::setRotateAndCropOverride(uint8_t /*rotateAndCrop*/) {
     // Since we're not submitting more capture requests, changes to rotateAndCrop override
     // make no difference.
     return OK;
 }
 
+status_t CameraOfflineSessionClient::setAutoframingOverride(uint8_t) {
+    return OK;
+}
+
 bool CameraOfflineSessionClient::supportsCameraMute() {
     // Offline mode doesn't support muting
     return false;
@@ -81,6 +99,20 @@
     return INVALID_OPERATION;
 }
 
+void CameraOfflineSessionClient::setStreamUseCaseOverrides(
+        const std::vector<int64_t>& /*useCaseOverrides*/) {
+}
+
+void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
+}
+
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+    return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+    return INVALID_OPERATION;
+}
 
 status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
     return BasicClient::dump(fd, args);
@@ -232,7 +264,7 @@
     mOpsActive = true;
 
     // Transition device state to OPEN
-    sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
 
     return OK;
 }
@@ -256,7 +288,7 @@
     }
     mOpsCallback.clear();
 
-    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+    sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
 
     return OK;
 }
@@ -300,26 +332,20 @@
     finishCameraStreamingOps();
 }
 
-void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
-void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoWhitebalance([[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
             triggerId);
 }
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index f2c42d8..ad763f9 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -56,7 +56,8 @@
                     IInterface::asBinder(remoteCallback),
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
-                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
+                    /*overrideToPortrait*/false),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
@@ -81,9 +82,22 @@
 
     status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
+    status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
     bool supportsCameraMute() override;
     status_t setCameraMute(bool enabled) override;
 
+    status_t setCameraServiceWatchdog(bool enabled) override;
+
+    void setStreamUseCaseOverrides(
+            const std::vector<int64_t>& useCaseOverrides) override;
+
+    void clearStreamUseCaseOverrides() override;
+
+    bool supportsZoomOverride() override;
+
+    status_t setZoomOverride(int32_t zoomOverride) override;
+
     // permissions management
     status_t startCameraOps() override;
     status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 4b840fc..4ed1c28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
         camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> * surfaceIds,
-        int streamSetId, bool isShared, bool isMultiResolution) {
+        int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
     if (hasDeferredConsumer) {
         ALOGE("%s: Deferred consumers not supported in case of composite streams!",
                 __FUNCTION__);
@@ -75,7 +76,8 @@
     }
 
     return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
-            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+            colorSpace, dynamicProfile, streamUseCase, useReadoutTimestamp);
 }
 
 status_t CompositeStream::deleteStream() {
@@ -85,6 +87,7 @@
         mCaptureResults.clear();
         mFrameNumberMap.clear();
         mErrorFrameNumbers.clear();
+        mRequestTimeMap.clear();
     }
 
     return deleteInternalStreams();
@@ -95,6 +98,8 @@
     Mutex::Autolock l(mMutex);
     if (!mErrorState && (streamId == getStreamId())) {
         mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+        auto ts = systemTime();
+        mRequestTimeMap.emplace(frameNumber, ts);
     }
 }
 
@@ -109,6 +114,11 @@
 void CompositeStream::eraseResult(int64_t frameNumber) {
     Mutex::Autolock l(mMutex);
 
+    auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+    if (requestTimeIt != mRequestTimeMap.end()) {
+        mRequestTimeMap.erase(requestTimeIt);
+    }
+
     auto it = mPendingCaptureResults.find(frameNumber);
     if (it == mPendingCaptureResults.end()) {
         return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..a551d11 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared, bool isMultiResolution);
+            int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp);
 
     status_t deleteStream();
 
@@ -59,7 +60,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) = 0;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) = 0;
 
     // Release all internal streams and corresponding resources.
     virtual status_t deleteInternalStreams() = 0;
@@ -81,6 +83,9 @@
     // Notify when shutter notify is triggered
     virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
 
+    // Get composite stream stats
+    virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
     void onResultAvailable(const CaptureResult& result);
     bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
 
@@ -138,6 +143,9 @@
     // Keeps a set buffer/result frame numbers for any errors detected during processing.
     std::set<int64_t> mErrorFrameNumbers;
 
+    // Frame number to request time map
+    std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
 };
 
 }; //namespace camera3
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 048d85d..737c2b5 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -98,7 +98,7 @@
         }
 
         getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
-        if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+        if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
             getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
         }
     }
@@ -581,7 +581,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
+        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
     if (mSupportedDepthSizes.empty()) {
         ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
         return INVALID_OPERATION;
@@ -612,7 +613,14 @@
     mBlobSurface = new Surface(producer);
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
-            id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+            /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            OutputConfiguration::MIRROR_MODE_AUTO,
+            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            useReadoutTimestamp);
     if (ret == OK) {
         mBlobStreamId = *id;
         mBlobSurfaceId = (*surfaceIds)[0];
@@ -629,7 +637,14 @@
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
             kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
-            &depthSurfaceId);
+            &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
+            /*isMultiResolution*/false, /*consumerUsage*/0,
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            OutputConfiguration::MIRROR_MODE_AUTO,
+            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            useReadoutTimestamp);
     if (ret == OK) {
         mDepthSurfaceId = depthSurfaceId[0];
     } else {
@@ -886,7 +901,7 @@
         return BAD_VALUE;
     }
 
-    if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+    if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
         getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
         if (depthSizesMaximumResolution.empty()) {
             ALOGE("%s: No depth stream configurations for maximum resolution present",
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index c1c75c1..fbe99dd 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
     status_t deleteInternalStreams() override;
     status_t configureStream() override;
     status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -68,6 +69,9 @@
     static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
 
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats*) override {};
+
 protected:
 
     bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 54cc27a..694aff3 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -120,8 +120,8 @@
         camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
-        int /*streamSetId*/, bool /*isShared*/) {
-
+        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
     sp<CameraDeviceBase> device = mDevice.promote();
     if (!device.get()) {
         ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -147,7 +147,14 @@
 
     res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
             kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
-            sensorPixelModesUsed,surfaceIds);
+            sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+            /*isShared*/false, /*isMultiResolution*/false,
+            /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            OutputConfiguration::MIRROR_MODE_AUTO,
+            colorSpace,
+            useReadoutTimestamp);
     if (res == OK) {
         mAppSegmentSurfaceId = (*surfaceIds)[0];
     } else {
@@ -183,7 +190,14 @@
     int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
-            rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
+            rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+            /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            OutputConfiguration::MIRROR_MODE_AUTO,
+            colorSpace,
+            useReadoutTimestamp);
     if (res == OK) {
         mMainImageSurfaceId = sourceSurfaceId[0];
         mMainImageStreamId = *id;
@@ -931,7 +945,7 @@
                 tempOutputFile.str().c_str(), errno);
         return NO_INIT;
     }
-    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
+    inputFrame.muxer = MediaMuxer::create(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
     if (inputFrame.muxer == nullptr) {
         ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                 __FUNCTION__, inputFrame.fileFd);
@@ -1161,11 +1175,13 @@
     inputFrame.fileFd = -1;
 
     // Fill in HEIC header
-    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
-    CameraBlob *blobHeader = (CameraBlob *)header;
     // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
-    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
-    blobHeader->blobSizeBytes = fSize;
+    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
+    CameraBlob blobHeader = {
+        .blobId = static_cast<CameraBlobId>(0x00FE),
+        .blobSizeBytes = static_cast<int32_t>(fSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
 
     res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
     if (res != OK) {
@@ -1599,7 +1615,7 @@
     return OK;
 }
 
-void HeicCompositeStream::initCopyRowFunction(int32_t width)
+void HeicCompositeStream::initCopyRowFunction([[maybe_unused]] int32_t width)
 {
     using namespace libyuv;
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 1077a1f..602a247 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -47,8 +47,8 @@
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            std::vector<int> *surfaceIds,
-            int streamSetId, bool isShared) override;
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
 
     status_t deleteInternalStreams() override;
 
@@ -75,6 +75,9 @@
     static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
 
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats*) override {};
+
     static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
             bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
     static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..6588470
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,879 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <ultrahdr/jpegr.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+        CompositeStream(device, cb),
+        mBlobStreamId(-1),
+        mBlobSurfaceId(-1),
+        mP010StreamId(-1),
+        mP010SurfaceId(-1),
+        mBlobWidth(0),
+        mBlobHeight(0),
+        mP010BufferAcquired(false),
+        mBlobBufferAcquired(false),
+        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+        mOutputStreamUseCase(0),
+        mFirstRequestLatency(-1),
+        mProducerListener(new ProducerListener()),
+        mMaxJpegBufferSize(-1),
+        mUHRMaxJpegBufferSize(-1),
+        mStaticInfo(device->info()) {
+    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+    if (entry.count > 0) {
+        mMaxJpegBufferSize = entry.data.i32[0];
+    } else {
+        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+    }
+
+    mUHRMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*ultraHighResolution*/true);
+    mDefaultMaxJpegSize =
+            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+                    /*isUltraHighResolution*/false);
+
+    mUHRMaxJpegBufferSize =
+        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+                mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+    mBlobConsumer.clear();
+    mBlobSurface.clear();
+    mBlobStreamId = -1;
+    mBlobSurfaceId = -1;
+    mP010Consumer.clear();
+    mP010Surface.clear();
+    mP010Consumer = nullptr;
+    mP010Surface = nullptr;
+}
+
+void JpegRCompositeStream::compilePendingInputLocked() {
+    CpuConsumer::LockedBuffer imgBuffer;
+
+    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+        auto it = mInputJpegBuffers.begin();
+        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputJpegBuffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mBlobConsumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+            mBlobBufferAcquired = true;
+        }
+        mInputJpegBuffers.erase(it);
+    }
+
+    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+        auto it = mInputP010Buffers.begin();
+        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+        if (res == NOT_ENOUGH_DATA) {
+            // Can not lock any more buffers.
+            break;
+        } else if (res != OK) {
+            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            mPendingInputFrames[*it].error = true;
+            mInputP010Buffers.erase(it);
+            continue;
+        }
+
+        if (*it != imgBuffer.timestamp) {
+            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+        }
+
+        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+                (mPendingInputFrames[imgBuffer.timestamp].error)) {
+            mP010Consumer->unlockBuffer(imgBuffer);
+        } else {
+            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+            mP010BufferAcquired = true;
+        }
+        mInputP010Buffers.erase(it);
+    }
+
+    while (!mCaptureResults.empty()) {
+        auto it = mCaptureResults.begin();
+        // Negative timestamp indicates that something went wrong during the capture result
+        // collection process.
+        if (it->first >= 0) {
+            auto frameNumber = std::get<0>(it->second);
+            mPendingInputFrames[it->first].frameNumber = frameNumber;
+            mPendingInputFrames[it->first].result = std::get<1>(it->second);
+            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
+        }
+        mCaptureResults.erase(it);
+    }
+
+    while (!mFrameNumberMap.empty()) {
+        auto it = mFrameNumberMap.begin();
+        auto frameNumber = it->first;
+        mPendingInputFrames[it->second].frameNumber = frameNumber;
+        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+        if (requestTimeIt != mRequestTimeMap.end()) {
+            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+            mRequestTimeMap.erase(requestTimeIt);
+        }
+        mFrameNumberMap.erase(it);
+    }
+
+    auto it = mErrorFrameNumbers.begin();
+    while (it != mErrorFrameNumbers.end()) {
+        bool frameFound = false;
+        for (auto &inputFrame : mPendingInputFrames) {
+            if (inputFrame.second.frameNumber == *it) {
+                inputFrame.second.error = true;
+                frameFound = true;
+                break;
+            }
+        }
+
+        if (frameFound) {
+            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+                    0 /*captureLatencyMs*/);
+            it = mErrorFrameNumbers.erase(it);
+        } else {
+            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+                    *it);
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+    if (currentTs == nullptr) {
+        return false;
+    }
+
+    bool newInputAvailable = false;
+    for (const auto& it : mPendingInputFrames) {
+        if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+                (it.second.requestTimeNs != -1) &&
+                ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+                (it.first < *currentTs)) {
+            *currentTs = it.first;
+            newInputAvailable = true;
+        }
+    }
+
+    return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+    int64_t ret = -1;
+    if (currentTs == nullptr) {
+        return ret;
+    }
+
+    for (const auto& it : mPendingInputFrames) {
+        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+            *currentTs = it.first;
+            ret = it.second.frameNumber;
+        }
+    }
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+    status_t res;
+    sp<ANativeWindow> outputANW = mOutputSurface;
+    ANativeWindowBuffer *anb;
+    int fenceFd;
+    void *dstBuffer;
+
+    size_t maxJpegRBufferSize = 0;
+    if (mMaxJpegBufferSize > 0) {
+        // If this is an ultra high resolution sensor and the input frame size
+        // is > the default-resolution jpeg size, use the UHR jpeg buffer size.
+        if (mUHRMaxJpegSize.width != 0 &&
+                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+            maxJpegRBufferSize = mUHRMaxJpegBufferSize;
+        } else {
+            maxJpegRBufferSize = mMaxJpegBufferSize;
+        }
+    } else {
+        maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+    }
+
+    uint8_t jpegQuality = 100;
+    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+    if (entry.count > 0) {
+        jpegQuality = entry.data.u8[0];
+    }
+
+    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer dimensions"
+                " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
+        return res;
+    }
+
+    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return res;
+    }
+
+    if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
+        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+                maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return BAD_VALUE;
+    }
+
+    size_t actualJpegRSize = 0;
+    ultrahdr::jpegr_uncompressed_struct p010;
+    ultrahdr::jpegr_compressed_struct jpegR;
+    ultrahdr::JpegR jpegREncoder;
+
+    p010.height = inputFrame.p010Buffer.height;
+    p010.width = inputFrame.p010Buffer.width;
+    p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
+    p010.data = inputFrame.p010Buffer.data;
+    p010.chroma_data = inputFrame.p010Buffer.dataCb;
+    // Strides are expected to be in pixels not bytes
+    p010.luma_stride = inputFrame.p010Buffer.stride / 2;
+    p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;
+
+    jpegR.data = dstBuffer;
+    jpegR.maxLength = maxJpegRBufferSize;
+
+    ultrahdr::ultrahdr_transfer_function transferFunction;
+    switch (mP010DynamicRange) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
+            break;
+        default:
+            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
+    }
+
+    if (mSupportInternalJpeg) {
+        ultrahdr::jpegr_compressed_struct jpeg;
+
+        jpeg.data = inputFrame.jpegBuffer.data;
+        jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+                inputFrame.jpegBuffer.width);
+        if (jpeg.length == 0) {
+            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+                    __FUNCTION__);
+            jpeg.length = inputFrame.jpegBuffer.width;
+        }
+
+        if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
+        } else {
+            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
+        }
+
+        res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
+    } else {
+        const uint8_t* exifBuffer = nullptr;
+        size_t exifBufferSize = 0;
+        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+        utils->initializeEmpty();
+        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+                inputFrame.p010Buffer.height);
+        if (utils->generateApp1()) {
+            exifBuffer = utils->getApp1Buffer();
+            exifBufferSize = utils->getApp1Length();
+        } else {
+            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+        }
+
+        ultrahdr::jpegr_exif_struct exif;
+        exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
+        exif.length = exifBufferSize;
+
+        res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
+    }
+
+    if (res != OK) {
+        ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    actualJpegRSize = jpegR.length;
+
+    size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
+    if (finalJpegRSize > maxJpegRBufferSize) {
+        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+        return NO_MEMORY;
+    }
+
+    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+                getStreamId(), strerror(-res), res);
+        return res;
+    }
+
+    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
+    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+        (gb->getWidth() - sizeof(CameraBlob));
+    CameraBlob blobHeader = {
+        .blobId = CameraBlobId::JPEG,
+        .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+    if (inputFrame.requestTimeNs != -1) {
+        auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+        mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+        if (mFirstRequestLatency == -1) {
+            mFirstRequestLatency = captureLatency;
+        }
+    }
+    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+    return res;
+}
+
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+    if (inputFrame == nullptr) {
+        return;
+    }
+
+    if (inputFrame->p010Buffer.data != nullptr) {
+        mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+        inputFrame->p010Buffer.data = nullptr;
+        mP010BufferAcquired = false;
+    }
+
+    if (inputFrame->jpegBuffer.data != nullptr) {
+        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+        inputFrame->jpegBuffer.data = nullptr;
+        mBlobBufferAcquired = false;
+    }
+
+    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+        //TODO: Figure out correct requestId
+        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+        inputFrame->errorNotified = true;
+        mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
+    }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+    auto it = mPendingInputFrames.begin();
+    while (it != mPendingInputFrames.end()) {
+        if (it->first <= currentTs) {
+            releaseInputFrameLocked(&it->second);
+            it = mPendingInputFrames.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
+
+bool JpegRCompositeStream::threadLoop() {
+    int64_t currentTs = INT64_MAX;
+    bool newInputAvailable = false;
+
+    {
+        Mutex::Autolock l(mMutex);
+
+        if (mErrorState) {
+            // In case we landed in error state, return any pending buffers and
+            // halt all further processing.
+            compilePendingInputLocked();
+            releaseInputFramesLocked(currentTs);
+            return false;
+        }
+
+        while (!newInputAvailable) {
+            compilePendingInputLocked();
+            newInputAvailable = getNextReadyInputLocked(&currentTs);
+            if (!newInputAvailable) {
+                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+                if (failingFrameNumber >= 0) {
+                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+                    // possible for two internal stream buffers to fail. In such scenario the
+                    // composite stream should notify the client about a stream buffer error only
+                    // once and this information is kept within 'errorNotified'.
+                    // Any present failed input frames will be removed on a subsequent call to
+                    // 'releaseInputFramesLocked()'.
+                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+                    currentTs = INT64_MAX;
+                }
+
+                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+                if (ret == TIMED_OUT) {
+                    return true;
+                } else if (ret != OK) {
+                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+                            strerror(-ret), ret);
+                    return false;
+                }
+            }
+        }
+    }
+
+    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+    Mutex::Autolock l(mMutex);
+    if (res != OK) {
+        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+                currentTs, strerror(-res), res);
+        mPendingInputFrames[currentTs].error = true;
+    }
+
+    releaseInputFramesLocked(currentTs);
+
+    return true;
+}
+
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+    if (CameraProviderManager::kFrameworkJpegRDisabled) {
+        return false;
+    }
+    ANativeWindow *anw = surface.get();
+    status_t err;
+    int format;
+    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+        ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    int dataspace;
+    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+                err);
+        return false;
+    }
+
+    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
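+// Hypothetical caller sketch (not part of this change): code that owns the client-facing output
+// surface can probe it before deciding to wrap it in a composite stream, e.g.:
+//     if (JpegRCompositeStream::isJpegRCompositeStream(surface)) {
+//         sp<JpegRCompositeStream> stream = new JpegRCompositeStream(device, callbacks);
+//     }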
+
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+        int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+    if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+        return;
+    }
+
+    switch (dynamicProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+            break;
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+            *dynamicRange = dynamicProfile;
+            *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+            break;
+        default:
+            *dynamicRange = kP010DefaultDynamicRange;
+            *dataSpace = kP010DefaultDataSpace;
+    }
+
+}
+
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        std::vector<int> *surfaceIds,
+        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (!device.get()) {
+        ALOGE("%s: Invalid camera device!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
+    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+            mStaticInfo, mP010DynamicRange,
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = new Surface(producer);
+
+    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+            static_cast<android_dataspace>(mP010DataSpace), rotation,
+            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+            GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
+            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
+    if (ret == OK) {
+        mP010StreamId = *id;
+        mP010SurfaceId = (*surfaceIds)[0];
+        mOutputSurface = consumers[0];
+    } else {
+        return ret;
+    }
+
+    if (mSupportInternalJpeg) {
+        BufferQueue::createBufferQueue(&producer, &consumer);
+        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = new Surface(producer);
+        std::vector<int> blobSurfaceId;
+        ret = device->createStream(mBlobSurface, width, height, format,
+                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+                &blobSurfaceId,
+                /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+                /*isShared*/  false,
+                /*isMultiResolution*/ false,
+                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+                streamUseCase,
+                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+                /*colorSpace*/ colorSpace, useReadoutTimestamp);
+        if (ret == OK) {
+            mBlobSurfaceId = blobSurfaceId[0];
+        } else {
+            return ret;
+        }
+
+        ret = registerCompositeStreamListener(mBlobStreamId);
+        if (ret != OK) {
+            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+            return ret;
+        }
+    }
+
+    ret = registerCompositeStreamListener(getStreamId());
+    if (ret != OK) {
+        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+        return ret;
+    }
+
+    mOutputColorSpace = colorSpace;
+    mOutputStreamUseCase = streamUseCase;
+    mBlobWidth = width;
+    mBlobHeight = height;
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::configureStream() {
+    if (isRunning()) {
+        // Processing thread is already running, nothing more to do.
+        return NO_ERROR;
+    }
+
+    if (mOutputSurface.get() == nullptr) {
+        ALOGE("%s: No valid output surface set!", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    if (res != OK) {
+        ALOGE("%s: Unable to connect to native window for stream %d",
+                __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+            != OK) {
+        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+                mP010StreamId);
+        return res;
+    }
+
+    if ((res = native_window_set_usage(mOutputSurface.get(),
+            GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
+        ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
+                mP010StreamId);
+        return res;
+    }
+
+    int maxProducerBuffers;
+    ANativeWindow *anw = mP010Surface.get();
+    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    ANativeWindow *anwConsumer = mOutputSurface.get();
+    int maxConsumerBuffers;
+    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+                    &maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
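+    // The client-facing output queue is sized to cover the minimum undequeued buffers of both
+    // ends, so the processing thread can keep queueing results without stalling either side.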
+    if ((res = native_window_set_buffer_count(
+                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+        return res;
+    }
+
+    mSessionStatsBuilder.addStream(mP010StreamId);
+
+    run("JpegRCompositeStreamProc");
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+    // The 'CameraDeviceClient' parent will delete the P010 stream
+    requestExit();
+
+    auto ret = join();
+    if (ret != OK) {
+        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+                strerror(-ret), ret);
+    }
+
+    if (mBlobStreamId >= 0) {
+        // Camera devices may not be valid after switching to offline mode.
+        // In this case, all offline streams including internal composite streams
+        // are managed and released by the offline session.
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device.get() != nullptr) {
+            ret = device->deleteStream(mBlobStreamId);
+        }
+
+        mBlobStreamId = -1;
+    }
+
+    if (mOutputSurface != nullptr) {
+        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+        mOutputSurface.clear();
+    }
+
+    return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+    if (item.mDataSpace == kJpegDataSpace) {
+        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+                __func__, ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputJpegBuffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+        ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+                ns2ms(item.mTimestamp));
+
+        Mutex::Autolock l(mMutex);
+        if (!mErrorState) {
+            mInputP010Buffers.push_back(item.mTimestamp);
+            mInputReadyCondition.signal();
+        }
+    } else {
+        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+    }
+}
+
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
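+    // Capture requests aimed at the public composite stream are redirected here to the internal
+    // P010 stream (and, when supported, the internal jpeg blob stream).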
+    if (outputStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+        outputStreamIds->push_back(mP010StreamId);
+    }
+    (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+    if (mSupportInternalJpeg) {
+        if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+            outputStreamIds->push_back(mBlobStreamId);
+        }
+        (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+    }
+
+    if (currentStreamId != nullptr) {
+        *currentStreamId = mP010StreamId;
+    }
+
+    return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+        std::vector<int32_t>* compositeStreamIds /*out*/) {
+    if (compositeStreamIds == nullptr) {
+        return BAD_VALUE;
+    }
+
+    compositeStreamIds->push_back(mP010StreamId);
+    if (mSupportInternalJpeg) {
+        compositeStreamIds->push_back(mBlobStreamId);
+    }
+
+    return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+    // Processing can continue even in case of result errors.
+    // At the moment Jpeg/R composite stream processing relies mainly on static camera
+    // characteristics data. The actual result data can be used for the jpeg quality, but
+    // if it is absent we default to the maximum.
+    eraseResult(resultExtras.frameNumber);
+    mSessionStatsBuilder.incResultCounter(true /*dropped*/);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+    bool ret = false;
+    // Buffer errors concerning internal composite streams should not be directly visible to
+    // camera clients. They must only receive a single buffer error with the public composite
+    // stream id.
+    if ((resultExtras.errorStreamId == mP010StreamId) ||
+            (resultExtras.errorStreamId == mBlobStreamId)) {
+        flagAnErrorFrameNumber(resultExtras.frameNumber);
+        ret = true;
+    }
+
+    return ret;
+}
+
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& staticInfo,
+            std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+    if (compositeOutput == nullptr) {
+        return BAD_VALUE;
+    }
+
+    int64_t dynamicRange, dataSpace;
+    deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+    compositeOutput->clear();
+    compositeOutput->push_back({});
+    (*compositeOutput)[0].width = streamInfo.width;
+    (*compositeOutput)[0].height = streamInfo.height;
+    (*compositeOutput)[0].format = kP010PixelFormat;
+    (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+    (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+    (*compositeOutput)[0].colorSpace =
+        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+                streamInfo.dynamicRangeProfile,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+        compositeOutput->push_back({});
+        (*compositeOutput)[1].width = streamInfo.width;
+        (*compositeOutput)[1].height = streamInfo.height;
+        (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+        (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+        (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+        (*compositeOutput)[1].dynamicRangeProfile =
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+    }
+
+    return NO_ERROR;
+}
+
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+    if ((streamStats == nullptr) || (mFirstRequestLatency != -1)) {
+        return;
+    }
+
+    bool deviceError;
+    std::map<int, StreamStats> stats;
+    mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+            &deviceError, &stats);
+    if (stats.find(mP010StreamId) != stats.end()) {
+        streamStats->mWidth = mBlobWidth;
+        streamStats->mHeight = mBlobHeight;
+        streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+        streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+        streamStats->mDynamicRangeProfile = mP010DynamicRange;
+        streamStats->mColorSpace = mOutputColorSpace;
+        streamStats->mStreamUseCase = mOutputStreamUseCase;
+        streamStats->mStartLatencyMs = mFirstRequestLatency;
+        streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+        streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+                stats[mP010StreamId].mCaptureLatencyBins.end());
+        streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+                stats[mP010StreamId].mCaptureLatencyHistogram.end());
+    }
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..3dfed30
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
+class JpegRCompositeStream : public CompositeStream, public Thread,
+        public CpuConsumer::FrameAvailableListener {
+
+public:
+    JpegRCompositeStream(sp<CameraDeviceBase> device,
+            wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+    ~JpegRCompositeStream() override;
+
+    static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+    // CompositeStream overrides
+    status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+            bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            std::vector<int> *surfaceIds,
+            int streamSetId, bool isShared, int32_t colorSpace,
+            int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
+    status_t deleteInternalStreams() override;
+    status_t configureStream() override;
+    status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+            int32_t* /*out*/currentStreamId) override;
+    status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+    int getStreamId() override { return mP010StreamId; }
+
+    // CpuConsumer listener implementation
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // Return stream information about the internal camera streams
+    static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
+protected:
+
+    bool threadLoop() override;
+    bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+    void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
+    struct InputFrame {
+        CpuConsumer::LockedBuffer p010Buffer;
+        CpuConsumer::LockedBuffer jpegBuffer;
+        CameraMetadata            result;
+        bool                      error;
+        bool                      errorNotified;
+        int64_t                   frameNumber;
+        int32_t                   requestId;
+        nsecs_t                   requestTimeNs;
+
+        InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+            requestTimeNs(-1) { }
+    };
+
+    status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+    // Buffer/Results handling
+    void compilePendingInputLocked();
+    void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+    void releaseInputFramesLocked(int64_t currentTs);
+
+    // Find first complete and valid frame with smallest timestamp
+    bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+    // Find the next failing frame with the smallest timestamp and return its frame number
+    int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
+    static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+            int64_t* /*out*/dataSpace);
+
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+    static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+    static const auto kP010DefaultDynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static const auto kJpegRDataSpace =
+        aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
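+    // Output surfaces configured with this dataspace and a BLOB pixel format are treated as
+    // Jpeg/R composite streams, see isJpegRCompositeStream().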
+
+    bool                 mSupportInternalJpeg = false;
+    int64_t              mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+    int64_t              mP010DynamicRange =
+        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+    int                  mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+    size_t               mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>      mBlobConsumer, mP010Consumer;
+    bool                 mP010BufferAcquired, mBlobBufferAcquired;
+    sp<Surface>          mP010Surface, mBlobSurface, mOutputSurface;
+    int32_t              mOutputColorSpace;
+    int64_t              mOutputStreamUseCase;
+    nsecs_t              mFirstRequestLatency;
+    sp<ProducerListener> mProducerListener;
+
+    ssize_t              mMaxJpegBufferSize;
+    ssize_t              mUHRMaxJpegBufferSize;
+
+    camera3::Size        mDefaultMaxJpegSize;
+    camera3::Size        mUHRMaxJpegSize;
+
+    // Keep all incoming P010 buffer timestamps pending further processing.
+    std::vector<int64_t> mInputP010Buffers;
+
+    // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+    std::vector<int64_t> mInputJpegBuffers;
+
+    // Map of all input frames pending further processing.
+    std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+    const CameraMetadata mStaticInfo;
+
+    SessionStatsBuilder  mSessionStatsBuilder;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index dc5002b..6e10f30 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -37,9 +37,9 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "utils/CameraThreadState.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
+
 using namespace camera2;
 
 // Interface used by CameraService
@@ -48,6 +48,7 @@
 Camera2ClientBase<TClientBase>::Camera2ClientBase(
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         const String16& clientPackageName,
         bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
@@ -59,11 +60,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
+        bool overrideToPortrait,
         bool legacyClient):
         TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid),
+                clientUid, servicePid, overrideToPortrait),
         mSharedCameraCallbacks(remoteCallback),
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
@@ -101,11 +104,6 @@
           TClientBase::mCameraIdStr.string());
     status_t res;
 
-    // Verify ops permissions
-    res = TClientBase::startCameraOps();
-    if (res != OK) {
-        return res;
-    }
     IPCTransport providerTransport = IPCTransport::INVALID;
     res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr.string(),
             &providerTransport);
@@ -115,13 +113,15 @@
     switch (providerTransport) {
         case IPCTransport::HIDL:
             mDevice =
-                    new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                    new HidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
-                    new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                    new AidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -141,8 +141,30 @@
         return res;
     }
 
+    // Verify ops permissions
+    res = TClientBase::startCameraOps();
+    if (res != OK) {
+        TClientBase::finishCameraOps();
+        return res;
+    }
+
     wp<NotificationListener> weakThis(this);
     res = mDevice->setNotifyCallback(weakThis);
+    if (res != OK) {
+        ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+                __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
+        return res;
+    }
+
+    /** Start watchdog thread */
+    mCameraServiceWatchdog = new CameraServiceWatchdog(TClientBase::mCameraIdStr,
+            mCameraServiceProxyWrapper);
+    res = mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+    if (res != OK) {
+        ALOGE("%s: Unable to start camera service watchdog thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
 
     return OK;
 }
@@ -155,8 +177,13 @@
 
     disconnect();
 
-    ALOGI("Closed Camera %s. Client was: %s (PID %d, UID %u)",
-            TClientBase::mCameraIdStr.string(),
+    if (mCameraServiceWatchdog != NULL) {
+        mCameraServiceWatchdog->requestExit();
+        mCameraServiceWatchdog.clear();
+    }
+
+    ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
+            __FUNCTION__, TClientBase::mCameraIdStr.string(),
             String8(TClientBase::mClientPackageName).string(),
             mInitialClientPid, TClientBase::mClientUid);
 }
@@ -238,9 +265,24 @@
 
 // ICameraClient2BaseUser interface
 
-
 template <typename TClientBase>
 binder::Status Camera2ClientBase<TClientBase>::disconnect() {
+    if (mCameraServiceWatchdog != nullptr && mDevice != nullptr) {
+        // The disconnect timer should exceed getExpectedInFlightDuration, since that duration
+        // bounds the error-handling methods in the disconnect sequence and they must be allowed
+        // to execute first.
+        uint64_t maxExpectedDuration =
+                ns2ms(mDevice->getExpectedInFlightDuration() + kBufferTimeDisconnectNs);
+
+        // Initialization from the HAL succeeded, so time the disconnect call.
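+        // WATCH_CUSTOM_TIMER is given the cycle count (maxExpectedDuration / kCycleLengthMs)
+        // and the cycle length, so the overall allowance is roughly maxExpectedDuration ms.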
+        return mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(disconnectImpl(),
+                maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
+    }
+    return disconnectImpl();
+}
+
+template <typename TClientBase>
+binder::Status Camera2ClientBase<TClientBase>::disconnectImpl() {
     ATRACE_CALL();
     ALOGD("Camera %s: start to disconnect", TClientBase::mCameraIdStr.string());
     Mutex::Autolock icl(mBinderSerializationLock);
@@ -317,6 +359,29 @@
 }
 
 template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
+    // We're only interested in this notification if overrideToPortrait is turned on.
+    if (!TClientBase::mOverrideToPortrait) {
+        return;
+    }
+
+    String8 physicalId8(physicalId.c_str());
+    auto physicalCameraMetadata = mDevice->infoPhysical(physicalId8);
+    auto orientationEntry = physicalCameraMetadata.find(ANDROID_SENSOR_ORIENTATION);
+
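+    // A sensor orientation of 0 or 180 degrees indicates a landscape-mounted sensor, so the
+    // portrait override rotates and crops by 90 degrees; otherwise it is reset to NONE.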
+    if (orientationEntry.count == 1) {
+        int orientation = orientationEntry.data.i32[0];
+        int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
+
+        if (orientation == 0 || orientation == 180) {
+            rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+        }
+
+        static_cast<TClientBase *>(this)->setRotateAndCropOverride(rotateAndCropMode);
+    }
+}
+
+template <typename TClientBase>
 status_t Camera2ClientBase<TClientBase>::notifyActive(float maxPreviewFps) {
     if (!mDeviceActive) {
         status_t res = TClientBase::startCameraStreamingOps();
@@ -325,7 +390,7 @@
                     TClientBase::mCameraIdStr.string(), res);
             return res;
         }
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
+        mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
     }
     mDeviceActive = true;
 
@@ -344,7 +409,7 @@
             ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
                     TClientBase::mCameraIdStr.string(), res);
         }
-        CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+        mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
                 streamStats);
     }
@@ -354,50 +419,38 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
-                                                   nsecs_t timestamp) {
-    (void)resultExtras;
-    (void)timestamp;
-
+void Camera2ClientBase<TClientBase>::notifyShutter(
+                [[maybe_unused]] const CaptureResultExtras& resultExtras,
+                [[maybe_unused]] nsecs_t timestamp) {
     ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
             __FUNCTION__, resultExtras.requestId, timestamp);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoFocus(uint8_t newState,
-                                                     int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+                                                     [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autofocus state now %d, last trigger %d",
           __FUNCTION__, newState, triggerId);
 
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoExposure(uint8_t newState,
-                                                        int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+                                                        [[maybe_unused]] int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(uint8_t newState,
-                                                            int triggerId) {
-    (void)newState;
-    (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(
+                [[maybe_unused]] uint8_t newState,
+                [[maybe_unused]] int triggerId) {
     ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyPrepared(int streamId) {
-    (void)streamId;
-
+void Camera2ClientBase<TClientBase>::notifyPrepared([[maybe_unused]] int streamId) {
     ALOGV("%s: Stream %d now prepared",
             __FUNCTION__, streamId);
 }
@@ -409,9 +462,8 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
-    (void)lastFrameNumber;
-
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(
+            [[maybe_unused]] long lastFrameNumber) {
     ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
             __FUNCTION__, lastFrameNumber);
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index b0d1c3f..5cf3033 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -19,6 +19,8 @@
 
 #include "common/CameraDeviceBase.h"
 #include "camera/CaptureResult.h"
+#include "utils/CameraServiceProxyWrapper.h"
+#include "CameraServiceWatchdog.h"
 
 namespace android {
 
@@ -47,6 +49,7 @@
     // TODO: too many params, move into a ClientArgs<T>
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
+                      std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
                       const String16& clientPackageName,
                       bool systemNativeClient,
                       const std::optional<String16>& clientFeatureId,
@@ -58,6 +61,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
+                      bool overrideToPortrait,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
@@ -73,6 +77,7 @@
 
     virtual void          notifyError(int32_t errorCode,
                                       const CaptureResultExtras& resultExtras);
+    virtual void          notifyPhysicalCameraChange(const std::string &physicalId) override;
     // Returns errors on app ops permission failures
     virtual status_t      notifyActive(float maxPreviewFps);
     virtual void          notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
@@ -131,10 +136,14 @@
 
 protected:
 
+    // Used for watchdog timeout to monitor disconnect
+    static const nsecs_t kBufferTimeDisconnectNs = 3000000000; // 3 sec.
+
     // The PID provided in the constructor call
     pid_t mInitialClientPid;
     bool mOverrideForPerfClass = false;
     bool mLegacyClient = false;
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
 
     virtual sp<IBinder> asBinderWrapper() {
         return IInterface::asBinder(this);
@@ -173,6 +182,12 @@
 private:
     template<typename TProviderPtr>
     status_t              initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
+
+    binder::Status disconnectImpl();
+
+    // Watchdog thread
+    sp<CameraServiceWatchdog> mCameraServiceWatchdog;
+
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 7e2f93c..919108d 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -113,6 +113,8 @@
      */
     virtual const CameraMetadata& infoPhysical(const String8& physicalId) const = 0;
 
+    virtual bool isCompositeJpegRDisabled() const { return false; };
+
     struct PhysicalCameraSettings {
         std::string cameraId;
         CameraMetadata metadata;
@@ -126,6 +128,9 @@
         int32_t mOriginalTestPatternMode = 0;
         int32_t mOriginalTestPatternData[4] = {};
 
+        // Original value of SETTINGS_OVERRIDE so that it can be restored if the
+        // camera service isn't overwriting the app value.
+        int32_t mOriginalSettingsOverride = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
     };
     typedef List<PhysicalCameraSettings> PhysicalCameraSettingsList;
 
@@ -192,7 +197,10 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false)
+            = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -213,7 +221,10 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false)
+            = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -235,11 +246,13 @@
         bool dataSpaceOverridden;
         android_dataspace originalDataSpace;
         int64_t dynamicRangeProfile;
+        int32_t colorSpace;
 
         StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
                 dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
                 originalDataSpace(HAL_DATASPACE_UNKNOWN),
-                dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+                dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+                colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
         /**
          * Check whether the format matches the current or the original one in case
          * it got overridden.
@@ -434,6 +447,14 @@
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
 
     /**
+     * Set the current behavior for the AUTOFRAMING control when in AUTO.
+     *
+     * The value must be one of the AUTOFRAMING_* values besides AUTO.
+     */
+    virtual status_t setAutoframingAutoBehavior(
+            camera_metadata_enum_android_control_autoframing_t autoframingValue) = 0;
+
+    /**
      * Whether camera muting (producing black-only output) is supported.
      *
      * Calling setCameraMute(true) when this returns false will return an
@@ -449,16 +470,43 @@
     virtual status_t setCameraMute(bool enabled) = 0;
 
     /**
+     * Whether the camera device supports zoom override.
+     */
+    virtual bool supportsZoomOverride() = 0;
+
+    // Set/reset zoom override
+    virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
+    /**
+     * Enable/disable camera service watchdog
+     */
+    virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
+
+    /**
      * Get the status tracker of the camera device
      */
     virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
 
     /**
+     * Whether the device is in an error state
+     */
+    virtual bool hasDeviceError() = 0;
+
+    /**
      * Set bitmask for image dump flag
      */
     void setImageDumpMask(int mask) { mImageDumpMask = mask; }
 
     /**
+     * Set stream use case overrides
+     */
+    void setStreamUseCaseOverrides(const std::vector<int64_t>& useCaseOverrides) {
+          mStreamUseCaseOverrides = useCaseOverrides;
+    }
+
+    void clearStreamUseCaseOverrides() {}
+
+    /**
      * The injection camera session to replace the internal camera
      * session.
      */
@@ -472,6 +520,7 @@
 
 protected:
     bool mImageDumpMask = 0;
+    std::vector<int64_t> mStreamUseCaseOverrides;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index f39b92a..63abcf0 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -40,6 +40,10 @@
     // Required for API 1 and 2
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras &resultExtras) = 0;
+
+    // Optional for API 1 and 2
+    virtual void notifyPhysicalCameraChange(const std::string &/*physicalId*/) {}
+
     // May return an error since it checks appops
     virtual status_t notifyActive(float maxPreviewFps) = 0;
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index abaea66..fac6e4e 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "CameraProviderManager"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -40,6 +42,7 @@
 #include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <utils/Trace.h>
+#include <ui/PublicFormat.h>
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
@@ -59,6 +62,8 @@
 } // anonymous namespace
 
 const float CameraProviderManager::kDepthARTolerance = .1f;
+const bool CameraProviderManager::kFrameworkJpegRDisabled =
+        property_get_bool("ro.camera.disableJpegR", false);
 
 CameraProviderManager::HidlServiceInteractionProxyImpl
 CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -197,12 +202,17 @@
     return std::make_pair(systemCameraCount, publicCameraCount);
 }
 
-std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     std::vector<std::string> deviceIds;
     for (auto& provider : mProviders) {
         for (auto& id : provider->mUniqueCameraIds) {
             deviceIds.push_back(id);
+            if (unavailablePhysicalIds != nullptr &&
+                    provider->mUnavailablePhysicalCameras.count(id) > 0) {
+                (*unavailablePhysicalIds)[id] = provider->mUnavailablePhysicalCameras.at(id);
+            }
         }
     }
     return deviceIds;
@@ -306,6 +316,18 @@
     return deviceInfo->supportNativeZoomRatio();
 }
 
+bool CameraProviderManager::isCompositeJpegRDisabled(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    return isCompositeJpegRDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeJpegRDisabledLocked(const std::string &id) const {
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return false;
+
+    return deviceInfo->isCompositeJpegRDisabled();
+}
+
 status_t CameraProviderManager::getResourceCost(const std::string &id,
         CameraResourceCost* cost) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -318,13 +340,13 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        hardware::CameraInfo* info) const {
+        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(info);
+    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -356,9 +378,11 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
+    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
+            overrideToPortrait);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -579,7 +603,11 @@
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     mDeviceState = newState;
     status_t res = OK;
-    for (auto& provider : mProviders) {
+    // Make a copy of mProviders because we unlock mInterfaceMutex temporarily
+    // within the loop. It's possible that during the time mInterfaceMutex is
+    // unlocked, mProviders has changed.
+    auto providers = mProviders;
+    for (auto& provider : providers) {
         ALOGV("%s: Notifying %s for new state 0x%" PRIx64,
                 __FUNCTION__, provider->mProviderName.c_str(), newState);
         // b/199240726 Camera providers can for example try to add/remove
@@ -839,9 +867,6 @@
 
 void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
         const std::vector<std::string> &devices) {
-
-    sp<StatusListener> listener = mManager->getStatusListener();
-
     for (auto& device : devices) {
         std::string id;
         status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
@@ -856,38 +881,22 @@
             mProviderName.c_str(), mDevices.size());
 
     // Process cached status callbacks
-    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
-            std::make_unique<std::vector<CameraStatusInfoT>>();
     {
         std::lock_guard<std::mutex> lock(mInitLock);
 
         for (auto& statusInfo : mCachedStatus) {
             std::string id, physicalId;
-            status_t res = OK;
             if (statusInfo.isPhysicalCameraStatus) {
-                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
                     statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
             } else {
-                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
-            }
-            if (res == OK) {
-                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
-                        id.c_str(), physicalId.c_str(), statusInfo.status);
+                cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
             }
         }
         mCachedStatus.clear();
 
         mInitialized = true;
     }
-
-    // The cached status change callbacks cannot be fired directly from this
-    // function, due to same-thread deadlock trying to acquire mInterfaceMutex
-    // twice.
-    if (listener != nullptr) {
-        mInitialStatusCallbackFuture = std::async(std::launch::async,
-                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
-                listener, std::move(cachedStatus));
-    }
 }
 
 CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
@@ -1007,19 +1016,21 @@
     auto availableDurations = ch.find(tag);
     if (availableDurations.count > 0) {
         // Duration entry contains 4 elements (format, width, height, duration)
-        for (size_t i = 0; i < availableDurations.count; i += 4) {
-            for (const auto& size : sizes) {
-                int64_t width = std::get<0>(size);
-                int64_t height = std::get<1>(size);
+        for (const auto& size : sizes) {
+            int64_t width = std::get<0>(size);
+            int64_t height = std::get<1>(size);
+            for (size_t i = 0; i < availableDurations.count; i += 4) {
                 if ((availableDurations.data.i64[i] == format) &&
                         (availableDurations.data.i64[i+1] == width) &&
                         (availableDurations.data.i64[i+2] == height)) {
                     durations->push_back(availableDurations.data.i64[i+3]);
+                    break;
                 }
             }
         }
     }
 }
+
 void CameraProviderManager::ProviderInfo::DeviceInfo3::getSupportedDynamicDepthDurations(
         const std::vector<int64_t>& depthDurations, const std::vector<int64_t>& blobDurations,
         std::vector<int64_t> *dynamicDepthDurations /*out*/) {
@@ -1079,6 +1090,212 @@
     }
 }
 
+bool CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+        const CameraMetadata& deviceInfo, int64_t profile, int64_t concurrentProfile) {
+    auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (entry.count == 0) {
+        return false;
+    }
+
+    const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+    if (it == entry.data.u8 + entry.count) {
+        return false;
+    }
+
+    entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+    if (entry.count == 0 || ((entry.count % 3) != 0)) {
+        return false;
+    }
+
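+    // Entries are laid out in groups of three, starting with the profile and its concurrency
+    // constraint bitmap; a zero bitmap is treated as having no constraints, i.e. the profile
+    // can be captured concurrently with any other.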
+    for (size_t i = 0; i < entry.count; i += 3) {
+        if (entry.data.i64[i] == profile) {
+            if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] & concurrentProfile)) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
+    if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
+        return OK;
+    }
+
+    const int32_t scalerSizesTag =
+              SessionConfigurationUtils::getAppropriateModeTag(
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+    const int32_t scalerStallDurationsTag =
+                 SessionConfigurationUtils::getAppropriateModeTag(
+                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+    const int32_t jpegRSizesTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t jpegRStallDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                    ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS, maxResolution);
+    const int32_t jpegRMinFrameDurationsTag =
+            SessionConfigurationUtils::getAppropriateModeTag(
+                 ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
+
+    auto& c = mCameraCharacteristics;
+    std::vector<int32_t> supportedChTags;
+    auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    if (chTags.count == 0) {
+        ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
+    auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (capabilities.count == 0) {
+        ALOGE("%s: Supported camera capabilities list is empty!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    auto end = capabilities.data.u8 + capabilities.count;
+    bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+    if (!isTenBitOutputSupported) {
+        // No 10-bit support, nothing more to do.
+        return OK;
+    }
+
+    if (!isConcurrentDynamicRangeCaptureSupported(c,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) &&
+            !property_get_bool("ro.camera.enableCompositeAPI0JpegR", false)) {
+        // The API0, P010-only Jpeg/R path is meant to be used only as a reference due to its
+        // possible impact on quality and performance.
+        // This data path will be turned off by default and individual device builds must enable
+        // 'ro.camera.enableCompositeAPI0JpegR' in order to experiment using it.
+        mCompositeJpegRDisabled = true;
+        return OK;
+    }
+
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
+    getSupportedSizes(c, scalerSizesTag,
+            static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+    auto it = supportedP010Sizes.begin();
+    while (it != supportedP010Sizes.end()) {
+        if (std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+                supportedBlobSizes.end()) {
+            it = supportedP010Sizes.erase(it);
+        } else {
+            it++;
+        }
+    }
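+    // Only sizes supported by both internal streams remain: the P010 (10-bit) stream and the
+    // jpeg BLOB stream that back the Jpeg/R composite output.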
+    if (supportedP010Sizes.empty()) {
+        // Nothing to do in this case.
+        return OK;
+    }
+
+    std::vector<int32_t> jpegREntries;
+    for (const auto& it : supportedP010Sizes) {
+        int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+                static_cast<int32_t> (std::get<1>(it)),
+                ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT };
+        jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
+    }
+
+    std::vector<int64_t> blobMinDurations, blobStallDurations;
+    std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
+
+    // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
+    // durations.
+    getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobMinDurations);
+    getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+            supportedP010Sizes, &blobStallDurations);
+    if (blobStallDurations.empty() || blobMinDurations.empty() ||
+            supportedP010Sizes.size() != blobMinDurations.size() ||
+            blobMinDurations.size() != blobStallDurations.size()) {
+        ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+                "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
+                blobStallDurations.size(), supportedP010Sizes.size());
+        return BAD_VALUE;
+    }
+
+    auto itDuration = blobMinDurations.begin();
+    auto itSize = supportedP010Sizes.begin();
+    while (itDuration != blobMinDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    itDuration = blobStallDurations.begin();
+    itSize = supportedP010Sizes.begin();
+    while (itDuration != blobStallDurations.end()) {
+        int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+                static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+        jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
+        itDuration++; itSize++;
+    }
+
+    supportedChTags.reserve(chTags.count + 3);
+    supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+            chTags.data.i32 + chTags.count);
+    supportedChTags.push_back(jpegRSizesTag);
+    supportedChTags.push_back(jpegRMinFrameDurationsTag);
+    supportedChTags.push_back(jpegRStallDurationsTag);
+    c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
+    c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
+    c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
+    c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+            supportedChTags.size());
+
+    auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+    if (colorSpaces.count > 0 && !maxResolution) {
+        bool displayP3Support = false;
+        int64_t dynamicRange = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        for (size_t i = 0; i < colorSpaces.count; i += 3) {
+            auto colorSpace = colorSpaces.data.i64[i];
+            auto format = colorSpaces.data.i64[i+1];
+            bool formatMatch = (format == static_cast<int64_t>(PublicFormat::JPEG)) ||
+                    (format == static_cast<int64_t>(PublicFormat::UNKNOWN));
+            bool colorSpaceMatch =
+                colorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+            if (formatMatch && colorSpaceMatch) {
+                displayP3Support = true;
+            }
+
+            // Jpeg/R will support the same dynamic range profiles as P010
+            if (format == static_cast<int64_t>(PublicFormat::YCBCR_P010)) {
+                dynamicRange |= colorSpaces.data.i64[i+2];
+            }
+        }
+        if (displayP3Support) {
+            std::vector<int64_t> supportedColorSpaces;
+            // Jpeg/R must support the default system as well as the display P3 color space
+            supportedColorSpaces.reserve(colorSpaces.count + 3*2);
+            supportedColorSpaces.insert(supportedColorSpaces.end(), colorSpaces.data.i64,
+                    colorSpaces.data.i64 + colorSpaces.count);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+
+            supportedColorSpaces.push_back(static_cast<int64_t>(
+                    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+            supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+            supportedColorSpaces.push_back(dynamicRange);
+            c.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+                    supportedColorSpaces.data(), supportedColorSpaces.size());
+        }
+    }
+
+    return OK;
+}
+
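The metadata written by deriveJpegRTags() uses flattened layouts: the stream configuration and duration tags hold groups of four values ({format, width, height, output flag or duration}), and the color space profile map holds groups of three ({colorSpace, format, dynamicRangeProfile bitmap}). A minimal reader sketch, not part of this change and assuming only those layouts:

#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <vector>

// Walk a flattened duration array: { format, width, height, duration_ns } per entry.
void dumpDurations(const std::vector<int64_t>& entries) {
    for (size_t i = 0; i + 3 < entries.size(); i += 4) {
        std::printf("format=%" PRId64 " %" PRId64 "x%" PRId64 " -> %" PRId64 " ns\n",
                entries[i], entries[i + 1], entries[i + 2], entries[i + 3]);
    }
}

// Walk the color space profile map: { colorSpace, format, dynamicRange } per entry.
void dumpColorSpaces(const std::vector<int64_t>& entries) {
    for (size_t i = 0; i + 2 < entries.size(); i += 3) {
        std::printf("colorSpace=%" PRId64 " format=%" PRId64 " dynamicRange=%" PRId64 "\n",
                entries[i], entries[i + 1], entries[i + 2]);
    }
}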
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
         bool maxResolution) {
     const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
@@ -1363,6 +1580,19 @@
     return res;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAutoframingTags() {
+    status_t res = OK;
+    auto& c = mCameraCharacteristics;
+
+    auto availableAutoframingEntry = c.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+    if (availableAutoframingEntry.count == 0) {
+        uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE;
+        res = c.update(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+                &defaultAutoframingEntry, 1);
+    }
+    return res;
+}
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
@@ -1408,6 +1638,27 @@
     return res;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addReadoutTimestampTag(
+        bool readoutTimestampSupported) {
+    status_t res = OK;
+    auto& c = mCameraCharacteristics;
+
+    auto entry = c.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
+    if (entry.count != 0) {
+        ALOGE("%s: CameraCharacteristics must not contain ANDROID_SENSOR_READOUT_TIMESTAMP!",
+                __FUNCTION__);
+    }
+
+    uint8_t readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
+    if (readoutTimestampSupported) {
+        readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
+    }
+
+    res = c.update(ANDROID_SENSOR_READOUT_TIMESTAMP, &readoutTimestamp, 1);
+
+    return res;
+}
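addAutoframingTags() and addReadoutTimestampTag() follow the same pattern: publish a default value for a characteristics key whenever the HAL leaves it out. A hypothetical client-side check for the readout timestamp key (sketch only; assumes the CameraMetadata class and the generated metadata tag definitions are in scope):

bool supportsReadoutTimestamp(const CameraMetadata& staticInfo) {
    // Defaults to "not supported" when the key is absent or set to NOT_SUPPORTED.
    camera_metadata_ro_entry entry = staticInfo.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
    return entry.count > 0 &&
            entry.data.u8[0] == ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
}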
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys(
         CameraMetadata& c, const std::vector<uint32_t>& keys, uint32_t keyTag) {
     status_t res = OK;
@@ -1845,13 +2096,12 @@
 CameraProviderManager::ProviderInfo::ProviderInfo(
         const std::string &providerName,
         const std::string &providerInstance,
-        CameraProviderManager *manager) :
+        [[maybe_unused]] CameraProviderManager *manager) :
         mProviderName(providerName),
         mProviderInstance(providerInstance),
         mProviderTagid(generateVendorTagId(providerName)),
         mUniqueDeviceCount(0),
         mManager(manager) {
-    (void) mManager;
 }
 
 const std::string& CameraProviderManager::ProviderInfo::getType() const {
@@ -1936,6 +2186,7 @@
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         if ((*it)->mId == id) {
             mUniqueCameraIds.erase(id);
+            mUnavailablePhysicalCameras.erase(id);
             if ((*it)->isAPI1Compatible()) {
                 mUniqueAPI1CompatibleCameraIds.erase(std::remove(
                     mUniqueAPI1CompatibleCameraIds.begin(),
@@ -2006,7 +2257,9 @@
         dprintf(fd, "    Has a flash unit: %s\n",
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
-        status_t res = device->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2016,7 +2269,8 @@
             dprintf(fd, "    Orientation: %d\n", info.orientation);
         }
         CameraMetadata info2;
-        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2);
+        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
+                /*overrideToPortrait*/false);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2203,6 +2457,15 @@
         return BAD_VALUE;
     }
 
+    if (mUnavailablePhysicalCameras.count(cameraId) == 0) {
+        mUnavailablePhysicalCameras.emplace(cameraId, std::set<std::string>{});
+    }
+    if (newStatus != CameraDeviceStatus::PRESENT) {
+        mUnavailablePhysicalCameras[cameraId].insert(physicalCameraDeviceName);
+    } else {
+        mUnavailablePhysicalCameras[cameraId].erase(physicalCameraDeviceName);
+    }
+
     *id = cameraId;
     *physicalId = physicalCameraDeviceName.c_str();
     return OK;
@@ -2261,20 +2524,6 @@
     }
 }
 
-void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
-        sp<StatusListener> listener,
-        std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
-    for (auto& statusInfo : *cachedStatus) {
-        if (statusInfo.isPhysicalCameraStatus) {
-            listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
-                    String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
-        } else {
-            listener->onDeviceStatusChanged(
-                    String8(statusInfo.cameraId.c_str()), statusInfo.status);
-        }
-    }
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -2289,10 +2538,15 @@
             (mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
         mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
                 &mDeviceStateOrientationMap[newState], 1);
+        if (mCameraCharNoPCOverride.get() != nullptr) {
+            mCameraCharNoPCOverride->update(ANDROID_SENSOR_ORIENTATION,
+                &mDeviceStateOrientationMap[newState], 1);
+        }
     }
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
+        bool overrideToPortrait, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
@@ -2323,6 +2577,17 @@
         return NAME_NOT_FOUND;
     }
 
+    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+        *portraitRotation = 90;
+        if (info->facing == hardware::CAMERA_FACING_FRONT) {
+            info->orientation = (360 + info->orientation - 90) % 360;
+        } else {
+            info->orientation = (360 + info->orientation + 90) % 360;
+        }
+    } else {
+        *portraitRotation = 0;
+    }
+
     return OK;
 }
 bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
@@ -2348,7 +2613,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics) const {
+        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2357,6 +2622,37 @@
         *characteristics = mCameraCharacteristics;
     }
 
+    if (overrideToPortrait) {
+        const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
+        const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
+        uint8_t lensFacing = lensFacingEntry.data.u8[0];
+        if (lensFacingEntry.count > 0 && sensorOrientationEntry.count > 0) {
+            int32_t sensorOrientation = sensorOrientationEntry.data.i32[0];
+            int32_t newSensorOrientation = sensorOrientation;
+
+            if (sensorOrientation == 0 || sensorOrientation == 180) {
+                if (lensFacing == ANDROID_LENS_FACING_FRONT) {
+                    newSensorOrientation = (360 + sensorOrientation - 90) % 360;
+                } else if (lensFacing == ANDROID_LENS_FACING_BACK) {
+                    newSensorOrientation = (360 + sensorOrientation + 90) % 360;
+                }
+            }
+
+            if (newSensorOrientation != sensorOrientation) {
+                ALOGV("%s: Update ANDROID_SENSOR_ORIENTATION for lens facing %d "
+                        "from %d to %d", __FUNCTION__, lensFacing, sensorOrientation,
+                        newSensorOrientation);
+                characteristics->update(ANDROID_SENSOR_ORIENTATION, &newSensorOrientation, 1);
+            }
+        }
+
+        if (characteristics->exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+            ALOGV("%s: Erasing ANDROID_INFO_DEVICE_STATE_ORIENTATIONS for lens facing %d",
+                    __FUNCTION__, lensFacing);
+            characteristics->erase(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+        }
+    }
+
     return OK;
 }
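The portrait override only touches sensors mounted at 0 or 180 degrees, rotating front-facing sensors by -90 degrees and back-facing ones by +90. A standalone sketch of the same arithmetic with a few worked values (illustrative only; the two-way facing flag is a simplification):

#include <cassert>

// Mirrors the arithmetic above: front-facing sensors rotate -90, others +90.
int portraitOrientation(int orientation, bool frontFacing) {
    if (orientation != 0 && orientation != 180) return orientation;
    return frontFacing ? (360 + orientation - 90) % 360
                       : (360 + orientation + 90) % 360;
}

int main() {
    assert(portraitOrientation(0,   /*frontFacing*/ true)  == 270);
    assert(portraitOrientation(0,   /*frontFacing*/ false) == 90);
    assert(portraitOrientation(180, /*frontFacing*/ true)  == 90);
    assert(portraitOrientation(90,  /*frontFacing*/ false) == 90);  // left untouched
    return 0;
}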
 
@@ -2388,8 +2684,8 @@
     for (size_t i = 0; i < streamConfigs.count; i += 4) {
         if ((streamConfigs.data.i32[i] == HAL_PIXEL_FORMAT_BLOB) && (streamConfigs.data.i32[i+3] ==
                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
-            if (streamConfigs.data.i32[i+1] < thresholdW  ||
-                    streamConfigs.data.i32[i+2] < thresholdH) {
+            if (streamConfigs.data.i32[i+1] * streamConfigs.data.i32[i+2] <
+                    thresholdW * thresholdH) {
                 continue;
             } else {
                 largeJpegCount ++;
@@ -2409,8 +2705,8 @@
             mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
     for (size_t i = 0; i < minDurations.count; i += 4) {
         if (minDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
-            if (minDurations.data.i64[i+1] < thresholdW ||
-                    minDurations.data.i64[i+2] < thresholdH) {
+            if ((int32_t)minDurations.data.i64[i+1] * (int32_t)minDurations.data.i64[i+2] <
+                    thresholdW * thresholdH) {
                 continue;
             } else {
                 largeJpegCount++;
@@ -2430,8 +2726,8 @@
             mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
     for (size_t i = 0; i < stallDurations.count; i += 4) {
         if (stallDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
-            if (stallDurations.data.i64[i+1] < thresholdW ||
-                    stallDurations.data.i64[i+2] < thresholdH) {
+            if ((int32_t)stallDurations.data.i64[i+1] * (int32_t)stallDurations.data.i64[i+2] <
+                    thresholdW * thresholdH) {
                 continue;
             } else {
                 largeJpegCount++;
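Replacing the per-dimension comparison with an area comparison changes which configurations count as large JPEGs. For example, against a hypothetical 1920x1440 threshold, a 2592x1080 configuration fails the old height check but passes the new area check:

// Worked example for the area comparison above (threshold values are assumptions).
static_assert(!(2592 >= 1920 && 1080 >= 1440),
        "the old per-dimension check would skip 2592x1080");
static_assert(2592 * 1080 >= 1920 * 1440,
        "the new area check counts 2592x1080 as a large JPEG size");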
@@ -2624,9 +2920,6 @@
 }
 
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
-    if (mInitialStatusCallbackFuture.valid()) {
-        mInitialStatusCallbackFuture.wait();
-    }
     // Destruction of ProviderInfo is only supposed to happen when the respective
     // CameraProvider interface dies, so do not unregister callbacks.
 }
@@ -2689,10 +2982,12 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
-        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
+        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
+                overrideToPortrait);
     }
 
     // Find hidden physical camera characteristics
@@ -2727,7 +3022,9 @@
         combo.push_back(deviceId);
 
         hardware::CameraInfo info;
-        status_t res = deviceInfo->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index a66598d..e6e4619 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -23,7 +23,6 @@
 #include <set>
 #include <string>
 #include <mutex>
-#include <future>
 
 #include <camera/camera2/ConcurrentCamera.h>
 #include <camera/CameraParameters2.h>
@@ -220,7 +219,14 @@
      */
     std::pair<int, int> getCameraCount() const;
 
-    std::vector<std::string> getCameraDeviceIds() const;
+    /**
+     * Upon return, if unavailablePhysicalIds is not nullptr, it will contain
+     * all of the unavailable physical camera IDs, in the form:
+     * {[logicalCamera, {physicalCamera1, physicalCamera2, ...}], ...}.
+     */
+    std::vector<std::string> getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds = nullptr) const;
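A hypothetical caller of the new out-parameter; 'providerManager' and the log line are assumptions, and only the map shape follows the comment above:

std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
std::vector<std::string> ids = providerManager.getCameraDeviceIds(&unavailablePhysicalIds);
for (const auto& [logicalId, physicalIds] : unavailablePhysicalIds) {
    for (const auto& physicalId : physicalIds) {
        ALOGV("Logical camera %s: physical camera %s is currently unavailable",
                logicalId.c_str(), physicalId.c_str());
    }
}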
 
     /**
      * Retrieve the number of API1 compatible cameras; these are internal and
@@ -242,6 +248,11 @@
     bool supportNativeZoomRatio(const std::string &id) const;
 
     /**
+     * Return true if the camera device has no composite Jpeg/R support.
+     */
+    bool isCompositeJpegRDisabled(const std::string &id) const;
+
+    /**
      * Return the resource cost of this camera device
      */
     status_t getResourceCost(const std::string &id,
@@ -251,14 +262,15 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            hardware::CameraInfo* info) const;
+            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
      * not have a v3 or newer HAL version.
      */
     status_t getCameraCharacteristics(const std::string &id,
-            bool overrideForPerfClass, CameraMetadata* characteristics) const;
+            bool overrideForPerfClass, CameraMetadata* characteristics,
+            bool overrideToPortrait) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -400,7 +412,11 @@
 
     status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
 
+    static bool isConcurrentDynamicRangeCaptureSupported(const CameraMetadata& deviceInfo,
+            int64_t profile, int64_t concurrentProfile);
+
     static const float kDepthARTolerance;
+    static const bool kFrameworkJpegRDisabled;
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
@@ -557,22 +573,24 @@
 
             bool hasFlashUnit() const { return mHasFlashUnit; }
             bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
+            bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
-            virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const {
-                (void) overrideForPerfClass;
-                (void) characteristics;
+            virtual status_t getCameraCharacteristics(
+                    [[maybe_unused]] bool overrideForPerfClass,
+                    [[maybe_unused]] CameraMetadata *characteristics,
+                    [[maybe_unused]] bool overrideToPortrait) {
                 return INVALID_OPERATION;
             }
-            virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
-                    CameraMetadata *characteristics) const {
-                (void) physicalCameraId;
-                (void) characteristics;
+            virtual status_t getPhysicalCameraCharacteristics(
+                    [[maybe_unused]] const std::string& physicalCameraId,
+                    [[maybe_unused]] CameraMetadata *characteristics) const {
                 return INVALID_OPERATION;
             }
 
@@ -597,16 +615,18 @@
                     mParentProvider(parentProvider), mTorchStrengthLevel(0),
                     mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
                     mHasFlashUnit(false), mSupportNativeZoomRatio(false),
-                    mPublicCameraIds(publicCameraIds) {}
+                    mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
             virtual ~DeviceInfo() {}
         protected:
 
             bool mHasFlashUnit; // const after constructor
             bool mSupportNativeZoomRatio; // const after constructor
             const std::vector<std::string>& mPublicCameraIds;
+            bool mCompositeJpegRDisabled;
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
+        std::unordered_map<std::string, std::set<std::string>> mUnavailablePhysicalCameras;
         int mUniqueDeviceCount;
         std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
         // The initial public camera IDs published by the camera provider.
@@ -622,12 +642,15 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
             virtual status_t dumpState(int fd) = 0;
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const override;
+                    CameraMetadata *characteristics,
+                    bool overrideToPortrait) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
@@ -661,8 +684,11 @@
             status_t fixupTorchStrengthTags();
             status_t addDynamicDepthTags(bool maxResolution = false);
             status_t deriveHeicTags(bool maxResolution = false);
+            status_t deriveJpegRTags(bool maxResolution = false);
             status_t addRotateCropTags();
+            status_t addAutoframingTags();
             status_t addPreCorrectionActiveArraySize();
+            status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
 
             static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
                     android_pixel_format_t format,
@@ -714,8 +740,6 @@
         std::vector<CameraStatusInfoT> mCachedStatus;
         // End of scope for mInitLock
 
-        std::future<void> mInitialStatusCallbackFuture;
-
         std::unique_ptr<ProviderInfo::DeviceInfo>
         virtual initializeDeviceInfo(
                 const std::string &name, const metadata_vendor_id_t tagId,
@@ -723,9 +747,6 @@
 
         virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
 
-        void notifyInitialStatusChange(sp<StatusListener> listener,
-                std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
-
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
 
         // Parse provider instance name for type and id
@@ -790,6 +811,8 @@
     // No guarantees on the order of traversal
     ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
 
+    bool isCompositeJpegRDisabledLocked(const std::string &id) const;
+
     // Map external providers to USB devices in order to handle USB hotplug
     // events for lazy HALs
     std::pair<std::vector<std::string>, sp<ProviderInfo>>
@@ -829,7 +852,7 @@
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics) const;
+            CameraMetadata* characteristics, bool overrideToPortrait) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 81b4779..b18cbd4 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -293,7 +293,7 @@
             if (link != STATUS_OK) {
                 ALOGW("%s: Unable to link to provider '%s' death notifications",
                         __FUNCTION__, mProviderName.c_str());
-                mManager->removeProvider(mProviderName);
+                mManager->removeProvider(mProviderInstance);
                 return nullptr;
             }
 
@@ -483,6 +483,9 @@
         }
     }
 
+    mCompositeJpegRDisabled = mCameraCharacteristics.exists(
+            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+
     mSystemCameraKind = getSystemCameraKind();
 
     status_t res = fixupMonochromeTags();
@@ -501,8 +504,13 @@
         ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
-
-    if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+    res = deriveJpegRTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+    using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
+    if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
         status_t status = addDynamicDepthTags(/*maxResolution*/true);
         if (OK != status) {
             ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -514,6 +522,12 @@
             ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
                     "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
         }
+
+        status = deriveJpegRTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
     }
 
     res = addRotateCropTags();
@@ -521,6 +535,11 @@
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
     }
+    res = addAutoframingTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
     res = addPreCorrectionActiveArraySize();
     if (OK != res) {
         ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
@@ -532,6 +551,11 @@
         ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+    res = addReadoutTimestampTag();
+    if (OK != res) {
+        ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
 
     camera_metadata_entry flashAvailable =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -545,6 +569,11 @@
                     "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
                     strerror(-res), res);
         }
+
+        // b/247038031: If system_server crashes, camera_server is restarted as
+        // well. If the flashlight was turned on before the crash, it may remain
+        // stuck on. As a workaround, set the torch mode to OFF here.
+        interface->setTorchMode(false);
     } else {
         mHasFlashUnit = false;
     }
@@ -706,8 +735,8 @@
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
-            streamConfiguration, overrideForPerfClass, &earlyExit);
+            String8(mId.c_str()), mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
+            mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -754,7 +783,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/false);
         if (res != OK) {
             return res;
         }
@@ -762,7 +792,8 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                                                   &physicalDeviceInfo,
+                                                   /*overrideToPortrait*/false);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
@@ -770,7 +801,8 @@
         bStatus =
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    String8(cameraId.c_str()), deviceInfo,
+                    mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, &shouldExit);
         if (!bStatus.isOk()) {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index bded9aa..8ff5c3f 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -388,7 +388,7 @@
                   __FUNCTION__,
                   mProviderName.c_str(),
                   linked.description().c_str());
-              mManager->removeProvider(mProviderName);
+              mManager->removeProvider(mProviderInstance);
               return nullptr;
             } else if (!linked) {
               ALOGW("%s: Unable to link to provider '%s' death notifications",
@@ -442,8 +442,7 @@
 }
 
 void HidlProviderInfo::serviceDied(uint64_t cookie,
-        const wp<hidl::base::V1_0::IBase>& who) {
-    (void) who;
+        [[maybe_unused]] const wp<hidl::base::V1_0::IBase>& who) {
     ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
     if (cookie != mId) {
         ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
@@ -625,7 +624,7 @@
                 __FUNCTION__, strerror(-res), res);
     }
 
-    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+    if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
         status_t status = addDynamicDepthTags(/*maxResolution*/true);
         if (OK != status) {
             ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -644,6 +643,11 @@
         ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
     }
+    res = addAutoframingTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
     res = addPreCorrectionActiveArraySize();
     if (OK != res) {
         ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
@@ -655,6 +659,11 @@
         ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
     }
+    res = addReadoutTimestampTag(/*readoutTimestampSupported*/false);
+    if (OK != res) {
+        ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
 
     camera_metadata_entry flashAvailable =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -688,6 +697,11 @@
 
     mTorchStrengthLevel = 0;
 
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    }
+
     queryPhysicalCameraIds();
 
     // Get physical camera characteristics if applicable
@@ -748,13 +762,6 @@
             }
         }
     }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-
-
 }
 
 status_t HidlProviderInfo::HidlDeviceInfo3::setTorchMode(bool enabled) {
@@ -914,7 +921,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/false);
         if (res != OK) {
             return res;
         }
@@ -922,7 +930,7 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                            &physicalDeviceInfo, /*overrideToPortrait*/false);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index a556200..2ac38d5 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -451,10 +451,9 @@
     return OK;
 }
 
-void Camera3BufferManager::dump(int fd, const Vector<String16>& args) const {
+void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
     Mutex::Autolock l(mLock);
 
-    (void) args;
     String8 lines;
     lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 7c2f34f..61c3298 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -52,18 +52,19 @@
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 
-#include "utils/CameraTraces.h"
-#include "mediautils/SchedulingPolicyService.h"
-#include "device3/Camera3Device.h"
-#include "device3/Camera3OutputStream.h"
-#include "device3/Camera3InputStream.h"
-#include "device3/Camera3FakeStream.h"
-#include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "aidl/AidlUtils.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3FakeStream.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "mediautils/SchedulingPolicyService.h"
 #include "utils/CameraThreadState.h"
+#include "utils/CameraTraces.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
-#include "utils/CameraServiceProxyWrapper.h"
 
 #include <algorithm>
 #include <tuple>
@@ -73,11 +74,14 @@
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool legacyClient):
+Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8 &id, bool overrideForPerfClass, bool overrideToPortrait, bool legacyClient):
+        mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
         mIsConstrainedHighSpeedConfiguration(false),
+        mIsCompositeJpegRDisabled(false),
         mStatus(STATUS_UNINITIALIZED),
         mStatusWaiters(0),
         mUsePartialResult(false),
@@ -94,7 +98,13 @@
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
-        mOverrideForPerfClass(overrideForPerfClass)
+        mOverrideForPerfClass(overrideForPerfClass),
+        mOverrideToPortrait(overrideToPortrait),
+        mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mComposerOutput(false),
+        mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
+        mSettingsOverride(-1),
+        mActivePhysicalId("")
 {
     ATRACE_CALL();
     ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -113,10 +123,6 @@
 
 status_t Camera3Device::initializeCommonLocked() {
 
-    /** Start watchdog thread */
-    mCameraServiceWatchdog = new CameraServiceWatchdog();
-    mCameraServiceWatchdog->run("CameraServiceWatchdog");
-
     /** Start up status tracker thread */
     mStatusTracker = new StatusTracker(this);
     status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
@@ -167,10 +173,21 @@
         }
     }
 
+    camera_metadata_entry_t availableSettingsOverrides = mDeviceInfo.find(
+            ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES);
+    for (size_t i = 0; i < availableSettingsOverrides.count; i++) {
+        if (availableSettingsOverrides.data.i32[i] ==
+                ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+            mSupportZoomOverride = true;
+            break;
+        }
+    }
+
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute);
+            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+            mSupportZoomOverride);
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -218,7 +235,7 @@
     mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
             mSupportNativeZoomRatio, usePrecorrectArray);
 
-    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo)) {
+    if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mDeviceInfo)) {
         mUHRCropAndMeteringRegionMappers[mId.c_str()] =
                 UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
     }
@@ -230,6 +247,15 @@
     // Hidl/AidlCamera3DeviceInjectionMethods
     mInjectionMethods = createCamera3DeviceInjectionMethods(this);
 
+    /** Start watchdog thread */
+    mCameraServiceWatchdog = new CameraServiceWatchdog(mId, mCameraServiceProxyWrapper);
+    res = mCameraServiceWatchdog->run("CameraServiceWatchdog");
+    if (res != OK) {
+        SET_ERR_L("Unable to start camera service watchdog thread: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+
     return OK;
 }
 
@@ -239,25 +265,26 @@
 
 status_t Camera3Device::disconnectImpl() {
     ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+
     ALOGI("%s: E", __FUNCTION__);
 
     status_t res = OK;
     std::vector<wp<Camera3StreamInterface>> streams;
+    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     {
-        Mutex::Autolock il(mInterfaceLock);
-        nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
-        {
-            Mutex::Autolock l(mLock);
-            if (mStatus == STATUS_UNINITIALIZED) return res;
+        Mutex::Autolock l(mLock);
+        if (mStatus == STATUS_UNINITIALIZED) return res;
 
-            if (mStatus == STATUS_ACTIVE ||
-                    (mStatus == STATUS_ERROR && mRequestThread != NULL)) {
-                res = mRequestThread->clearRepeatingRequests();
+        if (mRequestThread != NULL) {
+            if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
+                res = mRequestThread->clear();
                 if (res != OK) {
                     SET_ERR_L("Can't stop streaming");
                     // Continue to close device even in case of error
                 } else {
-                    res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+                    res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                                  /*requestThreadInvocation*/ false);
                     if (res != OK) {
                         SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
                                 maxExpectedDuration);
@@ -265,79 +292,83 @@
                     }
                 }
             }
+            // Signal to the request thread that we're not expecting any more
+            // requests. This holds because once we're in disconnect and have
+            // cleared the request queue, the request thread can't receive any
+            // new requests through binder calls, since disconnect holds the
+            // mBinderSerialization lock.
+            mRequestThread->setRequestClearing();
+        }
 
-            if (mStatus == STATUS_ERROR) {
-                CLOGE("Shutting down in an error state");
-            }
+        if (mStatus == STATUS_ERROR) {
+            CLOGE("Shutting down in an error state");
+        }
 
-            if (mStatusTracker != NULL) {
-                mStatusTracker->requestExit();
-            }
+        if (mStatusTracker != NULL) {
+            mStatusTracker->requestExit();
+        }
 
-            if (mRequestThread != NULL) {
-                mRequestThread->requestExit();
-            }
+        if (mRequestThread != NULL) {
+            mRequestThread->requestExit();
+        }
 
-            streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
-            for (size_t i = 0; i < mOutputStreams.size(); i++) {
-                streams.push_back(mOutputStreams[i]);
-            }
-            if (mInputStream != nullptr) {
-                streams.push_back(mInputStream);
-            }
+        streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+        for (size_t i = 0; i < mOutputStreams.size(); i++) {
+            streams.push_back(mOutputStreams[i]);
+        }
+        if (mInputStream != nullptr) {
+            streams.push_back(mInputStream);
         }
     }
-    // Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
-    // as the threads try to access parent state (b/143513518)
+
+    // Joining done without holding mLock, otherwise deadlocks may ensue
+    // as the threads try to access parent state
     if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
         // HAL may be in a bad state, so waiting for request thread
         // (which may be stuck in the HAL processCaptureRequest call)
         // could be dangerous.
-        // give up mInterfaceLock here and then lock it again. Could this lead
-        // to other deadlocks
         mRequestThread->join();
     }
+
+    if (mStatusTracker != NULL) {
+        mStatusTracker->join();
+    }
+
+    if (mInjectionMethods->isInjecting()) {
+        mInjectionMethods->stopInjection();
+    }
+
+    HalInterface* interface;
     {
-        Mutex::Autolock il(mInterfaceLock);
-        if (mStatusTracker != NULL) {
-            mStatusTracker->join();
-        }
+        Mutex::Autolock l(mLock);
+        mRequestThread.clear();
+        Mutex::Autolock stLock(mTrackerLock);
+        mStatusTracker.clear();
+        interface = mInterface.get();
+    }
 
-        if (mInjectionMethods->isInjecting()) {
-            mInjectionMethods->stopInjection();
-        }
+    // Call close without internal mutex held, as the HAL close may need to
+    // wait on assorted callbacks, etc., to complete before it can return.
+    mCameraServiceWatchdog->WATCH(interface->close());
 
-        HalInterface* interface;
-        {
-            Mutex::Autolock l(mLock);
-            mRequestThread.clear();
-            Mutex::Autolock stLock(mTrackerLock);
-            mStatusTracker.clear();
-            interface = mInterface.get();
-        }
+    flushInflightRequests();
 
-        // Call close without internal mutex held, as the HAL close may need to
-        // wait on assorted callbacks,etc, to complete before it can return.
-        mCameraServiceWatchdog->WATCH(interface->close());
+    {
+        Mutex::Autolock l(mLock);
+        mInterface->clear();
+        mOutputStreams.clear();
+        mInputStream.clear();
+        mDeletedStreams.clear();
+        mBufferManager.clear();
+        internalUpdateStatusLocked(STATUS_UNINITIALIZED);
+    }
 
-        flushInflightRequests();
-
-        {
-            Mutex::Autolock l(mLock);
-            mInterface->clear();
-            mOutputStreams.clear();
-            mInputStream.clear();
-            mDeletedStreams.clear();
-            mBufferManager.clear();
-            internalUpdateStatusLocked(STATUS_UNINITIALIZED);
-        }
-
-        for (auto& weakStream : streams) {
-              sp<Camera3StreamInterface> stream = weakStream.promote();
-            if (stream != nullptr) {
-                ALOGE("%s: Stream %d leaked! strong reference (%d)!",
-                        __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
-            }
+    for (auto& weakStream : streams) {
+        sp<Camera3StreamInterface> stream = weakStream.promote();
+        if (stream != nullptr) {
+            ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+                    __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
         }
     }
     ALOGI("%s: X", __FUNCTION__);
@@ -389,7 +420,7 @@
     // Get max jpeg size (area-wise) for default sensor pixel mode
     camera3::Size maxDefaultJpegResolution =
             SessionConfigurationUtils::getMaxJpegResolution(info,
-                    /*isUltraHighResolutionSensor*/false);
+                    /*supportsUltraHighResolutionCapture*/false);
     // Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
     // not ultra high res sensor
     camera3::Size uhrMaxJpegResolution =
@@ -482,9 +513,8 @@
     return BAD_VALUE;
 }
 
-status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
+status_t Camera3Device::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     ATRACE_CALL();
-    (void)args;
 
     // Try to lock, but continue in case of failure (to avoid blocking in
     // deadlocks)
@@ -810,7 +840,7 @@
     }
 
     if (res == OK) {
-        waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
+        waitUntilStateThenRelock(/*active*/true, kActiveTimeout, /*requestThreadInvocation*/false);
         if (res != OK) {
             SET_ERR_L("Can't transition to active in %f seconds!",
                     kActiveTimeout/1e9);
@@ -935,7 +965,8 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked(maxExpectedDuration);
+            res = internalPauseAndWaitLocked(maxExpectedDuration,
+                          /*requestThreadInvocation*/ false);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -986,7 +1017,7 @@
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
             uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
-            int timestampBase, int mirrorMode) {
+            int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1000,7 +1031,7 @@
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
             format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
             streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
-            streamUseCase, timestampBase, mirrorMode);
+            streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
 }
 
 static bool isRawFormat(int format) {
@@ -1021,7 +1052,7 @@
         const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
         uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
-        int timestampBase, int mirrorMode) {
+        int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
@@ -1030,10 +1061,11 @@
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
             " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
             " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
-            " mirrorMode %d",
+            " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
             mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
             consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
-            dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+            dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+            useReadoutTimestamp);
 
     status_t res;
     bool wasActive = false;
@@ -1051,7 +1083,8 @@
             break;
         case STATUS_ACTIVE:
             ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
-            res = internalPauseAndWaitLocked(maxExpectedDuration);
+            res = internalPauseAndWaitLocked(maxExpectedDuration,
+                          /*requestThreadInvocation*/ false);
             if (res != OK) {
                 SET_ERR_L("Can't pause captures to reconfigure streams!");
                 return res;
@@ -1104,7 +1137,7 @@
                 width, height, blobBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         bool maxResolution =
                 sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1119,25 +1152,25 @@
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
                 mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
                 isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
-                timestampBase, mirrorMode);
+                timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
     }
 
     size_t consumerCount = consumers.size();
@@ -1225,6 +1258,7 @@
     streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
     streamInfo->originalDataSpace = stream->getOriginalDataSpace();
     streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+    streamInfo->colorSpace = stream->getColorSpace();
     return OK;
 }
 
@@ -1345,12 +1379,49 @@
     set_camera_metadata_vendor_id(meta, mVendorTagId);
     filteredParams.unlock(meta);
     if (availableSessionKeys.count > 0) {
+        bool rotateAndCropSessionKey = false;
+        bool autoframingSessionKey = false;
         for (size_t i = 0; i < availableSessionKeys.count; i++) {
             camera_metadata_ro_entry entry = params.find(
                     availableSessionKeys.data.i32[i]);
             if (entry.count > 0) {
                 filteredParams.update(entry);
             }
+            if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
+                rotateAndCropSessionKey = true;
+            }
+            if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+                autoframingSessionKey = true;
+            }
+        }
+
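+        // The rotate-and-crop and autoframing overrides operate on CaptureRequest
+        // objects, so the filtered session parameters are wrapped in a temporary
+        // request below, the overrides are applied, and the result is copied back
+        // into filteredParams.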
+        if (rotateAndCropSessionKey || autoframingSessionKey) {
+            sp<CaptureRequest> request = new CaptureRequest();
+            PhysicalCameraSettings settingsList;
+            settingsList.metadata = filteredParams;
+            request->mSettingsList.push_back(settingsList);
+
+            if (rotateAndCropSessionKey) {
+                auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+                if (rotateAndCropEntry.count > 0 &&
+                        rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+                    request->mRotateAndCropAuto = true;
+                } else {
+                    request->mRotateAndCropAuto = false;
+                }
+
+                overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            }
+
+            if (autoframingSessionKey) {
+                auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+                if (autoframingEntry.count > 0 &&
+                        autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+                    overrideAutoframing(request, mAutoframingOverride);
+                }
+            }
+
+            filteredParams = request->mSettingsList.begin()->metadata;
         }
     }
 
@@ -1446,6 +1517,13 @@
                     &kDefaultJpegQuality, 1);
         }
 
+        // Fill in AUTOFRAMING if not available
+        if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
+            static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+            mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
+                    &kDefaultAutoframingMode, 1);
+        }
+
         *request = mRequestTemplateCache[templateId];
         mLastTemplateId = templateId;
     }
@@ -1479,7 +1557,8 @@
     }
     ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.string(),
             maxExpectedDuration);
-    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                           /*requestThreadInvocation*/ false);
     if (res != OK) {
         mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
@@ -1490,12 +1569,14 @@
 
 void Camera3Device::internalUpdateStatusLocked(Status status) {
     mStatus = status;
-    mRecentStatusUpdates.add(mStatus);
+    mStatusIsInternal = mPauseStateNotify;
+    mRecentStatusUpdates.add({mStatus, mStatusIsInternal});
     mStatusChanged.broadcast();
 }
 
 // Pause to reconfigure
-status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
+status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+        bool requestThreadInvocation) {
     if (mRequestThread.get() != nullptr) {
         mRequestThread->setPaused(true);
     } else {
@@ -1504,8 +1585,10 @@
 
     ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
           maxExpectedDuration);
-    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+    status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                           requestThreadInvocation);
     if (res != OK) {
+        mStatusTracker->dumpActiveComponents();
         SET_ERR_L("Can't idle device in %f seconds!",
                 maxExpectedDuration/1e9);
     }
@@ -1521,7 +1604,9 @@
 
     ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
             kActiveTimeout);
-    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+    // internalResumeLocked is always called from a binder thread.
+    res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout,
+                  /*requestThreadInvocation*/ false);
     if (res != OK) {
         SET_ERR_L("Can't transition to active in %f seconds!",
                 kActiveTimeout/1e9);
@@ -1530,7 +1615,8 @@
     return OK;
 }
 
-status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout) {
+status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout,
+        bool requestThreadInvocation) {
     status_t res = OK;
 
     size_t startIndex = 0;
@@ -1559,7 +1645,12 @@
     bool stateSeen = false;
     nsecs_t startTime = systemTime();
     do {
-        if (active == (mStatus == STATUS_ACTIVE)) {
+        if (mStatus == STATUS_ERROR) {
+            // Device in error state. Return right away.
+            break;
+        }
+        if (active == (mStatus == STATUS_ACTIVE) &&
+            (requestThreadInvocation || !mStatusIsInternal)) {
             // Desired state is current
             break;
         }
@@ -1581,9 +1672,19 @@
                 "%s: Skipping status updates in Camera3Device, may result in deadlock.",
                 __FUNCTION__);
 
-        // Encountered desired state since we began waiting
+        // Encountered desired state since we began waiting. Internal invocations coming from
+        // request threads (such as reconfigureCamera) should be woken up immediately, whereas
+        // invocations from binder threads (such as createInputStream) should only be woken up by
+        // status updates that are not internal. This avoids waking them on the intermediate pause
+        // signal from reconfigureCamera, which switches the status back to active right after.
         for (size_t i = startIndex; i < mRecentStatusUpdates.size(); i++) {
-            if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+            if (mRecentStatusUpdates[i].status == STATUS_ERROR) {
+                // Device in error state. Return right away.
+                stateSeen = true;
+                break;
+            }
+            if (active == (mRecentStatusUpdates[i].status == STATUS_ACTIVE) &&
+                (requestThreadInvocation || !mRecentStatusUpdates[i].isInternal)) {
                 stateSeen = true;
                 break;
             }
@@ -1737,7 +1838,7 @@
     }
 
     // Calculate expected duration for flush with additional buffer time in ms for watchdog
-    uint64_t maxExpectedDuration = (getExpectedInFlightDuration() + kBaseGetBufferWait) / 1e6;
+    uint64_t maxExpectedDuration = ns2ms(getExpectedInFlightDuration() + kBaseGetBufferWait);
     status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
             maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
 
@@ -1814,7 +1915,7 @@
     camera_metadata_entry minDurations =
             mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
     for (size_t i = 0; i < minDurations.count; i += 4) {
-        if (minDurations.data.i64[i] == stream->getFormat()
+        if (minDurations.data.i64[i] == stream->getOriginalFormat()
                 && minDurations.data.i64[i+1] == stream->getWidth()
                 && minDurations.data.i64[i+2] == stream->getHeight()) {
             int64_t minFrameDuration = minDurations.data.i64[i+3];
@@ -1870,10 +1971,11 @@
                     streamUseCase = camera3Stream->getStreamUseCase();
                 }
                 streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
-                    stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
+                    stream->getOriginalFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
                     stream->getMaxHalBuffers(),
                     stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
-                    stream->getDynamicRangeProfile(), streamUseCase);
+                    stream->getDynamicRangeProfile(), streamUseCase,
+                    stream->getColorSpace());
             }
         }
     }
@@ -2123,6 +2225,15 @@
         newRequest->mRotateAndCropAuto = false;
     }
 
+    auto autoframingEntry =
+            newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count > 0 &&
+            autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        newRequest->mAutoframingAuto = true;
+    } else {
+        newRequest->mAutoframingAuto = false;
+    }
+
     auto zoomRatioEntry =
             newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
     if (zoomRatioEntry.count > 0 &&
@@ -2154,6 +2265,16 @@
         }
     }
 
+    if (mSupportZoomOverride) {
+        for (auto& settings : newRequest->mSettingsList) {
+            auto settingsOverrideEntry =
+                    settings.metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+            settings.mOriginalSettingsOverride = settingsOverrideEntry.count > 0 ?
+                    settingsOverrideEntry.data.i32[0] :
+                    ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+        }
+    }
+
     return newRequest;
 }
 
@@ -2210,7 +2331,9 @@
 
     nsecs_t startTime = systemTime();
 
-    Mutex::Autolock il(mInterfaceLock);
+    // We must not hold mInterfaceLock here since this function is called from
+    // RequestThread::threadLoop and holding mInterfaceLock could lead to
+    // deadlocks (http://b/143513518)
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
 
     Mutex::Autolock l(mLock);
@@ -2227,7 +2350,16 @@
         mPauseStateNotify = true;
         mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
 
-        rc = internalPauseAndWaitLocked(maxExpectedDuration);
+        // This is essentially the same as calling rc = internalPauseAndWaitLocked(..), except that
+        // we don't want to call setPaused(true) to avoid it interfering with setPaused() called
+        // from createInputStream/createStream.
+        rc = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+                /*requestThreadInvocation*/ true);
+        if (rc != OK) {
+            mStatusTracker->dumpActiveComponents();
+            SET_ERR_L("Can't idle device in %f seconds!",
+                maxExpectedDuration/1e9);
+        }
     }
 
     if (rc == NO_ERROR) {
@@ -2242,6 +2374,9 @@
             //present streams end up with outstanding buffers that will
             //not get drained.
             internalUpdateStatusLocked(STATUS_ACTIVE);
+
+            mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode,
+                    true /*internalReconfig*/, ns2ms(systemTime() - startTime));
         } else if (rc == DEAD_OBJECT) {
             // DEAD_OBJECT can be returned if either the consumer surface is
             // abandoned, or the HAL has died.
@@ -2257,9 +2392,6 @@
         ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
     }
 
-    CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
-        ns2ms(systemTime() - startTime));
-
     if (markClientActive) {
         mStatusTracker->markComponentActive(clientStatusId);
     }
@@ -2330,6 +2462,9 @@
         tryRemoveFakeStreamLocked();
     }
 
+    // Override stream use case based on "adb shell command"
+    overrideStreamUseCaseLocked();
+
     // Start configuring the streams
     ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
 
@@ -2359,7 +2494,7 @@
     }
 
     mGroupIdPhysicalCameraMap.clear();
-    bool composerSurfacePresent = false;
+    mComposerOutput = false;
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
 
         // Don't configure bidi streams twice, nor add them twice to the list
@@ -2382,7 +2517,10 @@
         if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
             size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
                                                                 // always occupy the initial entry.
-            if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
+            if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
+                    (outputStream->data_space ==
+                     static_cast<android_dataspace_t>(
+                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
                 bufferSizes[k] = static_cast<uint32_t>(
                         getJpegBufferSize(infoPhysical(String8(outputStream->physical_camera_id)),
                                 outputStream->width, outputStream->height));
@@ -2402,7 +2540,7 @@
         }
 
         if (outputStream->usage & GraphicBuffer::USAGE_HW_COMPOSER) {
-            composerSurfacePresent = true;
+            mComposerOutput = true;
         }
     }
 
@@ -2412,8 +2550,9 @@
     // max_buffers, usage, and priv fields, as well as data_space and format
     // fields for IMPLEMENTATION_DEFINED formats.
 
+    int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
     const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
-    res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
+    res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes, logId);
     sessionParams.unlock(sessionBuffer);
 
     if (res == BAD_VALUE) {
@@ -2471,7 +2610,7 @@
         }
     }
 
-    mRequestThread->setComposerSurface(composerSurfacePresent);
+    mRequestThread->setComposerSurface(mComposerOutput);
 
     // Request thread needs to know to avoid using repeat-last-settings protocol
     // across configure_streams() calls
@@ -2674,8 +2813,8 @@
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
-        const std::set<std::set<String8>>& physicalCameraIds,
-        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+        bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
+        bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
         const std::set<std::string>& cameraIdsWithZoom,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
     ATRACE_CALL();
@@ -2683,9 +2822,9 @@
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
-            hasAppCallback, minExpectedDuration, maxExpectedDuration, physicalCameraIds,
-            isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
-            outputSurfaces));
+            hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
+            isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
+            requestTimeNs, outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -2874,7 +3013,9 @@
         sp<StatusTracker> statusTracker,
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
-        bool supportCameraMute) :
+        bool supportCameraMute,
+        bool overrideToPortrait,
+        bool supportSettingsOverride) :
         Thread(/*canCallJava*/false),
         mParent(parent),
         mStatusTracker(statusTracker),
@@ -2892,9 +3033,11 @@
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+        mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
         mComposerOutput(false),
         mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
         mCameraMuteChanged(false),
+        mSettingsOverride(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
         mPrepareVideoStream(false),
@@ -2903,8 +3046,11 @@
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
-        mSupportCameraMute(supportCameraMute){
+        mSupportCameraMute(supportCameraMute),
+        mOverrideToPortrait(overrideToPortrait),
+        mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
+    mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -3047,7 +3193,8 @@
 
 }
 
-status_t Camera3Device::RequestThread::clearRepeatingRequestsLocked(/*out*/int64_t *lastFrameNumber) {
+status_t Camera3Device::RequestThread::clearRepeatingRequestsLocked(
+        /*out*/int64_t *lastFrameNumber) {
     std::vector<int32_t> streamIds;
     for (const auto& request : mRepeatingRequests) {
         for (const auto& stream : request->mOutputStreams) {
@@ -3072,8 +3219,6 @@
     Mutex::Autolock l(mRequestLock);
     ALOGV("RequestThread::%s:", __FUNCTION__);
 
-    mRepeatingRequests.clear();
-
     // Send errors for all requests pending in the request queue, including
     // pending repeating requests
     sp<NotificationListener> listener = mListener.promote();
@@ -3111,10 +3256,7 @@
 
     Mutex::Autolock al(mTriggerMutex);
     mTriggerMap.clear();
-    if (lastFrameNumber != NULL) {
-        *lastFrameNumber = mRepeatingLastFrameNumber;
-    }
-    mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
+    clearRepeatingRequestsLocked(lastFrameNumber);
     mRequestClearing = true;
     mRequestSignal.signal();
     return OK;
@@ -3243,16 +3385,18 @@
     return true;
 }
 
-std::pair<nsecs_t, nsecs_t> Camera3Device::RequestThread::calculateExpectedDurationRange(
-        const camera_metadata_t *request) {
-    std::pair<nsecs_t, nsecs_t> expectedRange(
+Camera3Device::RequestThread::ExpectedDurationInfo
+        Camera3Device::RequestThread::calculateExpectedDurationRange(
+                const camera_metadata_t *request) {
+    ExpectedDurationInfo expectedDurationInfo = {
             InFlightRequest::kDefaultMinExpectedDuration,
-            InFlightRequest::kDefaultMaxExpectedDuration);
+            InFlightRequest::kDefaultMaxExpectedDuration,
+            /*isFixedFps*/false};
     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
     find_camera_metadata_ro_entry(request,
             ANDROID_CONTROL_AE_MODE,
             &e);
-    if (e.count == 0) return expectedRange;
+    if (e.count == 0) return expectedDurationInfo;
 
     switch (e.data.u8[0]) {
         case ANDROID_CONTROL_AE_MODE_OFF:
@@ -3260,29 +3404,32 @@
                     ANDROID_SENSOR_EXPOSURE_TIME,
                     &e);
             if (e.count > 0) {
-                expectedRange.first = e.data.i64[0];
-                expectedRange.second = expectedRange.first;
+                expectedDurationInfo.minDuration = e.data.i64[0];
+                expectedDurationInfo.maxDuration = expectedDurationInfo.minDuration;
             }
             find_camera_metadata_ro_entry(request,
                     ANDROID_SENSOR_FRAME_DURATION,
                     &e);
             if (e.count > 0) {
-                expectedRange.first = std::max(e.data.i64[0], expectedRange.first);
-                expectedRange.second = expectedRange.first;
+                expectedDurationInfo.minDuration =
+                        std::max(e.data.i64[0], expectedDurationInfo.minDuration);
+                expectedDurationInfo.maxDuration = expectedDurationInfo.minDuration;
             }
+            expectedDurationInfo.isFixedFps = false;
             break;
         default:
             find_camera_metadata_ro_entry(request,
                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                     &e);
             if (e.count > 1) {
-                expectedRange.first = 1e9 / e.data.i32[1];
-                expectedRange.second = 1e9 / e.data.i32[0];
+                expectedDurationInfo.minDuration = 1e9 / e.data.i32[1];
+                expectedDurationInfo.maxDuration = 1e9 / e.data.i32[0];
+                expectedDurationInfo.isFixedFps = (e.data.i32[1] == e.data.i32[0]);
             }
             break;
     }
 
-    return expectedRange;
+    return expectedDurationInfo;
 }
 
 bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
@@ -3421,6 +3568,17 @@
         latestRequestId = NAME_NOT_FOUND;
     }
 
+    for (size_t i = 0; i < mNextRequests.size(); i++) {
+        auto& nextRequest = mNextRequests.editItemAt(i);
+        sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+        // Do not override rotate&crop for stream configurations that include
+        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
+            overrideAutoRotateAndCrop(captureRequest);
+        captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
+    }
+
     // 'mNextRequests' will at this point contain either a set of HFR batched requests
     //  or a single request from streaming or burst. In either case the first element
     //  should contain the latest camera settings that we need to check for any session
@@ -3444,7 +3602,6 @@
             if (parent != nullptr) {
                 mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
             }
-            setPaused(false);
 
             if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
                 mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
@@ -3566,18 +3723,16 @@
         bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
         mPrevTriggers = triggerCount;
 
-        // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output. The display rotation there will be
-        // compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        bool rotateAndCropChanged = mComposerOutput ? false :
-            overrideAutoRotateAndCrop(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
+        bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
 
         // If the request is the same as last, or we had triggers now or last time or
         // changing overrides this time
         bool newRequest =
                 (mPrevRequest != captureRequest || triggersMixedIn ||
-                        rotateAndCropChanged || testPatternChanged) &&
+                         captureRequest->mRotateAndCropChanged ||
+                         captureRequest->mAutoframingChanged ||
+                         testPatternChanged || settingsOverrideChanged) &&
                 // Request settings are all the same within one batch, so only treat the first
                 // request in a batch as new
                 !(batchedRequest && i > 0);
@@ -3693,6 +3848,17 @@
                         }
                         captureRequest->mRotationAndCropUpdated = true;
                     }
+
+                    for (it = captureRequest->mSettingsList.begin();
+                            it != captureRequest->mSettingsList.end(); it++) {
+                        res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
+                                mVndkVersion, it->metadata, false /*isStatic*/);
+                        if (res != OK) {
+                            SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
+                                    "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
+                            return INVALID_OPERATION;
+                        }
+                    }
                 }
             }
 
@@ -3897,15 +4063,17 @@
                 isZslCapture = true;
             }
         }
-        auto expectedDurationRange = calculateExpectedDurationRange(settings);
+        auto expectedDurationInfo = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
                 /*hasInput*/halRequest->input_buffer != NULL,
                 hasCallback,
-                /*min*/expectedDurationRange.first,
-                /*max*/expectedDurationRange.second,
+                expectedDurationInfo.minDuration,
+                expectedDurationInfo.maxDuration,
+                expectedDurationInfo.isFixedFps,
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
-                captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
+                captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
+                mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
                                       SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -4039,13 +4207,18 @@
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
-        return BAD_VALUE;
-    }
     mRotateAndCropOverride = rotateAndCropValue;
     return OK;
 }
 
+status_t Camera3Device::RequestThread::setAutoframingAutoBehaviour(
+        camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    mAutoframingOverride = autoframingValue;
+    return OK;
+}
+
 status_t Camera3Device::RequestThread::setComposerSurface(bool composerSurfacePresent) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
@@ -4063,6 +4236,13 @@
     return OK;
 }
 
+status_t Camera3Device::RequestThread::setZoomOverride(int32_t zoomOverride) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    mSettingsOverride = zoomOverride;
+    return OK;
+}
+
 nsecs_t Camera3Device::getExpectedInFlightDuration() {
     ATRACE_CALL();
     std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4094,6 +4274,36 @@
     }
 }
 
+status_t Camera3Device::setCameraServiceWatchdog(bool enabled) {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (mCameraServiceWatchdog != NULL) {
+        mCameraServiceWatchdog->setEnabled(enabled);
+    }
+
+    return OK;
+}
+
+void Camera3Device::setStreamUseCaseOverrides(
+        const std::vector<int64_t>& useCaseOverrides) {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+    mStreamUseCaseOverrides = useCaseOverrides;
+}
+
+void Camera3Device::clearStreamUseCaseOverrides() {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+    mStreamUseCaseOverrides.clear();
+}
+
+bool Camera3Device::hasDeviceError() {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+    return mStatus == STATUS_ERROR;
+}
+
 void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
     if (mNextRequests.empty()) {
         return;
@@ -4209,6 +4419,11 @@
     return;
 }
 
+void Camera3Device::RequestThread::setRequestClearing() {
+    Mutex::Autolock l(mRequestLock);
+    mRequestClearing = true;
+}
+
 sp<Camera3Device::CaptureRequest>
         Camera3Device::RequestThread::waitForNextRequestLocked() {
     status_t res;
@@ -4594,29 +4809,76 @@
     return OK;
 }
 
-bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(
-        const sp<CaptureRequest> &request) {
+bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mTriggerMutex);
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+            this->mRotateAndCropOverride);
+}
+
+bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
+        bool overrideToPortrait,
+        camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
+    if (overrideToPortrait) {
+        uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
+        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+        metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
+                &rotateAndCrop_u8, 1);
+        return true;
+    }
+
     if (request->mRotateAndCropAuto) {
-        Mutex::Autolock l(mTriggerMutex);
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
 
         auto rotateAndCropEntry = metadata.find(ANDROID_SCALER_ROTATE_AND_CROP);
         if (rotateAndCropEntry.count > 0) {
-            if (rotateAndCropEntry.data.u8[0] == mRotateAndCropOverride) {
+            if (rotateAndCropEntry.data.u8[0] == rotateAndCropOverride) {
                 return false;
             } else {
-                rotateAndCropEntry.data.u8[0] = mRotateAndCropOverride;
+                rotateAndCropEntry.data.u8[0] = rotateAndCropOverride;
                 return true;
             }
         } else {
-            uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
-            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
-                    &rotateAndCrop_u8, 1);
+            uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
+            metadata.update(ANDROID_SCALER_ROTATE_AND_CROP, &rotateAndCrop_u8, 1);
             return true;
         }
     }
+
+    return false;
+}
+
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+        camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+    auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count > 0) {
+        if (autoframingEntry.data.u8[0] == autoframingOverride) {
+            return false;
+        } else {
+            autoframingEntry.data.u8[0] = autoframingOverride;
+            return true;
+        }
+    } else {
+        uint8_t autoframing_u8 = autoframingOverride;
+        metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+                &autoframing_u8, 1);
+        return true;
+    }
+
+    return false;
+}
+
+bool Camera3Device::RequestThread::overrideAutoframing(const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+
+    if (request->mAutoframingAuto) {
+        Mutex::Autolock l(mTriggerMutex);
+        return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
+    }
+
     return false;
 }
 
@@ -4682,6 +4944,33 @@
     return changed;
 }
 
+bool Camera3Device::RequestThread::overrideSettingsOverride(
+        const sp<CaptureRequest> &request) {
+    ATRACE_CALL();
+
+    if (!mSupportSettingsOverride) return false;
+
+    Mutex::Autolock l(mTriggerMutex);
+
+    // For a multi-camera, only override the logical camera's metadata.
+    CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+    camera_metadata_entry entry = metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+    int32_t originalValue = request->mSettingsList.begin()->mOriginalSettingsOverride;
+    if (mSettingsOverride != -1 &&
+            (entry.count == 0 || entry.data.i32[0] != mSettingsOverride)) {
+        metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+                &mSettingsOverride, 1);
+        return true;
+    } else if (mSettingsOverride == -1 &&
+            (entry.count == 0 || entry.data.i32[0] != originalValue)) {
+        metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+                &originalValue, 1);
+        return true;
+    }
+
+    return false;
+}
+
 status_t Camera3Device::RequestThread::setHalInterface(
         sp<HalInterface> newHalInterface) {
     if (newHalInterface.get() == nullptr) {
@@ -4751,7 +5040,8 @@
     }
 
     // queue up the work
-    mPendingStreams.emplace(maxCount, stream);
+    mPendingStreams.push_back(
+            std::tuple<int, sp<camera3::Camera3StreamInterface>>(maxCount, stream));
     ALOGV("%s: Stream %d queued for preparing", __FUNCTION__, stream->getId());
 
     return OK;
@@ -4762,8 +5052,8 @@
 
     Mutex::Autolock l(mLock);
 
-    std::unordered_map<int, sp<camera3::Camera3StreamInterface> > pendingStreams;
-    pendingStreams.insert(mPendingStreams.begin(), mPendingStreams.end());
+    std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> pendingStreams;
+    pendingStreams.insert(pendingStreams.begin(), mPendingStreams.begin(), mPendingStreams.end());
     sp<camera3::Camera3StreamInterface> currentStream = mCurrentStream;
     int currentMaxCount = mCurrentMaxCount;
     mPendingStreams.clear();
@@ -4784,18 +5074,19 @@
     //of the streams in the pending list.
     if (currentStream != nullptr) {
         if (!mCurrentPrepareComplete) {
-            pendingStreams.emplace(currentMaxCount, currentStream);
+            pendingStreams.push_back(std::tuple(currentMaxCount, currentStream));
         }
     }
 
-    mPendingStreams.insert(pendingStreams.begin(), pendingStreams.end());
+    mPendingStreams.insert(mPendingStreams.begin(), pendingStreams.begin(), pendingStreams.end());
     for (const auto& it : mPendingStreams) {
-        it.second->cancelPrepare();
+        std::get<1>(it)->cancelPrepare();
     }
 }
 
 status_t Camera3Device::PreparerThread::resume() {
     ATRACE_CALL();
+    ALOGV("%s: PreparerThread", __FUNCTION__);
     status_t res;
 
     Mutex::Autolock l(mLock);
@@ -4808,10 +5099,10 @@
 
     auto it = mPendingStreams.begin();
     for (; it != mPendingStreams.end();) {
-        res = it->second->startPrepare(it->first, true /*blockRequest*/);
+        res = std::get<1>(*it)->startPrepare(std::get<0>(*it), true /*blockRequest*/);
         if (res == OK) {
             if (listener != NULL) {
-                listener->notifyPrepared(it->second->getId());
+                listener->notifyPrepared(std::get<1>(*it)->getId());
             }
             it = mPendingStreams.erase(it);
         } else if (res != NOT_ENOUGH_DATA) {
@@ -4845,7 +5136,7 @@
     Mutex::Autolock l(mLock);
 
     for (const auto& it : mPendingStreams) {
-        it.second->cancelPrepare();
+        std::get<1>(it)->cancelPrepare();
     }
     mPendingStreams.clear();
     mCancelNow = true;
@@ -4876,8 +5167,8 @@
 
             // Get next stream to prepare
             auto it = mPendingStreams.begin();
-            mCurrentStream = it->second;
-            mCurrentMaxCount = it->first;
+            mCurrentMaxCount = std::get<0>(*it);
+            mCurrentStream = std::get<1>(*it);
             mCurrentPrepareComplete = false;
             mPendingStreams.erase(it);
             ATRACE_ASYNC_BEGIN("stream prepare", mCurrentStream->getId());
@@ -5102,9 +5393,28 @@
     if (mRequestThread == nullptr) {
         return INVALID_OPERATION;
     }
+    if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+        return BAD_VALUE;
+    }
+    mRotateAndCropOverride = rotateAndCropValue;
     return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
 }
 
+status_t Camera3Device::setAutoframingAutoBehavior(
+    camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+    if (mRequestThread == nullptr) {
+        return INVALID_OPERATION;
+    }
+    if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+        return BAD_VALUE;
+    }
+    mAutoframingOverride = autoframingValue;
+    return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
+}
+
 bool Camera3Device::supportsCameraMute() {
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
@@ -5127,6 +5437,25 @@
     return mRequestThread->setCameraMute(muteMode);
 }
 
+bool Camera3Device::supportsZoomOverride() {
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    return mSupportZoomOverride;
+}
+
+status_t Camera3Device::setZoomOverride(int32_t zoomOverride) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (mRequestThread == nullptr || !mSupportZoomOverride) {
+        return INVALID_OPERATION;
+    }
+
+    return mRequestThread->setZoomOverride(zoomOverride);
+}
+
 status_t Camera3Device::injectCamera(const String8& injectedCamId,
                                      sp<CameraProviderManager> manager) {
     ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
@@ -5198,4 +5527,56 @@
     return mInjectionMethods->stopInjection();
 }
 
+void Camera3Device::overrideStreamUseCaseLocked() {
+    if (mStreamUseCaseOverrides.size() == 0) {
+        return;
+    }
+
+    // Start from an array of indexes in mStreamUseCaseOverrides, and sort them
+    // based first on size, and second on formats of [JPEG, RAW, YUV, PRIV].
+    // Refer to CameraService::printHelp for details.
+    std::vector<int> outputStreamsIndices(mOutputStreams.size());
+    for (size_t i = 0; i < outputStreamsIndices.size(); i++) {
+        outputStreamsIndices[i] = i;
+    }
+
+    std::sort(outputStreamsIndices.begin(), outputStreamsIndices.end(),
+            [&](int a, int b) -> bool {
+
+                auto formatScore = [](int format) {
+                    switch (format) {
+                    case HAL_PIXEL_FORMAT_BLOB:
+                        return 4;
+                    case HAL_PIXEL_FORMAT_RAW16:
+                    case HAL_PIXEL_FORMAT_RAW10:
+                    case HAL_PIXEL_FORMAT_RAW12:
+                        return 3;
+                    case HAL_PIXEL_FORMAT_YCBCR_420_888:
+                        return 2;
+                    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                        return 1;
+                    default:
+                        return 0;
+                    }
+                };
+
+                int sizeA = mOutputStreams[a]->getWidth() * mOutputStreams[a]->getHeight();
+                int sizeB = mOutputStreams[b]->getWidth() * mOutputStreams[b]->getHeight();
+                int formatAScore = formatScore(mOutputStreams[a]->getFormat());
+                int formatBScore = formatScore(mOutputStreams[b]->getFormat());
+                if (sizeA > sizeB ||
+                        (sizeA == sizeB && formatAScore > formatBScore)) {
+                    return true;
+                } else {
+                    return false;
+                }
+            });
+
+    size_t overlapSize = std::min(mStreamUseCaseOverrides.size(), mOutputStreams.size());
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        mOutputStreams[outputStreamsIndices[i]]->setStreamUseCase(
+                mStreamUseCaseOverrides[std::min(i, overlapSize-1)]);
+    }
+}
+
 }; // namespace android
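
The overrideStreamUseCaseLocked() addition above sorts the output streams by size first and by format priority (JPEG, RAW, YUV, PRIV) second before handing out the "adb shell" use-case overrides. The standalone sketch below illustrates that ordering with a strict-weak-ordering comparator; StreamDesc, formatScore, and sortForUseCaseOverride are illustrative stand-ins, not types or functions from this patch.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Illustrative stand-in for an output stream; not the real Camera3OutputStreamInterface.
    struct StreamDesc {
        int32_t width;
        int32_t height;
        int formatScore;  // 4 = JPEG, 3 = RAW, 2 = YUV, 1 = PRIV, 0 = other
    };

    // Returns stream indices sorted by descending area, ties broken by descending
    // format score. The comparator is a strict weak ordering, as std::sort requires.
    std::vector<size_t> sortForUseCaseOverride(const std::vector<StreamDesc>& streams) {
        std::vector<size_t> indices(streams.size());
        for (size_t i = 0; i < indices.size(); i++) indices[i] = i;
        std::sort(indices.begin(), indices.end(), [&](size_t a, size_t b) {
            int64_t areaA = int64_t{streams[a].width} * streams[a].height;
            int64_t areaB = int64_t{streams[b].width} * streams[b].height;
            if (areaA != areaB) return areaA > areaB;      // larger streams first
            return streams[a].formatScore > streams[b].formatScore;
        });
        return indices;
    }

With N overrides and M sorted streams, the i-th stream receives override min(i, N-1), so once the overrides run out the last value is reused, which is what the overlapSize clamp in the patch expresses.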
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index f927b4d..b1dd135 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -20,6 +20,7 @@
 #include <utility>
 #include <unordered_map>
 #include <set>
+#include <tuple>
 
 #include <utils/Condition.h>
 #include <utils/Errors.h>
@@ -49,6 +50,7 @@
 #include "utils/TagMonitor.h"
 #include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
+#include "utils/CameraServiceProxyWrapper.h"
 #include <camera_metadata_hidden.h>
 
 using android::camera3::camera_capture_request_t;
@@ -82,7 +84,9 @@
   friend class AidlCamera3Device;
   public:
 
-    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
+    explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+            bool legacyClient = false);
 
     virtual ~Camera3Device();
     // Delete and optionally close native handles and clear the input vector afterward
@@ -115,6 +119,7 @@
     status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
     const CameraMetadata& info() const override;
     const CameraMetadata& infoPhysical(const String8& physicalId) const override;
+    bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
 
     // Capture and setStreamingRequest will configure streams if currently in
     // idle state
@@ -149,7 +154,10 @@
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false)
+            override;
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -164,7 +172,10 @@
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false)
+            override;
 
     status_t createInputStream(
             uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -267,6 +278,14 @@
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
 
     /**
+     * Set the current behavior for the AUTOFRAMING control when in AUTO.
+     *
+     * The value must be one of the AUTOFRAMING_* values besides AUTO.
+     */
+    status_t setAutoframingAutoBehavior(
+            camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
+    /**
      * Whether camera muting (producing black-only output) is supported.
      *
      * Calling setCameraMute(true) when this returns false will return an
@@ -281,9 +300,32 @@
      */
     status_t setCameraMute(bool enabled);
 
+    /**
+     * Enables/disables camera service watchdog
+     */
+    status_t setCameraServiceWatchdog(bool enabled);
+
+    // Set stream use case overrides
+    void setStreamUseCaseOverrides(
+            const std::vector<int64_t>& useCaseOverrides);
+
+    // Clear stream use case overrides
+    void clearStreamUseCaseOverrides();
+
+    /**
+     * Whether the camera device supports zoom override.
+     */
+    bool supportsZoomOverride();
+
+    // Set/reset zoom override
+    status_t setZoomOverride(int32_t zoomOverride);
+
     // Get the status tracker for the camera device
     wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
 
+    // Whether the device is in error state
+    bool hasDeviceError();
+
     /**
      * The injection camera session to replace the internal camera
      * session.
@@ -320,8 +362,11 @@
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
+    std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
     // A lock to enforce serialization on the input/configure side
     // of the public interface.
+    // Only locked by public methods inherited from CameraDeviceBase.
     // Not locked by methods guarded by mOutputLock, since they may act
     // concurrently to the input/configure side of the interface.
     // Must be locked before mLock if both will be locked by a method
@@ -376,7 +421,7 @@
 
         virtual status_t configureStreams(const camera_metadata_t * sessionParams,
                 /*inout*/ camera_stream_configuration_t * config,
-                const std::vector<uint32_t>& bufferSizes) = 0;
+                const std::vector<uint32_t>& bufferSizes, int64_t logId) = 0;
 
         // The injection camera configures the streams to hal.
         virtual status_t configureInjectedStreams(
@@ -453,6 +498,28 @@
                 // Verify buffer caches
                 std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
                         offlineStream.circulatingBufferIds.end());
+                {
+                    // Due to timing it is possible that we may not have any remaining pending
+                    // capture requests that can update the caches on Hal side. This can result in
+                    // buffer cache mismatch between the service and the Hal and must be accounted
+                    // for.
+                    std::lock_guard<std::mutex> l(mFreedBuffersLock);
+                    for (const auto& it : mFreedBuffers) {
+                        if (it.first == id) {
+                            ALOGV("%s: stream ID %d buffer id %" PRIu64 " cache removal still "
+                                    "pending", __FUNCTION__, id, it.second);
+                            const auto& cachedEntry = std::find(bufIds.begin(), bufIds.end(),
+                                    it.second);
+                            if (cachedEntry != bufIds.end()) {
+                                bufIds.erase(cachedEntry);
+                            } else {
+                                ALOGE("%s: stream ID %d buffer id %" PRIu64 " cache removal still "
+                                        "pending however buffer is no longer in the offline stream "
+                                        "info!", __FUNCTION__, id, it.second);
+                            }
+                        }
+                    }
+                }
                 if (!verifyBufferIds(id, bufIds)) {
                     ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
                     return UNKNOWN_ERROR;
@@ -486,6 +553,7 @@
 
     CameraMetadata             mDeviceInfo;
     bool                       mSupportNativeZoomRatio;
+    bool                       mIsCompositeJpegRDisabled;
     std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
 
     CameraMetadata             mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -504,8 +572,15 @@
         STATUS_ACTIVE
     }                          mStatus;
 
+    struct StatusInfo {
+        Status status;
+        bool isInternal; // status triggered by internal reconfigureCamera.
+    };
+
+    bool                       mStatusIsInternal;
+
     // Only clear mRecentStatusUpdates, mStatusWaiters from waitUntilStateThenRelock
-    Vector<Status>             mRecentStatusUpdates;
+    Vector<StatusInfo>         mRecentStatusUpdates;
     int                        mStatusWaiters;
 
     Condition                  mStatusChanged;
@@ -568,6 +643,14 @@
         // overriding of ROTATE_AND_CROP value and adjustment of coordinates
         // in several other controls in both the request and the result
         bool                                mRotateAndCropAuto;
+        // Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
+        // irrespective of the original value.
+        bool                                mRotateAndCropChanged = false;
+        // Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
+        // in the capture request.
+        bool                                mAutoframingAuto;
+        // Indicates that the auto framing value within 'mSettingsList' was modified
+        bool                                mAutoframingChanged = false;
 
         // Whether this capture request has its zoom ratio set to 1.0x before
         // the framework overrides it for camera HAL consumption.
@@ -581,6 +664,8 @@
         // Whether this capture request's rotation and crop update has been
         // done.
         bool                                mRotationAndCropUpdated = false;
+        // Whether this capture request's autoframing has been done.
+        bool                                mAutoframingUpdated = false;
         // Whether this capture request's zoom ratio update has been done.
         bool                                mZoomRatioUpdated = false;
         // Whether this max resolution capture request's  crop / metering region update has been
@@ -640,7 +725,8 @@
      * CameraDeviceBase interface we shouldn't need to.
      * Must be called with mLock and mInterfaceLock both held.
      */
-    status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration);
+    status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+                     bool requestThreadInvocation);
 
     /**
      * Resume work after internalPauseAndWaitLocked()
@@ -659,7 +745,8 @@
      * During the wait mLock is released.
      *
      */
-    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
+    status_t waitUntilStateThenRelock(bool active, nsecs_t timeout,
+                     bool requestThreadInvocation);
 
     /**
      * Implementation of waitUntilDrained. On success, will transition to IDLE state.
@@ -754,6 +841,15 @@
      */
     static nsecs_t getMonoToBoottimeOffset();
 
+    // Override rotate_and_crop control if needed
+    static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
+            bool overrideToPortrait,
+            camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+
+    // Override auto framing control if needed
+    static bool    overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+            camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
     struct RequestTrigger {
         // Metadata tag number, e.g. android.control.aePrecaptureTrigger
         uint32_t metadataTag;
@@ -783,7 +879,9 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
         ~RequestThread();
 
         void     setNotificationListener(wp<NotificationListener> listener);
@@ -834,6 +932,9 @@
          */
         void     setPaused(bool paused);
 
+        // set mRequestClearing - no new requests are expected to be queued to RequestThread
+        void setRequestClearing();
+
         /**
          * Wait until thread processes the capture request with settings'
          * android.request.id == requestId.
@@ -879,10 +980,16 @@
 
         status_t setRotateAndCropAutoBehavior(
                 camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
+
+        status_t setAutoframingAutoBehaviour(
+                camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
         status_t setComposerSurface(bool composerSurfacePresent);
 
         status_t setCameraMute(int32_t muteMode);
 
+        status_t setZoomOverride(int32_t zoomOverride);
+
         status_t setHalInterface(sp<HalInterface> newHalInterface);
 
       protected:
@@ -903,12 +1010,19 @@
         status_t           addFakeTriggerIds(const sp<CaptureRequest> &request);
 
         // Override rotate_and_crop control if needed; returns true if the current value was changed
-        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
+        bool               overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
+
+        // Override autoframing control if needed; returns true if the current value was changed
+        bool               overrideAutoframing(const sp<CaptureRequest> &request);
 
         // Override test_pattern control if needed for camera mute; returns true
         // if the current value was changed
         bool               overrideTestPattern(const sp<CaptureRequest> &request);
 
+        // Override settings override if needed for lower zoom latency; return
+        // true if the current value was changed
+        bool               overrideSettingsOverride(const sp<CaptureRequest> &request);
+
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
@@ -962,8 +1076,13 @@
         // send request in mNextRequests to HAL in a batch. Return true = success
         bool sendRequestsBatch();
 
-        // Calculate the expected (minimum, maximum) duration range for a request
-        std::pair<nsecs_t, nsecs_t> calculateExpectedDurationRange(
+        // Calculate the expected (minimum, maximum, isFixedFps) duration info for a request
+        struct ExpectedDurationInfo {
+            nsecs_t minDuration;
+            nsecs_t maxDuration;
+            bool isFixedFps;
+        };
+        ExpectedDurationInfo calculateExpectedDurationRange(
                 const camera_metadata_t *request);
 
         // Check and update latest session parameters based on the current request settings.
@@ -983,7 +1102,7 @@
 
         wp<NotificationListener> mListener;
 
-        const String8&     mId;       // The camera ID
+        const String8      mId;       // The camera ID
         int                mStatusId; // The RequestThread's component ID for
                                       // status tracking
 
@@ -1034,9 +1153,12 @@
         uint32_t           mCurrentAfTriggerId;
         uint32_t           mCurrentPreCaptureTriggerId;
         camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+        camera_metadata_enum_android_control_autoframing_t mAutoframingOverride;
         bool               mComposerOutput;
         int32_t            mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
         bool               mCameraMuteChanged;
+        int32_t            mSettingsOverride; // -1 = use original, otherwise
+                                              // the settings override to use.
 
         int64_t            mRepeatingLastFrameNumber;
 
@@ -1055,6 +1177,9 @@
 
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
+        const bool         mOverrideToPortrait;
+        const bool         mSupportSettingsOverride;
+        int32_t            mVndkVersion = -1;
     };
 
     virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1062,7 +1187,9 @@
                 sp<HalInterface> /*interface*/,
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
-                bool /*supportCameraMute*/) = 0;
+                bool /*supportCameraMute*/,
+                bool /*overrideToPortrait*/,
+                bool /*supportSettingsOverride*/) = 0;
 
     sp<RequestThread> mRequestThread;
 
@@ -1082,8 +1209,8 @@
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
             bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
-            const std::set<std::set<String8>>& physicalCameraIds,
-            bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+            bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
+            bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
             nsecs_t requestTimeNs);
 
@@ -1140,7 +1267,7 @@
         // Guarded by mLock
 
         wp<NotificationListener> mListener;
-        std::unordered_map<int, sp<camera3::Camera3StreamInterface> > mPendingStreams;
+        std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> mPendingStreams;
         bool mActive;
         bool mCancelNow;
 
@@ -1327,13 +1454,34 @@
     bool mSupportCameraMute = false;
     // Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
     bool mSupportTestPatternSolidColor = false;
+    // Whether the HAL supports zoom settings override
+    bool mSupportZoomOverride = false;
 
     // Whether the camera framework overrides the device characteristics for
     // performance class.
     bool mOverrideForPerfClass;
 
+    // Whether the camera framework overrides the device characteristics for
+    // app compatibility reasons.
+    bool mOverrideToPortrait;
+    camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+    bool mComposerOutput;
+
+    // Auto framing override value
+    camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+
+    // Settings override value
+    int32_t mSettingsOverride; // -1 = use original, otherwise
+                               // the settings override to use.
+
+    // Current active physical id of the logical multi-camera, if any
+    std::string mActivePhysicalId;
+
     // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
     nsecs_t mMinExpectedDuration = 0;
+    // Whether the camera device runs at fixed frame rate based on AE_MODE and
+    // AE_TARGET_FPS_RANGE
+    bool mIsFixedFps = false;
 
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
@@ -1417,6 +1565,8 @@
 
     sp<Camera3DeviceInjectionMethods> mInjectionMethods;
 
+    void overrideStreamUseCaseLocked();
+
 }; // class Camera3Device
 
 }; // namespace android
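
The ExpectedDurationInfo struct declared above bundles the minimum and maximum expected frame durations with an isFixedFps flag. Below is a minimal sketch of how those values follow from an AE target FPS range, assuming nanosecond durations and a valid non-zero range; durationFromFpsRange is a hypothetical helper, not an API introduced by this patch.

    #include <cstdint>

    struct ExpectedDurationInfo {
        int64_t minDurationNs;
        int64_t maxDurationNs;
        bool isFixedFps;
    };

    // minFps/maxFps are the two values stored in ANDROID_CONTROL_AE_TARGET_FPS_RANGE.
    // The fastest allowed frame rate bounds the shortest frame duration, and vice versa.
    ExpectedDurationInfo durationFromFpsRange(int32_t minFps, int32_t maxFps) {
        constexpr int64_t kSecondNs = 1000000000LL;
        ExpectedDurationInfo info;
        info.minDurationNs = kSecondNs / maxFps;
        info.maxDurationNs = kSecondNs / minFps;
        info.isFixedFps = (minFps == maxFps);
        return info;
    }

A locked range such as [30, 30] yields equal durations of roughly 33.3 ms and marks the request as fixed-FPS, which the device tracks in mIsFixedFps and forwards to registerInFlight().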
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index 031c255..ab772d4 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -58,7 +58,8 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+                                                 /*requestThreadInvocation*/false);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
             return res;
@@ -117,7 +118,8 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+        res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+                                                 /*requestThreadInvocation*/false);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
             return res;
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 19afd69..8c0ac71 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -67,8 +67,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Fake\n", mId);
     write(fd, lines.string(), lines.size());
@@ -82,9 +81,8 @@
     return OK;
 }
 
-status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
-    (void) buffer;
-    (void) fenceFd;
+status_t Camera3FakeStream::detachBuffer([[maybe_unused]] sp<GraphicBuffer>* buffer,
+                [[maybe_unused]] int* fenceFd) {
     // Do nothing
     return OK;
 }
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 8cecabd..1e9f478 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -100,7 +100,9 @@
 
     virtual status_t setBatchSize(size_t batchSize) override;
 
-    virtual void onMinDurationChanged(nsecs_t /*duration*/) {}
+    virtual void onMinDurationChanged(nsecs_t /*duration*/, bool /*fixedFps*/) {}
+
+    virtual void setStreamUseCase(int64_t /*streamUseCase*/) {}
   protected:
 
     /**
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index add1483..fbaaf7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -35,14 +35,17 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
-        bool deviceTimeBaseIsRealtime, int timestampBase) :
+        bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
                 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
-                dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+                dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+                colorSpace),
         mTotalBufferCount(0),
+        mMaxCachedBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
+        mCachedOutputBufferCount(0),
         mFrameCount(0),
         mLastTimestamp(0) {
 
@@ -71,8 +74,7 @@
     return false;
 }
 
-void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
 
     uint64_t consumerUsage = 0;
@@ -91,12 +93,13 @@
     }
     lines.appendFormat("      Dynamic Range Profile: 0x%" PRIx64 "\n",
             camera_stream::dynamic_range_profile);
+    lines.appendFormat("      Color Space: %d\n", camera_stream::color_space);
     lines.appendFormat("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
     lines.appendFormat("      Timestamp base: %d\n", getTimestampBase());
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
-    lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
-            mTotalBufferCount, mHandoutTotalBufferCount);
+    lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu, currently cached: %zu\n",
+            mTotalBufferCount, mHandoutTotalBufferCount, mCachedOutputBufferCount);
     write(fd, lines.string(), lines.size());
 
     Camera3Stream::dump(fd, args);
@@ -135,6 +138,14 @@
     return (mHandoutTotalBufferCount - mHandoutOutputBufferCount);
 }
 
+size_t Camera3IOStreamBase::getCachedOutputBufferCountLocked() const {
+    return mCachedOutputBufferCount;
+}
+
+size_t Camera3IOStreamBase::getMaxCachedOutputBuffersLocked() const {
+    return mMaxCachedBufferCount;
+}
+
 status_t Camera3IOStreamBase::disconnectLocked() {
     switch (mState) {
         case STATE_IN_RECONFIG:
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index f389d53..6af0875 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
-            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
 
   public:
 
@@ -56,11 +57,18 @@
     int              getMaxTotalBuffers() const { return mTotalBufferCount; }
   protected:
     size_t            mTotalBufferCount;
+    // The maximum number of cached buffers allowed for this stream
+    size_t            mMaxCachedBufferCount;
+
     // sum of input and output buffers that are currently acquired by HAL
     size_t            mHandoutTotalBufferCount;
     // number of output buffers that are currently acquired by HAL. This will be
     // Redundant when camera3 streams are no longer bidirectional streams.
     size_t            mHandoutOutputBufferCount;
+    // number of cached output buffers that are currently queued in the camera
+    // server but not yet queued to the buffer queue.
+    size_t            mCachedOutputBufferCount;
+
     uint32_t          mFrameCount;
     // Last received output buffer's timestamp
     nsecs_t           mLastTimestamp;
@@ -97,6 +105,9 @@
 
     virtual size_t   getHandoutInputBufferCountLocked();
 
+    virtual size_t   getCachedOutputBufferCountLocked() const;
+    virtual size_t   getMaxCachedOutputBuffersLocked() const;
+
     virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
 
     status_t getBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 9a3f7ed..631bb43 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -104,17 +104,14 @@
 
 status_t Camera3InputStream::returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
-            nsecs_t timestamp,
-            nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] nsecs_t timestamp,
+            [[maybe_unused]] nsecs_t readoutTimestamp,
+            [[maybe_unused]] bool output,
             int32_t /*transform*/,
             const std::vector<size_t>&,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)timestamp;
-    (void)readoutTimestamp;
-    (void)output;
     ALOG_ASSERT(!output, "Expected output to be false");
 
     status_t res;
@@ -218,8 +215,7 @@
     return OK;
 }
 
-void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Input\n", mId);
     write(fd, lines.string(), lines.size());
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 7cfa255..1e7bd57 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -81,7 +81,6 @@
 Camera3OfflineSession::~Camera3OfflineSession() {
     ATRACE_CALL();
     ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.string());
-    disconnectImpl();
 }
 
 const String8& Camera3OfflineSession::getId() const {
@@ -96,7 +95,6 @@
 
 status_t Camera3OfflineSession::disconnect() {
     ATRACE_CALL();
-    disconnectSession();
     return disconnectImpl();
 }
 
@@ -132,6 +130,8 @@
         streams.push_back(mInputStream);
     }
 
+    closeSessionLocked();
+
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
         listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
@@ -140,6 +140,7 @@
 
     {
         std::lock_guard<std::mutex> lock(mLock);
+        releaseSessionLocked();
         mOutputStreams.clear();
         mInputStream.clear();
         mStatus = STATUS_CLOSED;
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index a799719..e780043 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -248,6 +248,9 @@
 
     // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
     nsecs_t mMinExpectedDuration = 0;
+    // Whether the camera device runs at fixed frame rate based on AE_MODE and
+    // AE_TARGET_FPS_RANGE
+    bool mIsFixedFps = false;
 
     // SetErrorInterface
     void setErrorState(const char *fmt, ...) override;
@@ -271,7 +274,12 @@
     void setErrorStateLockedV(const char *fmt, va_list args);
 
     status_t disconnectImpl();
-    virtual void disconnectSession() = 0;
+
+    // Clients need to ensure that 'mInterfaceLock' is acquired before calling this method
+    virtual void closeSessionLocked() = 0;
+
+    // Clients need to ensure that 'mLock' is acquired before calling this method
+    virtual void releaseSessionLocked() = 0;
 
 }; // class Camera3OfflineSession
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 1e20ee0..a387064 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -24,6 +24,7 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include <android-base/unique_fd.h>
 #include <cutils/properties.h>
@@ -56,17 +57,18 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
+        mUseReadoutTime(useReadoutTimestamp),
         mConsumerUsage(0),
         mDropBuffers(false),
         mMirrorMode(mirrorMode),
@@ -90,16 +92,17 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
-                            deviceTimeBaseIsRealtime, timestampBase),
+                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
+        mUseReadoutTime(useReadoutTimestamp),
         mConsumerUsage(0),
         mDropBuffers(false),
         mMirrorMode(mirrorMode),
@@ -129,17 +132,18 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
+        mUseReadoutTime(useReadoutTimestamp),
         mConsumerUsage(consumerUsage),
         mDropBuffers(false),
         mMirrorMode(mirrorMode),
@@ -177,17 +181,19 @@
                                          int setId, bool isMultiResolution,
                                          int64_t dynamicRangeProfile, int64_t streamUseCase,
                                          bool deviceTimeBaseIsRealtime, int timestampBase,
-                                         int mirrorMode) :
+                                         int mirrorMode, int32_t colorSpace,
+                                         bool useReadoutTimestamp) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
                             physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                             dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
-                            timestampBase),
+                            timestampBase, colorSpace),
         mTransform(0),
         mTraceFirstBuffer(true),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
+        mUseReadoutTime(useReadoutTimestamp),
         mConsumerUsage(consumerUsage),
         mDropBuffers(false),
         mMirrorMode(mirrorMode),
@@ -327,7 +333,7 @@
     status_t res =
             gbLocker.lockAsync(
                     GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
-                    &mapped, fenceFd.get());
+                    &mapped, fenceFd.release());
     if (res != OK) {
         ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
         return res;
@@ -389,13 +395,12 @@
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
             nsecs_t readoutTimestamp,
-            bool output,
+            [[maybe_unused]] bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
             /*out*/
             sp<Fence> *releaseFenceOut) {
 
-    (void)output;
     ALOG_ASSERT(output, "Expected output to be true");
 
     status_t res;
@@ -415,6 +420,7 @@
     mLock.unlock();
 
     ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
+    bool bufferDeferred = false;
     /**
      * Return buffer back to ANativeWindow
      */
@@ -451,7 +457,10 @@
             mTraceFirstBuffer = false;
         }
         // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
-        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
+                    (getDataSpace() ==
+                     static_cast<android_dataspace_t>(
+                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
             if (mIPCTransport == IPCTransport::HIDL) {
                 fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
             }
@@ -462,19 +471,22 @@
             }
         }
 
+        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
+                readoutTimestamp : timestamp) - mTimestampOffset;
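+        // Note: a readout timestamp of 0 means the HAL did not report one, so
+        // the expression above falls back to the start-of-exposure timestamp.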
         if (mPreviewFrameSpacer != nullptr) {
-            res = mPreviewFrameSpacer->queuePreviewBuffer(timestamp - mTimestampOffset,
-                    readoutTimestamp - mTimestampOffset, transform, anwBuffer, anwReleaseFence);
+            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
+                    - mTimestampOffset;
+            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
+                    transform, anwBuffer, anwReleaseFence);
             if (res != OK) {
                 ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                         __FUNCTION__, mId, strerror(-res), res);
                 return res;
             }
+            bufferDeferred = true;
         } else {
-            nsecs_t captureTime = (mSyncToDisplay ? readoutTimestamp : timestamp)
-                    - mTimestampOffset;
             nsecs_t presentTime = mSyncToDisplay ?
-                    syncTimestampToDisplayLocked(captureTime) : captureTime;
+                    syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
 
             setTransform(transform, true/*mayChangeMirror*/);
             res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -495,23 +507,22 @@
     }
     mLock.lock();
 
+    if (bufferDeferred) {
+        mCachedOutputBufferCount++;
+    }
+
     // Once a valid buffer has been returned to the queue, can no longer
     // dequeue all buffers for preallocation.
     if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
         mStreamUnpreparable = true;
     }
 
-    if (res != OK) {
-        close(anwReleaseFence);
-    }
-
     *releaseFenceOut = releaseFence;
 
     return res;
 }
 
-void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) args;
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
     String8 lines;
     lines.appendFormat("    Stream[%d]: Output\n", mId);
     lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
@@ -690,13 +701,19 @@
                 !isVideoStream());
         if (forceChoreographer || defaultToChoreographer) {
             mSyncToDisplay = true;
+            // For choreographer synced stream, extra buffers aren't kept by
+            // camera service. So no need to update mMaxCachedBufferCount.
             mTotalBufferCount += kDisplaySyncExtraBuffer;
         } else if (defaultToSpacer) {
             mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
-            mTotalBufferCount ++;
+            // For preview frame spacer, the extra buffer is kept by camera
+            // service. So update mMaxCachedBufferCount.
+            mMaxCachedBufferCount = 1;
+            mTotalBufferCount += mMaxCachedBufferCount;
             res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
             if (res != OK) {
-                ALOGE("%s: Unable to start preview spacer", __FUNCTION__);
+                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
+                        strerror(-res), res);
                 return res;
             }
         }
@@ -720,7 +737,7 @@
             // Reverse offset for monotonicTime -> bootTime
             mTimestampOffset = -mTimestampOffset;
         } else {
-            // If timestampBase is DEFAULT, MONOTONIC, SENSOR, or
+            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
             // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
             mTimestampOffset = 0;
         }
@@ -958,6 +975,14 @@
     return true;
 }
 
+void Camera3OutputStream::onCachedBufferQueued() {
+    Mutex::Autolock l(mLock);
+    mCachedOutputBufferCount--;
+    // Signal whoever is waiting for the buffer to be returned to the buffer
+    // queue.
+    mOutputBufferReturnedSignal.signal();
+}
+
 status_t Camera3OutputStream::disconnectLocked() {
     status_t res;
 
@@ -1298,7 +1323,7 @@
     void* mapped = nullptr;
     base::unique_fd fenceFd(dup(fence));
     status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
-            fenceFd.get());
+            fenceFd.release());
     if (res != OK) {
         ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
         return;
@@ -1357,9 +1382,15 @@
     return OK;
 }
 
-void Camera3OutputStream::onMinDurationChanged(nsecs_t duration) {
+void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
     Mutex::Autolock l(mLock);
     mMinExpectedDuration = duration;
+    mFixedFps = fixedFps;
+}
+
+void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
+    Mutex::Autolock l(mLock);
+    camera_stream::use_case = streamUseCase;
 }
 
 void Camera3OutputStream::returnPrefetchedBuffersLocked() {
@@ -1379,31 +1410,41 @@
     }
 }
 
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
+    nsecs_t currentTime = systemTime();
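+    // Variable frame rate streams skip display vsync alignment entirely; just
+    // record the capture and arrival times consulted on subsequent calls.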
+    if (!mFixedFps) {
+        mLastCaptureTime = t;
+        mLastPresentTime = currentTime;
+        return t;
+    }
+
     ParcelableVsyncEventData parcelableVsyncEventData;
     auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
     if (res != OK) {
         ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
                 __FUNCTION__, mId, strerror(-res), res);
         mLastCaptureTime = t;
-        mLastPresentTime = t;
+        mLastPresentTime = currentTime;
         return t;
     }
 
     const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
-    nsecs_t currentTime = systemTime();
+    nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
 
-    // Reset capture to present time offset if:
-    // - More than 1 second between frames.
-    // - The frame duration deviates from multiples of vsync frame intervals.
+    // Find the best presentation time without worrying about previous frame's
+    // presentation time if capture interval is more than kSpacingResetIntervalNs.
+    //
+    // When frames are more than 50 ms apart (3 vsyncs for a 60hz refresh rate),
+    // there is little risk in starting over and finding the earliest vsync to latch onto.
+    // - Update captureToPresentTime offset to be used for later frames.
+    // - Example use cases:
+    //   - the frame rate drops below 20 fps, or
+    //   - a new streaming session starts (stopPreview followed by
+    //     startPreview)
+    //
     nsecs_t captureInterval = t - mLastCaptureTime;
-    float captureToVsyncIntervalRatio = 1.0f * captureInterval / vsyncEventData.frameInterval;
-    float ratioDeviation = std::fabs(
-            captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
-    if (captureInterval > kSpacingResetIntervalNs ||
-            ratioDeviation >= kMaxIntervalRatioDeviation) {
-        nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
-        for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+    if (captureInterval > kSpacingResetIntervalNs) {
+        for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
             const auto& timeline = vsyncEventData.frameTimelines[i];
             if (timeline.deadlineTimestamp >= currentTime &&
                     timeline.expectedPresentationTime > minPresentT) {
@@ -1412,6 +1453,17 @@
                 mLastCaptureTime = t;
                 mLastPresentTime = presentT;
 
+                // If releaseFence is available, store the fence to check signal
+                // time later.
+                mRefVsyncData = vsyncEventData;
+                mReferenceCaptureTime = t;
+                mReferenceArrivalTime = currentTime;
+                if (releaseFence != -1) {
+                    mReferenceFrameFence = new Fence(releaseFence);
+                } else {
+                    mFenceSignalOffset = 0;
+                }
+
                 // Move the expected presentation time back by 1/3 of frame interval to
                 // mitigate the time drift. Due to time drift, if we directly use the
                 // expected presentation time, often times 2 expected presentation time
@@ -1421,25 +1473,91 @@
         }
     }
 
+    // If there is a reference frame release fence, get the signal time and
+    // update the captureToPresentOffset.
+    if (mReferenceFrameFence != nullptr) {
+        mFenceSignalOffset = 0;
+        nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
+        // Now that the fence has signaled, recalculate the offsets based on
+        // the timeline which was actually latched
+        if (signalTime != INT64_MAX) {
+            for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
+                const auto& timeline = mRefVsyncData.frameTimelines[i];
+                if (timeline.deadlineTimestamp >= signalTime) {
+                    nsecs_t originalOffset = mCaptureToPresentOffset;
+                    mCaptureToPresentOffset = timeline.expectedPresentationTime
+                            - mReferenceCaptureTime;
+                    mLastPresentTime = timeline.expectedPresentationTime;
+                    mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
+                            signalTime - mReferenceArrivalTime : 0;
+
+                    ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
+                            " original offset %" PRId64 " new offset %" PRId64
+                            " fencesignal offset %" PRId64, __FUNCTION__,
+                            timeline.deadlineTimestamp, signalTime, originalOffset,
+                            mCaptureToPresentOffset, mFenceSignalOffset);
+                    break;
+                }
+            }
+            mReferenceFrameFence.clear();
+        }
+    }
+
     nsecs_t idealPresentT = t + mCaptureToPresentOffset;
     nsecs_t expectedPresentT = mLastPresentTime;
     nsecs_t minDiff = INT64_MAX;
-    // Derive minimum intervals between presentation times based on minimal
+
+    // In the fixed FPS case, when frame durations are close to multiples of the display
+    // refresh rate, derive minimum intervals between presentation times based on the minimal
     // expected duration. The minimum number of Vsyncs is:
     // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
     // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
     // - and so on.
+    //
+    // This spaces out the displaying of the frames so that the frame
+    // presentations are roughly in sync with frame captures.
     int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
             vsyncEventData.frameInterval;
     if (minVsyncs < 0) minVsyncs = 0;
     nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
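+    // Worked example (illustration only): a 30 fps capture on a 60 Hz panel has
+    // mMinExpectedDuration ~= 33.3 ms and frameInterval ~= 16.67 ms, so
+    // minVsyncs = (33.3 - 8.3) / 16.67 = 1 and minInterval = 16.67 ms, i.e.
+    // consecutive presentations are spaced at least one vsync apart.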
+
+    // In the fixed FPS case, if the frame duration deviates from multiples of the
+    // display refresh rate, find the closest Vsync without requiring a minimum
+    // number of Vsyncs.
+    //
+    // Example: (24fps camera, 60hz refresh):
+    //   capture readout:  |  t1  |  t1  | .. |  t1  | .. |  t1  | .. |  t1  |
+    //   display VSYNC:      | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
+    //   |  : 1 frame
+    //   t1 : 41.67ms
+    //   t2 : 16.67ms
+    //   t1/t2 = 2.5
+    //
+    //   24fps is a commonly used video frame rate. Because the capture
+    //   interval is 2.5 times of display refresh interval, the minVsyncs
+    //   calculation will directly fall at the boundary condition. In this case,
+    //   we should fall back to the basic logic of finding closest vsync
+    //   timestamp without worrying about minVsyncs.
+    float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
+    float ratioDeviation = std::fabs(
+            captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
+    bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
+    bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);
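+    // For the 24 fps / 60 Hz example above: captureToVsyncIntervalRatio = 41.67 / 16.67
+    // = 2.5 and ratioDeviation = 0.5 >= 0.05, so cameraDisplayInSync is false and the
+    // minVsyncs spacing constraint below is not applied.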
+
     // Find best timestamp in the vsync timelines:
-    // - Only use at most 3 timelines to avoid long latency
-    // - closest to the ideal present time,
+    // - Only use at most kMaxTimelines timelines to avoid long latency
+    // - Add an extra timeline if display fence is used
+    // - closest to the ideal presentation time,
     // - deadline timestamp is greater than the current time, and
-    // - the candidate present time is at least minInterval in the future
-    //   compared to last present time.
-    int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
+    // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
+    //   the candidate presentation time is at least minInterval in the future compared to last
+    //   presentation time.
+    // - For variable FPS, or if the capture interval deviates from the refresh
+    //   interval by more than 5%, find a presentation time closest to the
+    //   (lastPresentationTime + captureToPresentOffset) instead.
+    int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
+    int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
+            (int)vsyncEventData.frameTimelinesLength);
     float biasForShortDelay = 1.0f;
     for (int i = 0; i < maxTimelines; i ++) {
         const auto& vsyncTime = vsyncEventData.frameTimelines[i];
@@ -1450,13 +1568,52 @@
             biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
         }
         if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
-                vsyncTime.deadlineTimestamp >= currentTime &&
-                vsyncTime.expectedPresentationTime >
-                mLastPresentTime + minInterval + biasForShortDelay * kTimelineThresholdNs) {
+                vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
+                ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
+                 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
+                mLastPresentTime + minInterval +
+                    static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
             expectedPresentT = vsyncTime.expectedPresentationTime;
             minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
         }
     }
+
+    if (expectedPresentT == mLastPresentTime && expectedPresentT <
+            vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
+        // Couldn't find a reasonable presentation time. Using last frame's
+        // presentation time would cause a frame drop. The best option now
+        // is to use the next VSync as long as the last presentation time
+        // doesn't already have the maximum latency, in which case dropping the
+        // buffer is preferable to increasing latency.
+        //
+        // Example: (60fps camera, 59.9hz refresh):
+        //   capture readout:  | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
+        //                      \    \    \     \    \    \    \     \   \
+        //   queue to BQ:       |    |    |     |    |    |    |      |    |
+        //                      \    \    \     \    \     \    \      \    \
+        //   display VSYNC:      | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
+        //
+        //   |: 1 frame
+        //   t1 : 16.67ms
+        //   t2 : 16.69ms
+        //
+        // It takes 833 frames for capture readout count and display VSYNC count to be off
+        // by 1.
+        //  - At frames [0, 832], presentationTime is set to timeline[0]
+        //  - At frames [833, 833*2-1], presentationTime is set to timeline[1]
+        //  - At frames [833*2, 833*3-1] presentationTime is set to timeline[2]
+        //  - At frame 833*3, no presentation time is found because we only
+        //    search for timeline[0..2].
+        //  - Dropping one buffer is better than further extending the
+        //    presentation time.
+        //
+        // However, if frame 833*2 arrives 16.67ms early (right after frame
+        // 833*2-1), no presentation time can be found because
+        // getLatestVsyncEventData is called early. In that case, it's better to
+        // set the presentation time by offsetting the last presentation time.
+        expectedPresentT += vsyncEventData.frameInterval;
+    }
+
     mLastCaptureTime = t;
     mLastPresentTime = expectedPresentT;
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index e8065ce..1435081 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,9 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -113,7 +115,9 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +133,9 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false);
 
     virtual ~Camera3OutputStream();
 
@@ -247,9 +253,15 @@
     virtual status_t setBatchSize(size_t batchSize = 1) override;
 
     /**
-     * Notify the stream on change of min frame durations.
+     * Notify the stream on change of min frame durations or variable/fixed
+     * frame rate.
      */
-    virtual void onMinDurationChanged(nsecs_t duration) override;
+    virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) override;
+
+    /**
+     * Modify stream use case
+     */
+    virtual void setStreamUseCase(int64_t streamUseCase) override;
 
     /**
      * Apply ZSL related consumer usage quirk.
@@ -258,6 +270,7 @@
 
     void setImageDumpMask(int mask) { mImageDumpMask = mask; }
     bool shouldLogError(status_t res);
+    void onCachedBufferQueued();
 
   protected:
     Camera3OutputStream(int id, camera_stream_type_t type,
@@ -271,7 +284,9 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false);
 
     /**
      * Note that we release the lock briefly in this function
@@ -341,6 +356,11 @@
     nsecs_t mTimestampOffset;
 
     /**
+     * Whether the camera readout time is used rather than the start-of-exposure time.
+     */
+    bool mUseReadoutTime;
+
+    /**
      * Consumer end point usage flag set by the constructor for the deferred
      * consumer case.
      */
@@ -414,6 +434,7 @@
 
     // Re-space frames by overriding timestamp to align with display Vsync.
     // Default is on for SurfaceView bound streams.
+    bool    mFixedFps = false;
     nsecs_t mMinExpectedDuration = 0;
     bool mSyncToDisplay = false;
     DisplayEventReceiver mDisplayEventReceiver;
@@ -424,8 +445,15 @@
     static constexpr nsecs_t kSpacingResetIntervalNs = 50000000LL; // 50 millisecond
     static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
     static constexpr float kMaxIntervalRatioDeviation = 0.05f;
-    static constexpr int kMaxTimelines = 3;
-    nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+    static constexpr int kMaxTimelines = 2;
+    nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+
+    // In case of fence being used
+    sp<Fence> mReferenceFrameFence;
+    nsecs_t mReferenceCaptureTime = 0;
+    nsecs_t mReferenceArrivalTime = 0;
+    nsecs_t mFenceSignalOffset = 0;
+    VsyncEventData  mRefVsyncData;
 
     // Re-space frames by delaying queueBuffer so that frame delivery has
     // the same cadence as capture. Default is on for SurfaceTexture bound
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index a6d4b96..4baa7e8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -110,12 +110,18 @@
     virtual status_t setBatchSize(size_t batchSize = 1) = 0;
 
     /**
-     * Notify the output stream that the minimum frame duration has changed.
+     * Notify the output stream that the minimum frame duration has changed, or
+     * frame rate has switched between variable and fixed.
      *
      * The minimum frame duration is calculated based on the upper bound of
      * AE_TARGET_FPS_RANGE in the capture request.
      */
-    virtual void onMinDurationChanged(nsecs_t duration) = 0;
+    virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) = 0;
+
+    /**
+     * Modify the stream use case for this output.
+     */
+    virtual void setStreamUseCase(int64_t streamUseCase) = 0;
 };
 
 // Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index ed66df0..f742a6d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -128,12 +128,71 @@
     return res;
 }
 
+status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
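+    // If the HAL omitted the autoframing result keys, fill in defaults below
+    // (AUTOFRAMING = OFF, AUTOFRAMING_STATE = INACTIVE) so clients always see
+    // well-defined values.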
+    status_t res = OK;
+    camera_metadata_entry autoframingEntry =
+            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
+    if (autoframingEntry.count == 0) {
+        const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
+        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
+                  __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    camera_metadata_entry autoframingStateEntry =
+            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
+    if (autoframingStateEntry.count == 0) {
+        const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
+        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
+                                    &defaultAutoframingStateEntry, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
+                  __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    return res;
+}
+
+void correctMeteringRegions(camera_metadata_t *meta) {
+    if (meta == nullptr) return;
+
+    uint32_t meteringRegionKeys[] = {
+            ANDROID_CONTROL_AE_REGIONS,
+            ANDROID_CONTROL_AWB_REGIONS,
+            ANDROID_CONTROL_AF_REGIONS };
+
+    for (uint32_t key : meteringRegionKeys) {
+        camera_metadata_entry_t entry;
+        int res = find_camera_metadata_entry(meta, key, &entry);
+        if (res != OK) continue;
+
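+        // Metering regions are stored as 5-tuples: (xmin, ymin, xmax, ymax, weight).
+        // The checks below clamp inverted coordinates so that right >= left and
+        // bottom >= top.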
+        for (size_t i = 0; i < entry.count; i += 5) {
+            if (entry.data.i32[0] > entry.data.i32[2]) {
+                ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
+                        __FUNCTION__, key, entry.data.i32[0], entry.data.i32[2]);
+                entry.data.i32[2] = entry.data.i32[0];
+            }
+            if (entry.data.i32[1] > entry.data.i32[3]) {
+                ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
+                        __FUNCTION__, key, entry.data.i32[1], entry.data.i32[3]);
+                entry.data.i32[3] = entry.data.i32[1];
+            }
+        }
+    }
+}
+
 void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
     if (result == nullptr) return;
 
     camera_metadata_t *meta = const_cast<camera_metadata_t *>(
             result->mMetadata.getAndLock());
     set_camera_metadata_vendor_id(meta, states.vendorTagId);
+    correctMeteringRegions(meta);
     result->mMetadata.unlock(meta);
 
     if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
@@ -152,6 +211,7 @@
         camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
                 physicalMetadata.mPhysicalCameraMetadata.getAndLock());
         set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+        correctMeteringRegions(pmeta);
         physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
     }
 
@@ -324,6 +384,22 @@
         }
     }
 
+    // Fix up autoframing metadata
+    res = fixupAutoframingTags(captureResult.mMetadata);
+    if (res != OK) {
+        SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
+                strerror(-res), res);
+        return;
+    }
+    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+        res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
+        if (res != OK) {
+            SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
+                    strerror(-res), res);
+            return;
+        }
+    }
+
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
         String8 cameraId8(physicalMetadata.mPhysicalCameraId);
         auto mapper = states.distortionMappers.find(cameraId8.c_str());
@@ -464,6 +540,32 @@
     return found;
 }
 
+const std::set<std::string>& getCameraIdsWithZoomLocked(
+        const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
+        const std::set<std::string>& cameraIdsWithZoom) {
+    camera_metadata_ro_entry overrideEntry =
+            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+    camera_metadata_ro_entry frameNumberEntry =
+            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
+    if (overrideEntry.count != 1
+            || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
+            || frameNumberEntry.count != 1) {
+        // No valid overriding frame number, skip
+        return cameraIdsWithZoom;
+    }
+
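+    // The overriding frame number names the in-flight request whose zoom settings
+    // were actually applied to this frame; report that request's zoomed camera ids
+    // instead of the current request's own set.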
+    uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
+    ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
+    if (idx < 0) {
+        ALOGE("%s: Failed to find pending request #%d in inflight map",
+                __FUNCTION__, overridingFrameNumber);
+        return cameraIdsWithZoom;
+    }
+
+    const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
+    return r.cameraIdsWithZoom;
+}
+
 void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
     ATRACE_CALL();
 
@@ -521,27 +623,50 @@
         if (result->partial_result != 0)
             request.resultExtras.partialResultCount = result->partial_result;
 
-        if ((result->result != nullptr) && !states.legacyClient) {
+        if (result->result != nullptr) {
             camera_metadata_ro_entry entry;
             auto ret = find_camera_metadata_ro_entry(result->result,
                     ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
             if ((ret == OK) && (entry.count > 0)) {
                 std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
-                auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
-                if (deviceInfo != states.physicalDeviceInfoMap.end()) {
-                    auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
-                    if (orientation.count > 0) {
-                        ret = CameraUtils::getRotationTransform(deviceInfo->second,
-                                OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
-                        if (ret != OK) {
-                            ALOGE("%s: Failed to calculate current stream transformation: %s (%d)",
-                                    __FUNCTION__, strerror(-ret), ret);
+                if (!states.activePhysicalId.empty() && physicalId != states.activePhysicalId) {
+                    states.listener->notifyPhysicalCameraChange(physicalId);
+                }
+                states.activePhysicalId = physicalId;
+
+                if (!states.legacyClient && !states.overrideToPortrait) {
+                    auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
+                    if (deviceInfo != states.physicalDeviceInfoMap.end()) {
+                        auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
+                        if (orientation.count > 0) {
+                            int32_t transform;
+                            ret = CameraUtils::getRotationTransform(deviceInfo->second,
+                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
+                            if (ret == OK) {
+                                // It is possible for camera providers to return the capture
+                                // results after the processed frames. In such scenario, we will
+                                // not be able to set the output transformation before the frames
+                                // return back to the consumer for the current capture request
+                                // but we could still try and configure it for any future requests
+                                // that are still in flight. The assumption is that the physical
+                                // device id remains the same for the duration of the pending queue.
+                                for (size_t i = 0; i < states.inflightMap.size(); i++) {
+                                    auto &r = states.inflightMap.editValueAt(i);
+                                    if (r.requestTimeNs >= request.requestTimeNs) {
+                                        r.transform = transform;
+                                    }
+                                }
+                            } else {
+                                ALOGE("%s: Failed to calculate current stream transformation: %s "
+                                        "(%d)", __FUNCTION__, strerror(-ret), ret);
+                            }
+                        } else {
+                            ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
                         }
                     } else {
-                        ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
+                        ALOGE("%s: Physical device not found in device info map found!",
+                                __FUNCTION__);
                     }
-                } else {
-                    ALOGE("%s: Physical device not found in device info map found!", __FUNCTION__);
                 }
             }
         }
@@ -652,10 +777,12 @@
             } else if (request.hasCallback) {
                 CameraMetadata metadata;
                 metadata = result->result;
+                auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+                        states.inflightMap, metadata, request.cameraIdsWithZoom);
                 sendCaptureResult(states, metadata, request.resultExtras,
                     collectedPartialResult, frameNumber,
                     hasInputBufferInRequest, request.zslCapture && request.stillCapture,
-                    request.rotateAndCropAuto, request.cameraIdsWithZoom,
+                    request.rotateAndCropAuto, cameraIdsWithZoom,
                     request.physicalMetadatas);
             }
         }
@@ -787,10 +914,12 @@
         SessionStatsBuilder& sessionStatsBuilder) {
     bool timestampIncreasing =
             !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
+    nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
+            request.resultExtras.readoutTimestamp : 0;
     returnOutputBuffers(useHalBufManager, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
-            request.shutterTimestamp, request.shutterReadoutTimestamp,
+            request.shutterTimestamp, readoutTimestamp,
             /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
@@ -852,13 +981,18 @@
             }
 
             r.shutterTimestamp = msg.timestamp;
-            r.shutterReadoutTimestamp = msg.readout_timestamp;
-            if (r.minExpectedDuration != states.minFrameDuration) {
+            if (msg.readout_timestamp_valid) {
+                r.resultExtras.hasReadoutTimestamp = true;
+                r.resultExtras.readoutTimestamp = msg.readout_timestamp;
+            }
+            if (r.minExpectedDuration != states.minFrameDuration ||
+                    r.isFixedFps != states.isFixedFps) {
                 for (size_t i = 0; i < states.outputStreams.size(); i++) {
                     auto outputStream = states.outputStreams[i];
-                    outputStream->onMinDurationChanged(r.minExpectedDuration);
+                    outputStream->onMinDurationChanged(r.minExpectedDuration, r.isFixedFps);
                 }
                 states.minFrameDuration = r.minExpectedDuration;
+                states.isFixedFps = r.isFixedFps;
             }
             if (r.hasCallback) {
                 ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
@@ -875,11 +1009,13 @@
                     states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                 }
                 // send pending result and buffers
+                const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+                        inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                 sendCaptureResult(states,
                     r.pendingMetadata, r.resultExtras,
                     r.collectedPartialResult, msg.frame_number,
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
-                    r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
+                    r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
             }
             returnAndRemovePendingOutputBuffers(
                     states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index d6107c2..d5328c5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -106,6 +106,9 @@
         BufferRecordsInterface& bufferRecordsIntf;
         bool legacyClient;
         nsecs_t& minFrameDuration;
+        bool& isFixedFps;
+        bool overrideToPortrait;
+        std::string &activePhysicalId;
     };
 
     void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 9215f23..f3a7359 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,13 @@
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool useHalBufManager, int64_t dynamicProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
-        int mirrorMode) :
+        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                             transport, consumerUsage, timestampOffset, setId,
                             /*isMultiResolution*/false, dynamicProfile, streamUseCase,
-                            deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+                            deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+                            useReadoutTimestamp),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
     if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index aac3c2a..1102ecb 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,9 @@
             int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
             bool deviceTimeBaseIsRealtime = false,
             int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
-            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+            bool useReadoutTimestamp = false);
 
     virtual ~Camera3SharedOutputStream();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 7ad6649..4395455 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,8 @@
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int32_t colorSpace) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -95,6 +96,7 @@
     camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
     camera_stream::dynamic_range_profile = dynamicRangeProfile;
     camera_stream::use_case = streamUseCase;
+    camera_stream::color_space = colorSpace;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -135,6 +137,10 @@
     return camera_stream::data_space;
 }
 
+int32_t Camera3Stream::getColorSpace() const {
+    return camera_stream::color_space;
+}
+
 uint64_t Camera3Stream::getUsage() const {
     return mUsage;
 }
@@ -665,11 +671,19 @@
         }
     }
 
-    // Wait for new buffer returned back if we are running into the limit.
+    // Wait for a new buffer to be returned if we are running into a limit. There
+    // are 2 limits:
+    // 1. The number of HAL buffers is greater than max_buffers
+    // 2. The number of HAL buffers + cached buffers is greater than max_buffers
+    //    + maxCachedBuffers
     size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
-    if (numOutstandingBuffers == camera_stream::max_buffers) {
-        ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
-                        __FUNCTION__, camera_stream::max_buffers);
+    size_t numCachedBuffers = getCachedOutputBufferCountLocked();
+    size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
+    while (numOutstandingBuffers == camera_stream::max_buffers ||
+            numOutstandingBuffers + numCachedBuffers ==
+            camera_stream::max_buffers + maxNumCachedBuffers) {
+        ALOGV("%s: Already dequeued max output buffers (%d(+%zu)), wait for next returned one.",
+                        __FUNCTION__, camera_stream::max_buffers, maxNumCachedBuffers);
         nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
         if (waitBufferTimeout < kWaitForBufferDuration) {
             waitBufferTimeout = kWaitForBufferDuration;
@@ -687,12 +701,16 @@
         }
 
         size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
-        if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
-            ALOGE("%s: outsanding buffer count goes from %zu to %zu, "
+        size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
+        if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
+                updatedNumCachedBuffers == numCachedBuffers) {
+            ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
                     "getBuffer(s) call must not run in parallel!", __FUNCTION__,
                     numOutstandingBuffers, updatedNumOutstandingBuffers);
             return INVALID_OPERATION;
         }
+        numOutstandingBuffers = updatedNumOutstandingBuffers;
+        numCachedBuffers = updatedNumCachedBuffers;
     }
 
     res = getBufferLocked(buffer, surface_ids);
@@ -937,9 +955,8 @@
     }
 }
 
-void Camera3Stream::dump(int fd, const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
 {
-    (void)args;
     mBufferLimitLatency.dump(fd,
             "      Latency histogram for wait on max_buffers");
 }
@@ -1057,11 +1074,20 @@
     }
 
     size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
-    // Wait for new buffer returned back if we are running into the limit.
-    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers) {
-        ALOGV("%s: Already dequeued %zu output buffers and requesting %zu (max is %d), waiting.",
-                __FUNCTION__, numOutstandingBuffers, numBuffersRequested,
-                camera_stream::max_buffers);
+    size_t numCachedBuffers = getCachedOutputBufferCountLocked();
+    size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
+    // Wait for a new buffer to be returned if we are running into a limit. There
+    // are 2 limits:
+    // 1. The number of HAL buffers is greater than max_buffers
+    // 2. The number of HAL buffers + cached buffers is greater than max_buffers
+    //    + maxCachedBuffers
+    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
+            numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
+            camera_stream::max_buffers + maxNumCachedBuffers) {
+        ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
+                "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
+                numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
+                maxNumCachedBuffers);
         nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
         if (waitBufferTimeout < kWaitForBufferDuration) {
             waitBufferTimeout = kWaitForBufferDuration;
@@ -1078,13 +1104,16 @@
             return res;
         }
         size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
-        if (updatedNumOutstandingBuffers >= numOutstandingBuffers) {
-            ALOGE("%s: outsanding buffer count goes from %zu to %zu, "
+        size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
+        if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
+                updatedNumCachedBuffers == numCachedBuffers) {
+            ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
                     "getBuffer(s) call must not run in parallel!", __FUNCTION__,
                     numOutstandingBuffers, updatedNumOutstandingBuffers);
             return INVALID_OPERATION;
         }
         numOutstandingBuffers = updatedNumOutstandingBuffers;
+        numCachedBuffers = updatedNumCachedBuffers;
     }
 
     res = getBuffersLocked(buffers);
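
The two hunks above in Camera3Stream.cpp replace the single max_buffers gate with a pair of limits: buffers handed out to the HAL, and handed-out plus cached (frame-spacer) buffers. A minimal sketch of that predicate, folding both call sites (single-buffer getBuffer and batched getBuffers); the struct and names are illustrative, not the real members:

    #include <cstddef>

    // Illustrative stand-in for the limits Camera3Stream consults:
    // maxHalBuffers mirrors camera_stream::max_buffers, maxCachedBuffers mirrors
    // what getMaxCachedOutputBuffersLocked() reports (e.g. preview frame spacer slots).
    struct BufferLimits {
        size_t maxHalBuffers;
        size_t maxCachedBuffers;
    };

    // True when the caller must wait for a buffer to come back before dequeuing
    // 'requested' more. Limit 1: HAL buffers alone; limit 2: HAL + cached buffers.
    bool mustWaitForBuffer(size_t handedOutToHal, size_t cached, size_t requested,
                           const BufferLimits& limits) {
        if (handedOutToHal + requested > limits.maxHalBuffers) {
            return true;
        }
        return handedOutToHal + cached + requested >
               limits.maxHalBuffers + limits.maxCachedBuffers;
    }

Note that after each wait the patch refreshes both counters from the *Locked() accessors, so a drop in either the handed-out or the cached count counts as forward progress and ends the loop.
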
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d429e6c..f32053b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,7 @@
     uint32_t          getHeight() const;
     int               getFormat() const;
     android_dataspace getDataSpace() const;
+    int32_t           getColorSpace() const;
     uint64_t          getUsage() const;
     void              setUsage(uint64_t usage);
     void              setFormatOverride(bool formatOverriden);
@@ -509,7 +510,8 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
-            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+            int32_t colorSpace);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
@@ -558,6 +560,10 @@
     // Get handout input buffer count.
     virtual size_t   getHandoutInputBufferCountLocked() = 0;
 
+    // Get cached output buffer count.
+    virtual size_t   getCachedOutputBufferCountLocked() const = 0;
+    virtual size_t   getMaxCachedOutputBuffersLocked() const = 0;
+
     // Get the usage flags for the other endpoint, or return
     // INVALID_OPERATION if they cannot be obtained.
     virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
@@ -576,6 +582,8 @@
 
     uint64_t mUsage;
 
+    Condition mOutputBufferReturnedSignal;
+
   private:
     // Previously configured stream properties (post HAL override)
     uint64_t mOldUsage;
@@ -583,7 +591,6 @@
     int mOldFormat;
     android_dataspace mOldDataSpace;
 
-    Condition mOutputBufferReturnedSignal;
     Condition mInputBufferReturnedSignal;
     static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6812e89..823be2e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
     std::unordered_set<int32_t> sensor_pixel_modes_used;
     int64_t dynamic_range_profile;
     int64_t use_case;
+    int32_t color_space;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
         int64_t streamUseCase;
         int timestampBase;
         int mirrorMode;
+        int32_t colorSpace;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
             consumerUsage(0),
             dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
             streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
             timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
-            mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+            mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+            colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
                 uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
-                int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+                int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+                int32_t _colorSpace) :
             width(_width), height(_height), format(_format),
             dataSpace(_dataSpace), consumerUsage(_consumerUsage),
             sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
-            streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+            streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+            colorSpace(_colorSpace) {}
 };
 
 // Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
     virtual int      getFormat() const = 0;
     virtual int64_t  getDynamicRangeProfile() const = 0;
     virtual android_dataspace getDataSpace() const = 0;
+    virtual int32_t getColorSpace() const = 0;
     virtual void setFormatOverride(bool formatOverriden) = 0;
     virtual bool isFormatOverridden() const = 0;
     virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 15807bf..f0764b4 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -67,7 +67,7 @@
         return res;
     }
 
-    bool mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+    bool mMaxResolution = SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
     if (mMaxResolution) {
         res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
     }
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 493a9e2..870825a 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -65,6 +65,7 @@
 typedef struct camera_shutter_msg {
     uint32_t frame_number;
     uint64_t timestamp;
+    bool readout_timestamp_valid;
     uint64_t readout_timestamp;
 } camera_shutter_msg_t;
 
@@ -104,8 +105,6 @@
 struct InFlightRequest {
     // Set by notify() SHUTTER call.
     nsecs_t shutterTimestamp;
-    // Set by notify() SHUTTER call with readout time.
-    nsecs_t shutterReadoutTimestamp;
     // Set by process_capture_result().
     nsecs_t sensorTimestamp;
     int     requestStatus;
@@ -153,6 +152,9 @@
     // For auto-exposure modes, equal to 1/(lower end of target FPS range)
     nsecs_t maxExpectedDuration;
 
+    // Whether the FPS range is fixed, i.e., minFps == maxFps
+    bool isFixedFps;
+
     // Whether the result metadata for this request is to be skipped. The
     // result metadata should be skipped in the case of
     // REQUEST/RESULT error.
@@ -180,6 +182,9 @@
     // Indicates that ROTATE_AND_CROP was set to AUTO
     bool rotateAndCropAuto;
 
+    // Indicates that AUTOFRAMING was set to AUTO
+    bool autoframingAuto;
+
     // Requested camera ids (both logical and physical) with zoomRatio != 1.0f
     std::set<std::string> cameraIdsWithZoom;
 
@@ -206,20 +211,23 @@
             hasCallback(true),
             minExpectedDuration(kDefaultMinExpectedDuration),
             maxExpectedDuration(kDefaultMaxExpectedDuration),
+            isFixedFps(false),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
             stillCapture(false),
             zslCapture(false),
             rotateAndCropAuto(false),
+            autoframingAuto(false),
             requestTimeNs(0),
             transform(-1) {
     }
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
-            bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration,
+            bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
             const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
-            bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
-            nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
+            bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
+            const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+            const SurfaceMap& outSurfaces = SurfaceMap{}) :
             shutterTimestamp(0),
             sensorTimestamp(0),
             requestStatus(OK),
@@ -230,12 +238,14 @@
             hasCallback(hasAppCallback),
             minExpectedDuration(minDuration),
             maxExpectedDuration(maxDuration),
+            isFixedFps(fixedFps),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
             physicalCameraIds(physicalCameraIdSet),
             stillCapture(isStillCapture),
             zslCapture(isZslCapture),
             rotateAndCropAuto(rotateAndCropAuto),
+            autoframingAuto(autoframingAuto),
             cameraIdsWithZoom(idsWithZoom),
             requestTimeNs(requestNs),
             outputSurfaces(outSurfaces),
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 0439501..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -68,8 +68,10 @@
         return true;
     }
 
-    // Cache the frame to match readout time interval, for up to 33ms
-    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+    // Cache the frame to match readout time interval, for up to kMaxFrameWaitTime
+    // Because the code between here and queueBuffer() takes time to execute, make sure the
+    // presentationInterval is slightly shorter than readoutInterval.
+    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
     nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
     if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
         mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
         }
         currentTime = systemTime();
     }
-    ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+    ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
             ", timestamp %" PRId64, __FUNCTION__, readoutInterval,
-            currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+            mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
     mPendingBuffers.pop();
     queueBufferToClientLocked(buffer, currentTime);
     return true;
@@ -122,6 +124,7 @@
         }
     }
 
+    parent->onCachedBufferQueued();
     mLastCameraPresentTime = currentTime;
     mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
 }
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index e165768..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -85,7 +85,8 @@
     nsecs_t mLastCameraPresentTime = 0;
     static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
     static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
-    static constexpr nsecs_t kMaxFrameWaitTime = 33333333LL; // 33ms
+    static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+    static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
 };
 
 }; //namespace camera3
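
The PreviewFrameSpacer change above shortens the pacing budget (kMaxFrameWaitTime 33ms -> 10ms) and subtracts a 2ms kFrameAdjustThreshold so the work between waking up and queueBuffer() does not land the frame late. A small sketch of the wait-time arithmetic as it stands after this patch; previewWaitTime() is an illustrative free function, not the real member:

    #include <algorithm>
    #include <cstdint>

    using nsecs_t = int64_t;

    constexpr nsecs_t kMaxFrameWaitTime     = 10000000LL; // 10ms
    constexpr nsecs_t kFrameAdjustThreshold = 2000000LL;  // 2ms

    // How long to hold a preview buffer so readout spacing is preserved.
    // A result <= 0 (or a backlog of 2+ pending buffers) means queue immediately.
    nsecs_t previewWaitTime(nsecs_t lastPresentTime, nsecs_t readoutInterval, nsecs_t now) {
        nsecs_t expectedQueueTime = lastPresentTime + readoutInterval - kFrameAdjustThreshold;
        return std::min(kMaxFrameWaitTime, expectedQueueTime - now);
    }
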
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index a02e5f6..9cdd365 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -142,13 +142,13 @@
                    ch :                 // pillarbox or 1:1, full height
                    cw / mRotateAspect;  // letterbox, not full height
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] = -rw / ch; // +y -> -x
                 transformMat[2] =  rh / cw; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of crop to right edge of rotated
                 yShift = (ch - rh) / 2; // top edge of crop to top edge of rotated
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] =  rw / ch; // +y -> +x
                 transformMat[2] = -rh / cw; // +x -> -y
                 xShift = (cw - rw) / 2; // left edge of crop to left edge of rotated
@@ -271,13 +271,13 @@
         rx = cx + (cw - rw) / 2;
         ry = cy + (ch - rh) / 2;
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] =  ch / rw; // +y -> +x
                 transformMat[2] = -cw / rh; // +x -> -y
                 xShift = -(cw - rw) / 2; // left edge of rotated to left edge of cropped
                 yShift = ry - cy + ch;   // top edge of rotated to bottom edge of cropped
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] = -ch / rw; // +y -> -x
                 transformMat[2] =  cw / rh; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of rotated to left edge of cropped
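
The RotateAndCropMapper hunks above swap which rotation case receives which off-diagonal signs; the direction is encoded entirely by transformMat[1] (y's contribution to x) and transformMat[2] (x's contribution to y), as the "+y -> -x" / "+x -> +y" comments state. A toy check of that sign convention, assuming the row-major 2x2 layout implied by those comments (an assumption, not taken from the header):

    #include <cstdio>

    // out = M * in + shift, with M stored as {xx, xy, yx, yy}.
    void applyTransform(const float m[4], float xShift, float yShift,
                        float x, float y, float* outX, float* outY) {
        *outX = m[0] * x + m[1] * y + xShift;
        *outY = m[2] * x + m[3] * y + yShift;
    }

    int main() {
        // Ignoring the aspect-ratio scale factors: m[1] = -1, m[2] = +1 gives the
        // "+y -> -x, +x -> +y" mapping the first (forward-mapping) hunk now assigns
        // to ROTATE_AND_CROP_270; the reverse-mapping hunk uses the inverse signs.
        const float m[4] = {0.f, -1.f, 1.f, 0.f};
        float ox, oy;
        applyTransform(m, 0.f, 0.f, 0.f, 1.f, &ox, &oy);
        std::printf("(0,1) -> (%.0f,%.0f)\n", ox, oy); // prints (-1,0): +y -> -x
        return 0;
    }
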
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
index c558d91..ce7097a 100644
--- a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
@@ -91,6 +91,8 @@
         if (meteringRegionsSetEntry.count == 1 &&
                 meteringRegionsSetEntry.data.u8[0] == entry.second.second) {
             // metering region set by client, doesn't need to be fixed.
+            ALOGV("%s: Metering region %u set by client, they don't need to be fixed",
+                    __FUNCTION__, entry.first);
             continue;
         }
         camera_metadata_entry meteringRegionEntry = request->find(entry.first);
@@ -121,6 +123,7 @@
     if (cropRegionSetEntry.count == 1 &&
         cropRegionSetEntry.data.u8[0] == ANDROID_SCALER_CROP_REGION_SET_TRUE) {
         // crop regions set by client, doesn't need to be fixed.
+        ALOGV("%s: crop region set by client, doesn't need to be fixed", __FUNCTION__);
         return;
     }
     camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 27b00c9..aaa1b70 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -153,9 +153,9 @@
         return;
     }
 
-    bool isUltraHighResolutionSensor =
-            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
-    if (isUltraHighResolutionSensor) {
+    bool supportsUltraHighResolutionCapture =
+            camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture(*deviceInfo);
+    if (supportsUltraHighResolutionCapture) {
         if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
                 ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
                 &arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
@@ -354,17 +354,8 @@
             if (weight == 0) {
                 continue;
             }
-            // Top left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+            scaleRegion(entry.data.i32 + j, zoomRatio, arrayWidth,
                     arrayHeight);
-            // Bottom right (exclusive): Use adjacent inclusive pixel to
-            // calculate.
-            entry.data.i32[j+2] -= 1;
-            entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
-                    arrayHeight);
-            entry.data.i32[j+2] += 1;
-            entry.data.i32[j+3] += 1;
         }
     }
 
@@ -401,17 +392,8 @@
             if (weight == 0) {
                 continue;
             }
-            // Top-left (inclusive)
-            scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+            scaleRegion(entry.data.i32 + j, 1.0 / zoomRatio, arrayWidth,
                     arrayHeight);
-            // Bottom-right (exclusive): Use adjacent inclusive pixel to
-            // calculate.
-            entry.data.i32[j+2] -= 1;
-            entry.data.i32[j+3] -= 1;
-            scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
-                    arrayHeight);
-            entry.data.i32[j+2] += 1;
-            entry.data.i32[j+3] += 1;
         }
     }
     for (auto rect : kRectsToCorrect) {
@@ -470,6 +452,24 @@
     }
 }
 
+void ZoomRatioMapper::scaleRegion(int32_t* region, float scaleRatio,
+        int32_t arrayWidth, int32_t arrayHeight) {
+    // Top-left (inclusive)
+    scaleCoordinates(region, 1, scaleRatio, true /*clamp*/, arrayWidth,
+            arrayHeight);
+    // Bottom-right (exclusive): Use adjacent inclusive pixel to
+    // calculate.
+    region[2] -= 1;
+    region[3] -= 1;
+    scaleCoordinates(region + 2, 1, scaleRatio, true /*clamp*/, arrayWidth,
+            arrayHeight);
+    region[2] += 1;
+    region[3] += 1;
+    // Make sure bottom-right >= top-left
+    region[2] = std::max(region[0], region[2]);
+    region[3] = std::max(region[1], region[3]);
+}
+
 void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
         float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
     for (int i = 0; i < rectCount * 4; i += 4) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index b7a9e41..1aa8e78 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -69,6 +69,8 @@
   public: // Visible for testing. Do not use concurently.
     void scaleCoordinates(int32_t* coordPairs, int coordCount,
             float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
+    void scaleRegion(int32_t* region, float scaleRatio,
+            int32_t arrayWidth, int32_t arrayHeight);
 
     bool isValid() { return mIsValid; }
   private:
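
ZoomRatioMapper::scaleRegion above factors out the metering-region pattern the two deleted blocks duplicated: scale the inclusive top-left, temporarily turn the exclusive bottom-right into an inclusive pixel before scaling it, then re-clamp so the region never inverts. A standalone sketch of that pattern; scalePoint is a simplified, hypothetical stand-in for the real scaleCoordinates() (which clamps to the active array and scales about its center under the zoom model):

    #include <algorithm>
    #include <cstdint>

    // Hypothetical stand-in: scale one (x, y) pair about the array center and clamp.
    static void scalePoint(int32_t* xy, float scaleRatio,
                           int32_t arrayWidth, int32_t arrayHeight) {
        const float cx = (arrayWidth - 1) / 2.0f, cy = (arrayHeight - 1) / 2.0f;
        const float x = cx + (xy[0] - cx) * scaleRatio;
        const float y = cy + (xy[1] - cy) * scaleRatio;
        xy[0] = std::clamp(static_cast<int32_t>(x + 0.5f), 0, arrayWidth - 1);
        xy[1] = std::clamp(static_cast<int32_t>(y + 0.5f), 0, arrayHeight - 1);
    }

    // region = {left, top, right, bottom}; bottom-right is exclusive, as in the patch.
    void scaleRegionSketch(int32_t region[4], float scaleRatio,
                           int32_t arrayWidth, int32_t arrayHeight) {
        scalePoint(region, scaleRatio, arrayWidth, arrayHeight);   // top-left (inclusive)
        region[2] -= 1;                                            // make bottom-right inclusive
        region[3] -= 1;
        scalePoint(region + 2, scaleRatio, arrayWidth, arrayHeight);
        region[2] += 1;                                            // back to exclusive
        region[3] += 1;
        region[2] = std::max(region[0], region[2]);                // keep bottom-right >= top-left
        region[3] = std::max(region[1], region[3]);
    }
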
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index f05520f..aa941b0 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -51,6 +51,7 @@
 
 #include <aidl/android/hardware/camera/device/ICameraInjectionSession.h>
 #include <aidlcommonsupport/NativeHandle.h>
+#include <android/binder_ibinder_platform.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 
 #include "utils/CameraTraces.h"
@@ -161,9 +162,13 @@
     return (uint64_t)usage;
 }
 
-AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-            bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+AidlCamera3Device::AidlCamera3Device(
+        std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+        legacyClient) {
+    mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
 status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
@@ -192,13 +197,15 @@
       SET_ERR("Session iface returned is null");
       return INVALID_OPERATION;
     }
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
         return res;
     }
     mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+    mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId.string());
 
     std::vector<std::string> physicalCameraIds;
     bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
@@ -206,7 +213,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    mOverrideToPortrait);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -231,7 +239,7 @@
                     &mPhysicalDeviceInfoMap[physicalId],
                     mSupportNativeZoomRatio, usePrecorrectArray);
 
-            if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+            if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
                     mPhysicalDeviceInfoMap[physicalId])) {
                 mUHRCropAndMeteringRegionMappers[physicalId] =
                         UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -371,7 +379,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -412,7 +421,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -669,6 +679,12 @@
     return p->returnStreamBuffers(buffers);
 }
 
+::ndk::SpAIBinder AidlCamera3Device::AidlCameraDeviceCallbacks::createBinder() {
+    auto binder = BnCameraDeviceCallback::createBinder();
+    AIBinder_setInheritRt(binder.get(), /*inheritRt*/ true);
+    return binder;
+}
+
 ::ndk::ScopedAStatus AidlCamera3Device::returnStreamBuffers(
         const std::vector<camera::device::StreamBuffer>& buffers) {
     ReturnBufferStates states {
@@ -859,8 +875,9 @@
 }
 
 status_t AidlCamera3Device::AidlHalInterface::configureStreams(
-    const camera_metadata_t *sessionParams,
-        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+        const camera_metadata_t *sessionParams,
+        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+        int64_t logId) {
     using camera::device::StreamType;
     using camera::device::StreamConfigurationMode;
 
@@ -902,6 +919,7 @@
                     cam3stream->getOriginalFormat() : src->format);
         dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
                     cam3stream->getOriginalDataSpace() : src->data_space);
+        dst.colorSpace = src->color_space;
 
         dst.bufferSize = bufferSizes[i];
         if (src->physical_camera_id != nullptr) {
@@ -944,6 +962,7 @@
 
     requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
     requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+    requestedConfiguration.logId = logId;
     auto err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
     if (!err.isOk()) {
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
@@ -1399,9 +1418,11 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1570,9 +1591,11 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute);
+            useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index d20a7eb..99a308b 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,9 @@
     using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
     class AidlCameraDeviceCallbacks;
     friend class AidlCameraDeviceCallbacks;
-    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+    explicit AidlCamera3Device(
+            std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -99,7 +101,9 @@
 
         virtual status_t configureStreams(const camera_metadata_t *sessionParams,
                 /*inout*/ camera_stream_configuration_t *config,
-                const std::vector<uint32_t>& bufferSizes) override;
+                const std::vector<uint32_t>& bufferSizes,
+                int64_t logId) override;
+
         // The injection camera configures the streams to hal.
         virtual status_t configureInjectedStreams(
                 const camera_metadata_t* sessionParams,
@@ -174,7 +178,9 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -242,6 +248,10 @@
         ::ndk::ScopedAStatus returnStreamBuffers(
                 const std::vector<
                         aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+
+        protected:
+        ::ndk::SpAIBinder createBinder() override;
+
         private:
             wp<AidlCamera3Device> mParent = nullptr;
     };
@@ -255,7 +265,9 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 336719d..3c3db97 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -30,6 +30,7 @@
 #include <utils/Trace.h>
 
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <android/binder_ibinder_platform.h>
 
 #include "device3/aidl/AidlCamera3OfflineSession.h"
 #include "device3/Camera3OutputStream.h"
@@ -47,7 +48,7 @@
 AidlCamera3OfflineSession::~AidlCamera3OfflineSession() {
     ATRACE_CALL();
     ALOGV("%s: Tearing down aidl offline session for camera id %s", __FUNCTION__, mId.string());
-    AidlCamera3OfflineSession::disconnectSession();
+    Camera3OfflineSession::disconnectImpl();
 }
 
 status_t AidlCamera3OfflineSession::initialize(wp<NotificationListener> listener) {
@@ -110,6 +111,7 @@
         listener = mListener.promote();
     }
 
+    std::string activePhysicalId(""); // Unused
     AidlCaptureOutputStates states {
       {mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -123,7 +125,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -155,6 +158,7 @@
         listener = mListener.promote();
     }
 
+    std::string activePhysicalId(""); // Unused
     AidlCaptureOutputStates states {
       {mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -168,7 +172,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -218,6 +223,12 @@
     return p->returnStreamBuffers(buffers);
 }
 
+::ndk::SpAIBinder AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::createBinder() {
+    auto binder = BnCameraDeviceCallback::createBinder();
+    AIBinder_setInheritRt(binder.get(), /*inheritRt*/ true);
+    return binder;
+}
+
 ::ndk::ScopedAStatus AidlCamera3OfflineSession::returnStreamBuffers(
         const std::vector<camera::device::StreamBuffer>& buffers) {
     {
@@ -236,12 +247,17 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-void AidlCamera3OfflineSession::disconnectSession() {
-  std::lock_guard<std::mutex> lock(mLock);
-  if (mSession != nullptr) {
-      mSession->close();
-  }
-  mSession.reset();
+void AidlCamera3OfflineSession::closeSessionLocked() {
+    if (mSession != nullptr) {
+        auto err = mSession->close();
+        if (!err.isOk()) {
+            ALOGE("%s: Close transaction error: %s", __FUNCTION__, err.getDescription().c_str());
+        }
+    }
+}
+
+void AidlCamera3OfflineSession::releaseSessionLocked() {
+    mSession.reset();
 }
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index 33de2c5..b31ffb7 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -97,6 +97,10 @@
         ::ndk::ScopedAStatus returnStreamBuffers(
                 const std::vector<
                         aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+        protected:
+
+        ::ndk::SpAIBinder createBinder() override;
+
         private:
             wp<AidlCamera3OfflineSession> mParent = nullptr;
     };
@@ -127,7 +131,9 @@
 
     std::shared_ptr<AidlCameraDeviceCallbacks> mCallbacks;
 
-    virtual void disconnectSession() override;
+    virtual void closeSessionLocked() override;
+
+    virtual void releaseSessionLocked() override;
 
 }; // class AidlCamera3OfflineSession
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 02eebd2..b2accc1 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -110,6 +110,7 @@
             m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.get<Tag::shutter>().frameNumber;
             m.message.shutter.timestamp = msg.get<Tag::shutter>().timestamp;
+            m.message.shutter.readout_timestamp_valid = true;
             m.message.shutter.readout_timestamp = msg.get<Tag::shutter>().readoutTimestamp;
             break;
     }
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4bb426c..0d44dd5 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -162,7 +162,8 @@
         return res;
     }
 
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            /*overrideToPortrait*/false);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -176,7 +177,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    /*overrideToPortrait*/false);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -201,7 +203,7 @@
                     &mPhysicalDeviceInfoMap[physicalId],
                     mSupportNativeZoomRatio, usePrecorrectArray);
 
-            if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+            if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
                     mPhysicalDeviceInfoMap[physicalId])) {
                 mUHRCropAndMeteringRegionMappers[physicalId] =
                         UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -363,7 +365,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        mActivePhysicalId}, mResultMetadataQueue
     };
 
     //HidlCaptureOutputStates hidlStates {
@@ -425,7 +428,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        mActivePhysicalId}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -472,7 +476,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -698,9 +703,11 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute);
+                useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -874,7 +881,8 @@
 
 status_t HidlCamera3Device::HidlHalInterface::configureStreams(
         const camera_metadata_t *sessionParams,
-        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+        int64_t /*logId*/) {
     ATRACE_NAME("CameraHal::configureStreams");
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
@@ -1693,9 +1701,11 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 2e98fe0..1e50844 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,8 +31,11 @@
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
-          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+   explicit HidlCamera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient = false) :
+        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+                legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -81,9 +84,6 @@
             const hardware::hidl_vec<
                     hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
 
-    // Handle one notify message
-    void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
-
     status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
             /*out*/ sp<CameraOfflineSessionBase>* session) override;
 
@@ -110,7 +110,8 @@
 
         virtual status_t configureStreams(const camera_metadata_t *sessionParams,
                 /*inout*/ camera_stream_configuration_t *config,
-                const std::vector<uint32_t>& bufferSizes) override;
+                const std::vector<uint32_t>& bufferSizes,
+                int64_t logId) override;
 
         // The injection camera configures the streams to hal.
         virtual status_t configureInjectedStreams(
@@ -175,7 +176,9 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -222,7 +225,9 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait,
+                bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 5c97f0e..28b2b47 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -39,7 +39,7 @@
 HidlCamera3OfflineSession::~HidlCamera3OfflineSession() {
     ATRACE_CALL();
     ALOGV("%s: Tearing down hidl offline session for camera id %s", __FUNCTION__, mId.string());
-    HidlCamera3OfflineSession::disconnectSession();
+    Camera3OfflineSession::disconnectImpl();
 }
 
 status_t HidlCamera3OfflineSession::initialize(wp<NotificationListener> listener) {
@@ -92,6 +92,7 @@
         listener = mListener.promote();
     }
 
+    std::string activePhysicalId("");
     HidlCaptureOutputStates states {
       {mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -105,7 +106,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -132,6 +134,7 @@
 
     hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
 
+    std::string activePhysicalId("");
     HidlCaptureOutputStates states {
       {mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -145,7 +148,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -167,6 +171,7 @@
         listener = mListener.promote();
     }
 
+    std::string activePhysicalId("");
     HidlCaptureOutputStates states {
       {mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -180,7 +185,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -223,13 +229,17 @@
     return hardware::Void();
 }
 
-void HidlCamera3OfflineSession::disconnectSession() {
-  // TODO: Make sure this locking is correct.
-  std::lock_guard<std::mutex> lock(mLock);
-  if (mSession != nullptr) {
-      mSession->close();
-  }
-  mSession.clear();
+void HidlCamera3OfflineSession::closeSessionLocked() {
+    if (mSession != nullptr) {
+        auto err = mSession->close();
+        if (!err.isOk()) {
+            ALOGE("%s: Close transaction error: %s", __FUNCTION__, err.description().c_str());
+        }
+    }
+}
+
+void HidlCamera3OfflineSession::releaseSessionLocked() {
+    mSession.clear();
 }
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
index 597cc5d..d22a447 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
@@ -101,7 +101,9 @@
     // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
     std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
-    virtual void disconnectSession() override;
+    virtual void closeSessionLocked() override;
+
+    virtual void releaseSessionLocked() override;
 }; // class Camera3OfflineSession
 
 }; // namespace android
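
Both offline sessions now delegate teardown to the shared Camera3OfflineSession::disconnectImpl() and only provide closeSessionLocked()/releaseSessionLocked(). The base-class flow is not shown in this patch; the sketch below is an assumption about the shape of that pattern (lock held by the base, close then release, idempotent), not the actual implementation:

    #include <mutex>

    // Assumed shape of the base-class teardown the HIDL/AIDL subclasses plug into.
    class OfflineSessionBaseSketch {
      public:
        virtual ~OfflineSessionBaseSketch() = default;

        void disconnectImpl() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mClosed) return;       // assumed: safe to call from disconnect() and the dtor
            closeSessionLocked();      // transport-specific close (logs transaction errors)
            releaseSessionLocked();    // drop the HIDL/AIDL session handle
            mClosed = true;
        }

      protected:
        virtual void closeSessionLocked() = 0;
        virtual void releaseSessionLocked() = 0;

      private:
        std::mutex mLock;
        bool mClosed = false;
    };
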
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
index 8b0cd65..ff6fc17 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
@@ -105,6 +105,7 @@
             m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
             m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+            m.message.shutter.readout_timestamp_valid = false;
             m.message.shutter.readout_timestamp = 0LL;
             break;
     }
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index 3392db1..de51ffa 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -17,6 +17,7 @@
 
 #include <hidl/AidlCameraDeviceCallbacks.h>
 #include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
 
 namespace android {
 namespace frameworks {
@@ -144,7 +145,7 @@
 
     // Convert Metadata into HCameraMetadata;
     FmqSizeOrMetadata hResult;
-    using hardware::cameraservice::utils::conversion::filterVndkKeys;
+    using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
     if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
         ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
                 __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 26e813a..0f7f127 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -19,10 +19,12 @@
 #include <gui/Surface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 
+#include <aidl/AidlUtils.h>
 #include <hidl/AidlCameraDeviceCallbacks.h>
 #include <hidl/HidlCameraDeviceUser.h>
 #include <hidl/Utils.h>
 #include <android/hardware/camera/device/3.2/types.h>
+#include <android-base/properties.h>
 
 namespace android {
 namespace frameworks {
@@ -31,6 +33,7 @@
 namespace V2_1 {
 namespace implementation {
 
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
 using hardware::cameraservice::utils::conversion::convertToHidl;
 using hardware::cameraservice::utils::conversion::convertFromHidl;
 using hardware::cameraservice::utils::conversion::B2HStatus;
@@ -55,6 +58,7 @@
     const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
   : mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
 }
 
 bool HidlCameraDeviceUser::initDevice() {
@@ -235,8 +239,16 @@
     android::CameraMetadata cameraMetadata;
     binder::Status ret = mDeviceRemote->createDefaultRequest(convertFromHidl(templateId),
                                                              &cameraMetadata);
-    HStatus hStatus = B2HStatus(ret);
+
     HCameraMetadata hidlMetadata;
+    if (filterVndkKeys(mVndkVersion, cameraMetadata, /*isStatic*/false) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+              __FUNCTION__, mVndkVersion);
+        _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+        return Void();
+    }
+
+    HStatus hStatus = B2HStatus(ret);
     const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
     convertToHidl(rawMetadata, &hidlMetadata);
     _hidl_cb(hStatus, hidlMetadata);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index 0e2ab3d..a653ca2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -127,6 +127,7 @@
     std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
     bool mInitSuccess = false;
     int32_t mRequestId = REQUEST_ID_NONE;
+    int mVndkVersion = -1;
 };
 
 } // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 65a0300..d6910fe 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -21,6 +21,7 @@
 #include <hidl/HidlCameraService.h>
 #include <hidl/HidlCameraDeviceUser.h>
 #include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
 
 #include <hidl/HidlTransportSupport.h>
 
@@ -34,9 +35,9 @@
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::hidl_vec;
 using hardware::cameraservice::utils::conversion::convertToHidl;
-using hardware::cameraservice::utils::conversion::filterVndkKeys;
 using hardware::cameraservice::utils::conversion::B2HStatus;
 using hardware::Void;
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
 
 using device::V2_0::implementation::H2BCameraDeviceCallbacks;
 using device::V2_1::implementation::HidlCameraDeviceUser;
@@ -65,7 +66,8 @@
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraMetadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -116,7 +118,8 @@
     binder::Status serviceRet = mAidlICameraService->connectDevice(
             callbacks, String16(cameraId.c_str()), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&deviceRemote);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+            /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index 057a6e9..2a24a23 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -15,7 +15,6 @@
  */
 
 #include <hidl/Utils.h>
-#include <hidl/VndkVersionMetadataTags.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <cutils/native_handle.h>
 #include <mediautils/AImageReaderUtils.h>
@@ -298,31 +297,6 @@
     return hPhysicalCaptureResultInfos;
 }
 
-status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
-    if (vndkVersion == __ANDROID_API_FUTURE__) {
-        // VNDK version in ro.vndk.version is a version code-name that
-        // corresponds to the current version.
-        return OK;
-    }
-    const auto &apiLevelToKeys =
-            isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
-    // Find the vndk versions above the given vndk version. All the vndk
-    // versions above the given one, need to have their keys filtered from the
-    // metadata in order to avoid metadata invalidation.
-    auto it = apiLevelToKeys.upper_bound(vndkVersion);
-    while (it != apiLevelToKeys.end()) {
-        for (const auto &key : it->second) {
-            status_t res = metadata.erase(key);
-            if (res != OK) {
-                ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
-                return res;
-            }
-        }
-        it++;
-    }
-    return OK;
-}
-
 } //conversion
 } // utils
 } //cameraservice
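
Note: the hunk above deletes the HIDL-side filterVndkKeys() and, as the new using-declarations in the earlier files show, callers now pull it from the aidl conversion namespace instead. The removed body dropped any metadata key introduced after the device's declared VNDK version by walking a key table with upper_bound. A minimal standalone sketch of that filtering idea, using a hypothetical key table and a plain key set in place of CameraMetadata (the real tables come from VndkVersionMetadataTags.h):

    // Sketch only: the same upper_bound-based filtering the removed filterVndkKeys()
    // performed, but on a plain std::set of keys. The key table type here is a
    // hypothetical stand-in for static_api_level_to_keys / dynamic_api_level_to_keys.
    #include <cstdint>
    #include <map>
    #include <set>
    #include <vector>

    using ApiLevelToKeys = std::map<int, std::vector<uint32_t>>;

    // Erase every key that belongs to an API level strictly above vndkVersion.
    void filterKeysAboveVersion(int vndkVersion, const ApiLevelToKeys& apiLevelToKeys,
                                std::set<uint32_t>& metadataKeys) {
        for (auto it = apiLevelToKeys.upper_bound(vndkVersion); it != apiLevelToKeys.end(); ++it) {
            for (uint32_t key : it->second) {
                metadataKeys.erase(key);  // vendor clients below this level don't know the key
            }
        }
    }
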
diff --git a/services/camera/libcameraservice/hidl/Utils.h b/services/camera/libcameraservice/hidl/Utils.h
index e6d4393..ec06571 100644
--- a/services/camera/libcameraservice/hidl/Utils.h
+++ b/services/camera/libcameraservice/hidl/Utils.h
@@ -97,8 +97,6 @@
 
 HStatus B2HStatus(const binder::Status &bStatus);
 
-status_t filterVndkKeys(int vndk_version, CameraMetadata &metadata, bool isStatic = true);
-
 } // conversion
 } // utils
 } // cameraservice
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index e43b91f..b1bf41e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,7 +52,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
@@ -64,6 +64,7 @@
     fuzz_config: {
         cc: [
             "android-media-fuzzing-reports@google.com",
+            "android-camera-fwk-eng@google.com",
         ],
         componentid: 155276,
         libfuzzer_options: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 97d7bf4..b397573 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -111,12 +111,15 @@
     size_t mPreviewBufferCount = 0;
     bool mAutoFocusMessage = false;
     bool mSnapshotNotification = false;
+    bool mRecordingNotification = false;
     mutable Mutex mPreviewLock;
     mutable Condition mPreviewCondition;
     mutable Mutex mAutoFocusLock;
     mutable Condition mAutoFocusCondition;
     mutable Mutex mSnapshotLock;
     mutable Condition mSnapshotCondition;
+    mutable Mutex mRecordingLock;
+    mutable Condition mRecordingCondition;
 
     void getNumCameras();
     void getCameraInformation(int32_t cameraId);
@@ -125,6 +128,7 @@
     void invokeDump();
     void invokeShellCommand();
     void invokeNotifyCalls();
+    void invokeTorchAPIs(int32_t cameraId);
 
     // CameraClient interface
     void notifyCallback(int32_t msgType, int32_t, int32_t) override;
@@ -152,6 +156,8 @@
             Mutex::Autolock l(mPreviewLock);
             ++mPreviewBufferCount;
             mPreviewCondition.broadcast();
+            mRecordingNotification = true;
+            mRecordingCondition.broadcast();
             break;
         }
         case CAMERA_MSG_COMPRESSED_IMAGE: {
@@ -229,11 +235,11 @@
     mCameraService->getCameraVendorTagCache(&cache);
 
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, &cameraInfo);
+    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -311,114 +317,155 @@
     mCameraService->notifySystemEvent(eventId, args);
 }
 
+void CameraFuzzer::invokeTorchAPIs(int32_t cameraId) {
+    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    sp<IBinder> binder = new BBinder;
+
+    mCameraService->setTorchMode(cameraIdStr, true, binder);
+    ALOGV("Turned torch on.");
+    int32_t torchStrength = rand() % 5 + 1;
+    ALOGV("Changing torch strength level to %d", torchStrength);
+    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
+    mCameraService->setTorchMode(cameraIdStr, false, binder);
+    ALOGV("Turned torch off.");
+}
+
 void CameraFuzzer::invokeCameraAPIs() {
-    for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
-        getCameraInformation(cameraId);
+    /** To avoid fuzzer timeouts caused by iterating over every combination, the 'for' loops are
+     * removed; 'cameraId', 'pictureSize' and 'videoSize' are instead picked with the
+     * FuzzedDataProvider from the available cameras and the 'pictureSizes'/'videoSizes' vectors.
+     */
+    int32_t cameraId = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(0, mNumCameras - 1);
+    getCameraInformation(cameraId);
+    invokeTorchAPIs(cameraId);
 
-        ::android::binder::Status rc;
-        sp<ICamera> cameraDevice;
+    ::android::binder::Status rc;
+    sp<ICamera> cameraDevice;
 
-        rc = mCameraService->connect(this, cameraId, String16(),
-                android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
-        if (!rc.isOk()) {
-            // camera not connected
-            return;
+    rc = mCameraService->connect(this, cameraId, String16(),
+                                 android::CameraService::USE_CALLING_UID,
+                                 android::CameraService::USE_CALLING_PID,
+                                 /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                                 /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+                                 &cameraDevice);
+    if (!rc.isOk()) {
+        // camera not connected
+        return;
+    }
+    if (cameraDevice) {
+        sp<Surface> previewSurface;
+        sp<SurfaceControl> surfaceControl;
+        CameraParameters params(cameraDevice->getParameters());
+        String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+        bool isAFSupported = false;
+        const char* focusMode = nullptr;
+
+        if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+            isAFSupported = true;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+        } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+            isAFSupported = true;
+            focusMode = CameraParameters::FOCUS_MODE_MACRO;
         }
-        if (cameraDevice) {
-            sp<Surface> previewSurface;
-            sp<SurfaceControl> surfaceControl;
-            CameraParameters params(cameraDevice->getParameters());
-            String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
-            bool isAFSupported = false;
-            const char *focusMode = nullptr;
+        if (nullptr != focusMode) {
+            params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+            cameraDevice->setParameters(params.flatten());
+        }
+        int previewWidth, previewHeight;
+        params.getPreviewSize(&previewWidth, &previewHeight);
 
-            if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
-                isAFSupported = true;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
-            } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
-                isAFSupported = true;
-                focusMode = CameraParameters::FOCUS_MODE_MACRO;
-            }
-            if (nullptr != focusMode) {
-                params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
-                cameraDevice->setParameters(params.flatten());
-            }
-            int previewWidth, previewHeight;
-            params.getPreviewSize(&previewWidth, &previewHeight);
+        mComposerClient = new SurfaceComposerClient;
+        mComposerClient->initCheck();
 
-            mComposerClient = new SurfaceComposerClient;
-            mComposerClient->initCheck();
-
-            bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
-            int layerMetaData;
-            if (shouldPassInvalidLayerMetaData) {
-                layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
-            } else {
-                layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+        bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+        int layerMetaData;
+        if (shouldPassInvalidLayerMetaData) {
+            layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+        } else {
+            layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
                     0, kNumLayerMetaData - 1)];
-            }
-            surfaceControl = mComposerClient->createSurface(
+        }
+        surfaceControl = mComposerClient->createSurface(
                 String8("Test Surface"), previewWidth, previewHeight,
                 CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
 
-            if (surfaceControl.get() != nullptr) {
-                SurfaceComposerClient::Transaction{}
+        if (surfaceControl.get()) {
+            SurfaceComposerClient::Transaction{}
                     .setLayer(surfaceControl, 0x7fffffff)
                     .show(surfaceControl)
                     .apply();
 
-                previewSurface = surfaceControl->getSurface();
+            previewSurface = surfaceControl->getSurface();
+            if (previewSurface.get()) {
                 cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
             }
-            cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+        }
+        cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
 
-            Vector<Size> pictureSizes;
-            params.getSupportedPictureSizes(pictureSizes);
+        Vector<Size> pictureSizes;
+        params.getSupportedPictureSizes(pictureSizes);
 
-            for (size_t i = 0; i < pictureSizes.size(); ++i) {
-                params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
-                cameraDevice->setParameters(params.flatten());
-                cameraDevice->startPreview();
-                waitForPreviewStart();
-                cameraDevice->autoFocus();
-                waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
-                bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
-                int msgType;
-                if (shouldPassInvalidCameraMsg) {
-                    msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
-                } else {
-                    msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+        if (pictureSizes.size()) {
+            Size pictureSize = pictureSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, pictureSizes.size() - 1)];
+            params.setPictureSize(pictureSize.width, pictureSize.height);
+            cameraDevice->setParameters(params.flatten());
+            cameraDevice->startPreview();
+            waitForPreviewStart();
+            cameraDevice->autoFocus();
+            waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+            bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+            int msgType;
+            if (shouldPassInvalidCameraMsg) {
+                msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+            } else {
+                msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
                         0, kNumCameraMsg - 1)];
-                }
-                cameraDevice->takePicture(msgType);
-
-                waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
             }
+            cameraDevice->takePicture(msgType);
 
-            Vector<Size> videoSizes;
-            params.getSupportedVideoSizes(videoSizes);
+            waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+            cameraDevice->stopPreview();
+        }
 
-            for (size_t i = 0; i < videoSizes.size(); ++i) {
-                params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+        Vector<Size> videoSizes;
+        params.getSupportedVideoSizes(videoSizes);
 
-                cameraDevice->setParameters(params.flatten());
-                cameraDevice->startPreview();
-                waitForPreviewStart();
-                cameraDevice->setVideoBufferMode(
+        if (videoSizes.size()) {
+            Size videoSize = videoSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+                    0, videoSizes.size() - 1)];
+            params.setVideoSize(videoSize.width, videoSize.height);
+
+            cameraDevice->setParameters(params.flatten());
+            cameraDevice->startPreview();
+            waitForPreviewStart();
+            cameraDevice->setVideoBufferMode(
                     android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
-                cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
-                cameraDevice->startRecording();
-                cameraDevice->stopRecording();
+            sp<SurfaceControl> surfaceControlVideo = mComposerClient->createSurface(
+                    String8("Test Surface Video"), previewWidth, previewHeight,
+                    CameraParameters::previewFormatToEnum(params.getPreviewFormat()),
+                    layerMetaData);
+            if (surfaceControlVideo.get()) {
+                SurfaceComposerClient::Transaction{}
+                        .setLayer(surfaceControlVideo, 0x7fffffff)
+                        .show(surfaceControlVideo)
+                        .apply();
+                sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
+                if (previewSurfaceVideo.get()) {
+                    cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+                }
             }
             cameraDevice->stopPreview();
-            cameraDevice->disconnect();
+            cameraDevice->startRecording();
+            waitForEvent(mRecordingLock, mRecordingCondition, mRecordingNotification);
+            cameraDevice->stopRecording();
         }
+        cameraDevice->disconnect();
     }
 }
 
@@ -534,7 +581,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &device);
         if (device == nullptr) {
             continue;
         }
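
Note: the rewritten invokeCameraAPIs() above replaces the exhaustive loops with a single FuzzedDataProvider-driven choice per run (one cameraId, one picture size, one video size). A minimal sketch of that selection pattern in an ordinary libFuzzer target, with a stand-in vector in place of the camera parameter lists:

    // Sketch: one choice per fuzz iteration via FuzzedDataProvider, as the rewritten
    // invokeCameraAPIs() does for cameraId, pictureSize and videoSize.
    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        std::vector<int> sizes = {480, 720, 1080};  // stand-in for pictureSizes/videoSizes
        if (!sizes.empty()) {
            // ConsumeIntegralInRange is inclusive on both ends, matching the patch.
            int picked = sizes[fdp.ConsumeIntegralInRange<size_t>(0, sizes.size() - 1)];
            (void)picked;  // the fuzzer exercises exactly this one size per run
        }
        return 0;
    }
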
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 3616572..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -27,8 +27,13 @@
         "external/dynamic_depth/internal",
     ],
 
+    header_libs: [
+        "libmedia_headers",
+    ],
+
     shared_libs: [
         "libbase",
+        "libbinder",
         "libcutils",
         "libcameraservice",
         "libhidlbase",
@@ -44,7 +49,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -57,6 +62,7 @@
     ],
 
     srcs: [
+        "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
         "ClientManagerTest.cpp",
         "DepthProcessorTest.cpp",
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
new file mode 100644
index 0000000..731eebf
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/BnCameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraService.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "../CameraService.h"
+#include "../utils/CameraServiceProxyWrapper.h"
+
+#include <gtest/gtest.h>
+
+#include <memory>
+#include <vector>
+
+using namespace android;
+using namespace android::hardware::camera;
+
+// Empty service listener.
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+    virtual ~TestCameraServiceListener() {};
+
+    virtual binder::Status onStatusChanged(int32_t , const String16&) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+        // No op
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+        return binder::Status::ok();
+    };
+
+    virtual binder::Status onCameraAccessPrioritiesChanged() {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+            int32_t /*torchStrength*/) {
+        // No op
+        return binder::Status::ok();
+    }
+};
+
+// Empty device callback.
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+    TestCameraDeviceCallbacks() {}
+
+    virtual ~TestCameraDeviceCallbacks() {}
+
+    virtual binder::Status onDeviceError(int /*errorCode*/,
+            const CaptureResultExtras& /*resultExtras*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onDeviceIdle() {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+            int64_t /*timestamp*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+            const CaptureResultExtras& /*resultExtras*/,
+            const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onPrepared(int /*streamId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRepeatingRequestError(
+            int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onRequestQueueEmpty() {
+        return binder::Status::ok();
+    }
+};
+
+// Override isCameraDisabled from the CameraServiceProxy with a flag.
+class CameraServiceProxyOverride : public ::android::hardware::BnCameraServiceProxy {
+public:
+    CameraServiceProxyOverride() :
+            mCameraServiceProxy(CameraServiceProxyWrapper::getDefaultCameraServiceProxy()),
+            mCameraDisabled(false), mOverrideCameraDisabled(false)
+    { }
+
+    virtual binder::Status getRotateAndCropOverride(const String16& packageName, int lensFacing,
+            int userId, int *ret) override {
+        return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
+                userId, ret);
+    }
+
+    virtual binder::Status getAutoframingOverride(const String16& packageName, int *ret) override {
+        return mCameraServiceProxy->getAutoframingOverride(packageName, ret);
+    }
+
+    virtual binder::Status pingForUserUpdate() override {
+        return mCameraServiceProxy->pingForUserUpdate();
+    }
+
+    virtual binder::Status notifyCameraState(
+            const hardware::CameraSessionStats& cameraSessionStats) override {
+        return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
+    }
+
+    virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
+        if (mOverrideCameraDisabled) {
+            *ret = mCameraDisabled;
+            return binder::Status::ok();
+        }
+        return mCameraServiceProxy->isCameraDisabled(userId, ret);
+    }
+
+    void setCameraDisabled(bool cameraDisabled) {
+        mCameraDisabled = cameraDisabled;
+    }
+
+    void setOverrideCameraDisabled(bool overrideCameraDisabled) {
+        mOverrideCameraDisabled = overrideCameraDisabled;
+    }
+
+protected:
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
+    bool mCameraDisabled;
+    bool mOverrideCameraDisabled;
+};
+
+class AutoDisconnectDevice {
+public:
+    AutoDisconnectDevice(sp<hardware::camera2::ICameraDeviceUser> device) :
+            mDevice(device)
+    { }
+
+    ~AutoDisconnectDevice() {
+        if (mDevice != nullptr) {
+            mDevice->disconnect();
+        }
+    }
+
+private:
+    sp<hardware::camera2::ICameraDeviceUser> mDevice;
+};
+
+class CameraPermissionsTest : public ::testing::Test {
+protected:
+    static sp<CameraService> sCameraService;
+    static sp<CameraServiceProxyOverride> sCameraServiceProxy;
+    static std::shared_ptr<CameraServiceProxyWrapper> sCameraServiceProxyWrapper;
+    static uid_t sOldUid;
+
+    static void SetUpTestSuite() {
+        sOldUid = getuid();
+        setuid(AID_CAMERASERVER);
+        sCameraServiceProxy = new CameraServiceProxyOverride();
+        sCameraServiceProxyWrapper =
+            std::make_shared<CameraServiceProxyWrapper>(sCameraServiceProxy);
+        sCameraService = new CameraService(sCameraServiceProxyWrapper);
+        sCameraService->clearCachedVariables();
+    }
+
+    static void TearDownTestSuite() {
+        sCameraServiceProxyWrapper = nullptr;
+        sCameraServiceProxy = nullptr;
+        sCameraService = nullptr;
+        setuid(sOldUid);
+    }
+};
+
+sp<CameraService> CameraPermissionsTest::sCameraService = nullptr;
+sp<CameraServiceProxyOverride> CameraPermissionsTest::sCameraServiceProxy = nullptr;
+std::shared_ptr<CameraServiceProxyWrapper>
+CameraPermissionsTest::sCameraServiceProxyWrapper = nullptr;
+uid_t CameraPermissionsTest::sOldUid = 0;
+
+// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
+// policy, and succeed when it isn't.
+TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(true);
+
+    sCameraServiceProxy->setCameraDisabled(true);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
+        ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
+                << "connectDevice returned exception code " << status.exceptionCode();
+    }
+
+    sCameraServiceProxy->setCameraDisabled(false);
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> device;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+        AutoDisconnectDevice autoDisconnect(device);
+        ASSERT_TRUE(status.isOk());
+    }
+}
+
+// Test that consecutive camera connections succeed.
+TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
+
+// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
+// in the second call.
+TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    std::vector<hardware::CameraStatus> statuses;
+    sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+    sCameraService->addListenerTest(serviceListener, &statuses);
+    sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+    for (auto s : statuses) {
+        sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+        sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+        binder::Status status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+        AutoDisconnectDevice autoDisconnectA(deviceA);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+        status =
+                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+        AutoDisconnectDevice autoDisconnectB(deviceB);
+        ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+                " service specific error code " << status.serviceSpecificErrorCode();
+    }
+}
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index e9f6979..1a6b2e0 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -102,23 +102,57 @@
     sp<device::V3_2::ICameraDevice> mDeviceInterface;
     hardware::hidl_vec<common::V1_0::VendorTagSection> mVendorTagSections;
 
+    // Whether to call a physical camera unavailable callback upon setCallback
+    bool mHasPhysicalCameraUnavailableCallback;
+    hardware::hidl_string mLogicalCameraId;
+    hardware::hidl_string mUnavailablePhysicalCameraId;
+
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
 
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
             android::hardware::hidl_vec<uint8_t> chars) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices, chars)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
+
+    TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
+            const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
+            android::hardware::hidl_vec<uint8_t> chars,
+            const hardware::hidl_string& logicalCameraId,
+            const hardware::hidl_string& unavailablePhysicalCameraId) :
+        mDeviceNames(devices),
+        mDeviceInterface(new TestDeviceInterface(devices, chars)),
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(true),
+        mLogicalCameraId(logicalCameraId),
+        mUnavailablePhysicalCameraId(unavailablePhysicalCameraId) {}
 
     virtual hardware::Return<Status> setCallback(
             const sp<provider::V2_4::ICameraProviderCallback>& callbacks) override {
         mCalledCounter[SET_CALLBACK]++;
         mCallbacks = callbacks;
+        if (mHasPhysicalCameraUnavailableCallback) {
+            auto cast26 = provider::V2_6::ICameraProviderCallback::castFrom(callbacks);
+            if (!cast26.isOk()) {
+                ADD_FAILURE() << "Failed to cast ICameraProviderCallback to V2_6";
+            } else {
+                sp<provider::V2_6::ICameraProviderCallback> callback26 = cast26;
+                if (callback26 == nullptr) {
+                    ADD_FAILURE() << "V2_6::ICameraProviderCallback is null after conversion";
+                } else {
+                    callback26->physicalCameraDeviceStatusChange(mLogicalCameraId,
+                            mUnavailablePhysicalCameraId,
+                            android::hardware::camera::common::V1_0::CameraDeviceStatus::NOT_PRESENT);
+                }
+            }
+        }
         return hardware::Return<Status>(Status::OK);
     }
 
@@ -151,9 +185,8 @@
     using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
             const sp<device::V1_0::ICameraDevice>& device)>;
     virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
-            const hardware::hidl_string& cameraDeviceName,
+            [[maybe_unused]] const hardware::hidl_string& cameraDeviceName,
             getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
-        (void) cameraDeviceName;
         _hidl_cb(Status::OK, nullptr); //TODO: impl. of ver. 1.0 device interface
                                        //      otherwise enumeration will fail.
         return hardware::Void();
@@ -227,9 +260,8 @@
     virtual ~TestInteractionProxy() {}
 
     virtual bool registerForNotifications(
-            const std::string &serviceName,
+            [[maybe_unused]] const std::string &serviceName,
             const sp<hidl::manager::V1_0::IServiceNotification> &notification) override {
-        (void) serviceName;
         mManagerNotificationInterface = notification;
         return true;
     }
@@ -266,12 +298,16 @@
 };
 
 struct TestStatusListener : public CameraProviderManager::StatusListener {
+    int mPhysicalCameraStatusChangeCount = 0;
+
     ~TestStatusListener() {}
 
     void onDeviceStatusChanged(const String8 &,
             CameraDeviceStatus) override {}
     void onDeviceStatusChanged(const String8 &, const String8 &,
-            CameraDeviceStatus) override {}
+            CameraDeviceStatus) override {
+        mPhysicalCameraStatusChangeCount++;
+    }
     void onTorchStatusChanged(const String8 &,
             TorchModeStatus) override {}
     void onTorchStatusChanged(const String8 &,
@@ -634,3 +670,46 @@
     ASSERT_EQ(deviceCount, deviceNames.size()) <<
             "Unexpected amount of camera devices";
 }
+
+// Test that CameraProviderManager does not trigger
+// onDeviceStatusChanged(NOT_PRESENT) for physical camera before initialize()
+// returns.
+TEST(CameraProviderManagerTest, PhysicalCameraAvailabilityCallbackRaceTest) {
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+
+    android::hardware::hidl_vec<uint8_t> chars;
+    CameraMetadata meta;
+    int32_t charKeys[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, charKeys,
+            sizeof(charKeys) / sizeof(charKeys[0]));
+    uint8_t capabilities[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities,
+            sizeof(capabilities)/sizeof(capabilities[0]));
+    uint8_t physicalCameraIds[] = { '2', '\0', '3', '\0' };
+    meta.update(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, physicalCameraIds,
+            sizeof(physicalCameraIds)/sizeof(physicalCameraIds[0]));
+    camera_metadata_t* metaBuffer = const_cast<camera_metadata_t*>(meta.getAndLock());
+    chars.setToExternal(reinterpret_cast<uint8_t*>(metaBuffer),
+            get_camera_metadata_size(metaBuffer));
+
+    sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+            vendorSection, chars, "device@3.2/test/0", "2");
+    serviceProxy.setProvider(provider);
+
+    status_t res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    ASSERT_EQ(statusListener->mPhysicalCameraStatusChangeCount, 0)
+            << "Unexpected physical camera status change callback upon provider init.";
+
+    std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
+    auto cameraIds = providerManager->getCameraDeviceIds(&unavailablePhysicalIds);
+    ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
+        << "Unavailable physical camera Ids not set properly.";
+}
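
Note: the chars buffer in the race test above packs the logical camera's physical IDs into ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS as NUL-terminated strings ('2', '\0', '3', '\0'). A hypothetical helper, shown only to illustrate that encoding, that splits such a buffer back into individual IDs:

    // Sketch: decoding the NUL-separated physical camera id list used by the test's
    // ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS entry. Helper name is made up.
    #include <cstddef>
    #include <cstdint>
    #include <string>
    #include <vector>

    std::vector<std::string> splitNulSeparatedIds(const uint8_t* data, size_t size) {
        std::vector<std::string> ids;
        std::string current;
        for (size_t i = 0; i < size; ++i) {
            if (data[i] == '\0') {
                ids.push_back(current);  // each '\0' terminates one physical camera id
                current.clear();
            } else {
                current.push_back(static_cast<char>(data[i]));
            }
        }
        return ids;
    }

Fed the test's four bytes, this yields {"2", "3"}, and "2" is the ID the test then expects to find in unavailablePhysicalIds for camera "0".
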
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 8331136..b367571 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -355,8 +355,6 @@
 #include "DistortionMapperTest_OpenCvData.h"
 
 TEST(DistortionMapperTest, CompareToOpenCV) {
-    status_t res;
-
     float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
 
     // Expect to match within sqrt(2) radius pixels
@@ -370,7 +368,7 @@
     using namespace openCvData;
 
     DistortionMapperInfo *mapperInfo = m.getMapperInfo();
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
+    m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
             /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
index 3c187cd..9f86526 100644
--- a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
@@ -195,6 +195,7 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
+
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -209,11 +210,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0], full_crop[1] + full_crop[3],
         full_crop[0] + full_crop[2], full_crop[1],
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
+        full_crop[0], full_crop[1] + full_crop[3],
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
@@ -286,7 +287,6 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
-
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -301,11 +301,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0] + full_crop[2], full_crop[1],
         full_crop[0], full_crop[1] + full_crop[3],
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
+        full_crop[0] + full_crop[2], full_crop[1],
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index ff7aafd..badd47a 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -160,11 +160,9 @@
             false/*hasZoomRatioRange*/, zoomRatioRange,
             usePreCorrectArray));
 
-    size_t index = 0;
     int32_t width = testActiveArraySize[2];
     int32_t height = testActiveArraySize[3];
     if (usePreCorrectArray) {
-        index = 1;
         width = testPreCorrActiveArraySize[2];
         height = testPreCorrActiveArraySize[3];
     }
@@ -254,6 +252,19 @@
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
     }
+
+    // Verify region zoom scaling doesn't generate invalid metering region
+    // (width < 0, or height < 0)
+    std::array<float, 3> scaleRatios = {10.0f, 1.0f, 0.1f};
+    for (float scaleRatio : scaleRatios) {
+        for (size_t i = 0; i < originalCoords.size(); i+= 2) {
+            int32_t coordinates[] = {originalCoords[i], originalCoords[i+1],
+                    originalCoords[i], originalCoords[i+1]};
+            mapper.scaleRegion(coordinates, scaleRatio, width, height);
+            EXPECT_LE(coordinates[0], coordinates[2]);
+            EXPECT_LE(coordinates[1], coordinates[3]);
+        }
+    }
 }
 
 TEST(ZoomRatioTest, scaleCoordinatesTest) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 69175cc..b58975f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -26,32 +26,41 @@
 
 namespace android {
 
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
 using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
 
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<String8, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
-        CameraServiceProxyWrapper::mSessionStatsMap;
+namespace {
+// Sentinel value returned when extension session stats are reported with a stale or invalid key.
+const String16 POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
 
 /**
  * CameraSessionStatsWrapper functions
  */
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
-    Mutex::Autolock l(mLock);
-
-    updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    if (proxyBinder == nullptr) return;
+    proxyBinder->notifyCameraState(mSessionStats);
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+        sp<hardware::ICameraServiceProxy>& proxyBinder) {
+    Mutex::Autolock l(mLock);
+    updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+    bool deviceError) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
     mSessionStats.mLatencyMs = latencyMs;
-    updateProxyDeviceState(mSessionStats);
+    mSessionStats.mDeviceError = deviceError;
+    mSessionStats.mSessionIndex = 0;
+    updateProxyDeviceState(proxyBinder);
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -66,12 +75,14 @@
     }
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+    sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
     mSessionStats.mMaxPreviewFps = maxPreviewFps;
-    updateProxyDeviceState(mSessionStats);
+    mSessionStats.mSessionIndex++;
+    updateProxyDeviceState(proxyBinder);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
     // after configuration, and all other sessions after configuration.
@@ -79,6 +90,7 @@
 }
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+        sp<hardware::ICameraServiceProxy>& proxyBinder,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -91,10 +103,76 @@
     mSessionStats.mUserTag = String16(userTag.c_str());
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
-    updateProxyDeviceState(mSessionStats);
+
+    updateProxyDeviceState(proxyBinder);
 
     mSessionStats.mInternalReconfigure = 0;
     mSessionStats.mStreamStats.clear();
+    mSessionStats.mCameraExtensionSessionStats = {};
+}
+
+int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
+    Mutex::Autolock l(mLock);
+    return mSessionStats.mLogId;
+}
+
+String16 CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+        const hardware::CameraExtensionSessionStats& extStats) {
+    Mutex::Autolock l(mLock);
+    CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+    if (currStats.key != extStats.key) {
+        // Mismatched keys. Extensions stats likely reported for a closed session
+        ALOGW("%s: mismatched extensions stats key: current='%s' reported='%s'. Dropping stats.",
+              __FUNCTION__, String8(currStats.key).c_str(), String8(extStats.key).c_str());
+        return POISON_EXT_STATS_KEY; // return poisoned key so future calls are
+                                     // definitely dropped.
+    }
+
+    // Matching keys...
+    if (currStats.key.size()) {
+        // non-empty matching keys. overwrite.
+        ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+              extStats.toString().c_str());
+        currStats = extStats;
+        return currStats.key;
+    }
+
+    // Matching empty keys...
+    if (mSessionStats.mClientName != extStats.clientName) {
+        ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+              "Dropping stats.", __FUNCTION__,
+              String8(mSessionStats.mClientName).c_str(),
+              String8(extStats.clientName).c_str());
+        return POISON_EXT_STATS_KEY;
+    }
+
+    // Matching empty keys for the current client...
+    if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+        mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+        // Camera is open, but not active. It is possible that the active callback hasn't
+        // occurred yet. Keep the stats, but don't associate it with any session.
+        ALOGV("%s: extension stat reported for an open, but not active camera. "
+              "Saving stats, but not generating key.", __FUNCTION__);
+        currStats = extStats;
+        return {}; // Subsequent calls will handle setting the correct key.
+    }
+
+    if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+        // camera is active. First call for the session!
+        currStats = extStats;
+
+        // Generate a new key from logId and sessionIndex.
+        std::ostringstream key;
+        key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+        currStats.key = String16(key.str().c_str());
+        ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+        return currStats.key;
+    }
+
+    // Camera is closed. Probably a stale call.
+    ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+          __FUNCTION__, String8(mSessionStats.mCameraId).c_str());
+    return {};
 }
 
 /**
@@ -103,19 +181,26 @@
 
 sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
 #ifndef __BRILLO__
-    Mutex::Autolock al(sProxyMutex);
-    if (sCameraServiceProxy == nullptr) {
-        sp<IServiceManager> sm = defaultServiceManager();
-        // Use checkService because cameraserver normally starts before the
-        // system server and the proxy service. So the long timeout that getService
-        // has before giving up is inappropriate.
-        sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
-        if (binder != nullptr) {
-            sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
-        }
+    Mutex::Autolock al(mProxyMutex);
+    if (mCameraServiceProxy == nullptr) {
+        mCameraServiceProxy = getDefaultCameraServiceProxy();
     }
 #endif
-    return sCameraServiceProxy;
+    return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+    sp<IServiceManager> sm = defaultServiceManager();
+    // Use checkService because cameraserver normally starts before the
+    // system server and the proxy service. So the long timeout that getService
+    // has before giving up is inappropriate.
+    sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+    if (binder != nullptr) {
+        return interface_cast<ICameraServiceProxy>(binder);
+    }
+#endif
+    return nullptr;
 }
 
 void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -138,10 +223,19 @@
     return ret;
 }
 
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const String16& packageName) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
-    if (proxyBinder == nullptr) return;
-    proxyBinder->notifyCameraState(sessionStats);
+    if (proxyBinder == nullptr) {
+        return ANDROID_CONTROL_AUTOFRAMING_OFF;
+    }
+    int ret = 0;
+    auto status = proxyBinder->getAutoframingOverride(packageName, &ret);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed during autoframing override query: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+
+    return ret;
 }
 
 void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
@@ -149,12 +243,12 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-        if (sessionStats == nullptr) {
+        if (mSessionStatsMap.count(id) == 0) {
             ALOGE("%s: SessionStatsMap should contain camera %s",
                     __FUNCTION__, id.c_str());
             return;
         }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
@@ -166,16 +260,17 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-        if (sessionStats == nullptr) {
+        if (mSessionStatsMap.count(id) == 0) {
             ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
                     __FUNCTION__, id.c_str());
             return;
         }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
-    sessionStats->onActive(maxPreviewFps);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onActive(proxyBinder, maxPreviewFps);
 }
 
 void CameraServiceProxyWrapper::logIdle(const String8& id,
@@ -185,13 +280,12 @@
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
-        sessionStats = mSessionStatsMap[id];
-    }
-
-    if (sessionStats == nullptr) {
-        ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+        if (mSessionStatsMap.count(id) == 0) {
+            ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
                 __FUNCTION__, id.c_str());
-        return;
+            return;
+        }
+        sessionStats = mSessionStatsMap[id];
     }
 
     ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
@@ -205,7 +299,8 @@
                 streamStats[i].mStartLatencyMs);
     }
 
-    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
             videoStabilizationMode, streamStats);
 }
 
@@ -226,19 +321,23 @@
             apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
         }
 
-        sessionStats = std::make_shared<CameraSessionStatsWrapper>(String16(id), facing,
-                CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
-                apiLevel, isNdk, latencyMs);
+        // Generate a new log ID for open events
+        int64_t logId = generateLogId(mRandomDevice);
+
+        sessionStats = std::make_shared<CameraSessionStatsWrapper>(
+                String16(id), facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+                apiLevel, isNdk, latencyMs, logId);
         mSessionStatsMap.emplace(id, sessionStats);
         ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
     }
 
     ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
             __FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
-    sessionStats->onOpen();
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onOpen(proxyBinder);
 }
 
-void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs) {
+void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs, bool deviceError) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -254,19 +353,22 @@
                     __FUNCTION__, id.c_str());
             return;
         }
+
         mSessionStatsMap.erase(id);
-        ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+        ALOGV("%s: Erasing id %s, deviceError %d", __FUNCTION__, id.c_str(), deviceError);
     }
 
-    ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
-    sessionStats->onClose(latencyMs);
+    ALOGV("%s: id %s, latencyMs %d, deviceError %d", __FUNCTION__,
+            id.c_str(), latencyMs, deviceError);
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    sessionStats->onClose(proxyBinder, latencyMs, deviceError);
 }
 
-bool CameraServiceProxyWrapper::isCameraDisabled() {
+bool CameraServiceProxyWrapper::isCameraDisabled(int userId) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     if (proxyBinder == nullptr) return true;
     bool ret = false;
-    auto status = proxyBinder->isCameraDisabled(&ret);
+    auto status = proxyBinder->isCameraDisabled(userId, &ret);
     if (!status.isOk()) {
         ALOGE("%s: Failed during camera disabled query: %s", __FUNCTION__,
                 status.exceptionMessage().c_str());
@@ -274,4 +376,48 @@
     return ret;
 }
 
-}; // namespace android
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const String8& cameraId) {
+    std::shared_ptr<CameraSessionStatsWrapper> stats;
+    {
+        Mutex::Autolock _l(mLock);
+        if (mSessionStatsMap.count(cameraId) == 0) {
+            ALOGE("%s: SessionStatsMap should contain camera %s before asking for its logging ID.",
+                  __FUNCTION__, cameraId.c_str());
+            return 0;
+        }
+
+        stats = mSessionStatsMap[cameraId];
+    }
+    return stats->getLogId();
+}
+
+int64_t CameraServiceProxyWrapper::generateLogId(std::random_device& randomDevice) {
+    int64_t ret = 0;
+    do {
+        // std::random_device generates 32 bits per call, so we call it twice
+        ret = randomDevice();
+        ret = ret << 32;
+        ret = ret | randomDevice();
+    } while (ret == 0); // 0 is not a valid identifier
+
+    return ret;
+}
+
+String16 CameraServiceProxyWrapper::updateExtensionStats(
+        const hardware::CameraExtensionSessionStats& extStats) {
+    std::shared_ptr<CameraSessionStatsWrapper> stats;
+    String8 cameraId = String8(extStats.cameraId);
+    {
+        Mutex::Autolock _l(mLock);
+        if (mSessionStatsMap.count(cameraId) == 0) {
+            ALOGE("%s CameraExtensionSessionStats reported for camera id that isn't open: %s",
+                  __FUNCTION__, cameraId.c_str());
+            return {};
+        }
+
+        stats = mSessionStatsMap[cameraId];
+        return stats->updateExtensionSessionStats(extStats);
+    }
+}
+
+}  // namespace android
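The two helpers above work together: generateLogId() stitches two 32-bit draws from std::random_device into a single non-zero 64-bit identifier, and updateExtensionSessionStats() derives the extension-stats key for the session as "<sessionIndex>/<logId>". A minimal standalone sketch of the same idea (illustrative names only, not part of the patch):

#include <cstdint>
#include <random>
#include <sstream>
#include <string>

// Two 32-bit draws form one 64-bit ID; 0 is reserved to mean "invalid".
static int64_t makeLogId(std::random_device& rd) {
    uint64_t id = 0;
    do {
        id = (static_cast<uint64_t>(rd()) << 32) | rd();
    } while (id == 0);
    return static_cast<int64_t>(id);
}

// Key format used to associate extension stats with a session: "sessionIndex/logId".
static std::string makeExtensionStatsKey(int64_t sessionIndex, int64_t logId) {
    std::ostringstream key;
    key << sessionIndex << '/' << logId;
    return key.str();
}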
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index e34a8f0..e32580c 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -24,6 +24,7 @@
 #include <utils/String16.h>
 #include <utils/StrongPointer.h>
 #include <utils/Timers.h>
+#include <random>
 
 #include <camera/CameraSessionStats.h>
 
@@ -32,72 +33,105 @@
 class CameraServiceProxyWrapper {
 private:
     // Guard mCameraServiceProxy
-    static Mutex sProxyMutex;
+    Mutex mProxyMutex;
     // Cached interface to the camera service proxy in system service
-    static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+    sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
 
-    struct CameraSessionStatsWrapper {
+    class CameraSessionStatsWrapper {
+      private:
         hardware::CameraSessionStats mSessionStats;
         Mutex mLock; // lock for per camera session stats
 
-        CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
-                const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
-            mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
-            {}
+        /**
+         * Update the session stats of a given camera device (open/close/active/idle) with
+         * the camera proxy service in the system service
+         */
+        void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
 
-        void onOpen();
-        void onClose(int32_t latencyMs);
+      public:
+        CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
+                                  const String16& clientName, int apiLevel, bool isNdk,
+                                  int32_t latencyMs, int64_t logId)
+            : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
+                            latencyMs, logId) {}
+
+        void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+        void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+                bool deviceError);
         void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
-        void onActive(float maxPreviewFps);
-        void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+        void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+                int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
+
+        String16 updateExtensionSessionStats(const hardware::CameraExtensionSessionStats& extStats);
+
+        // Returns the logId associated with this event.
+        int64_t getLogId();
     };
 
     // Lock for camera session stats map
-    static Mutex mLock;
+    Mutex mLock;
     // Map from camera id to the camera's session statistics
-    static std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+    std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
 
-    /**
-     * Update the session stats of a given camera device (open/close/active/idle) with
-     * the camera proxy service in the system service
-     */
-    static void updateProxyDeviceState(
-            const hardware::CameraSessionStats& sessionStats);
+    std::random_device mRandomDevice;  // pulls 32-bit random numbers from /dev/urandom
 
-    static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+    sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+    // Returns a randomly generated ID that is suitable for logging the event. A new identifier
+    // should only be generated for an open event. All other events for the cameraId should use the
+    // ID generated for the open event associated with them.
+    static int64_t generateLogId(std::random_device& randomDevice);
 
 public:
+    CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+            mCameraServiceProxy(serviceProxy)
+    { }
+
+    static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
     // Open
-    static void logOpen(const String8& id, int facing,
+    void logOpen(const String8& id, int facing,
             const String16& clientPackageName, int apiLevel, bool isNdk,
             int32_t latencyMs);
 
     // Close
-    static void logClose(const String8& id, int32_t latencyMs);
+    void logClose(const String8& id, int32_t latencyMs, bool deviceError);
 
     // Stream configuration
-    static void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+    void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
             int32_t latencyMs);
 
     // Session state becomes active
-    static void logActive(const String8& id, float maxPreviewFps);
+    void logActive(const String8& id, float maxPreviewFps);
 
     // Session state becomes idle
-    static void logIdle(const String8& id,
+    void logIdle(const String8& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
     // Ping camera service proxy for user update
-    static void pingCameraServiceProxy();
+    void pingCameraServiceProxy();
 
     // Return the current top activity rotate and crop override.
-    static int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+    int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+
+    // Return the current top activity autoframing.
+    int getAutoframingOverride(const String16& packageName);
 
     // Detect if the camera is disabled by device policy.
-    static bool isCameraDisabled();
+    bool isCameraDisabled(int userId);
+
+    // Returns the logId currently associated with the given cameraId. See 'mLogId' in
+    // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
+    // identifier. Returns a non-0 value on success.
+    int64_t getCurrentLogIdForCamera(const String8& cameraId);
+
+    // Update the stored extension stats to the latest values
+    String16 updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
 };
 
 } // android
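With the wrapper converted from static functions to an instance, callers hold an object, and the optional constructor argument lets a caller (for example a test) supply its own ICameraServiceProxy instead of relying on the default binder lookup. A rough usage sketch under that assumption; the package name and fakeProxy below are placeholders, not taken from the change, and the types come from the header above:

// Production-style use: the proxy is resolved lazily via getDefaultCameraServiceProxy().
auto wrapper = std::make_shared<CameraServiceProxyWrapper>();
wrapper->logOpen(String8("0"), /*facing*/ 0, String16("com.example.camera"),
                 /*apiLevel*/ 2, /*isNdk*/ false, /*latencyMs*/ 25);
wrapper->logClose(String8("0"), /*latencyMs*/ 10, /*deviceError*/ false);

// Tests can inject their own proxy; passing nullptr (the default) falls back to the
// lazy default lookup shown above.
sp<hardware::ICameraServiceProxy> fakeProxy = nullptr;  // stand-in for a test double
auto testWrapper = std::make_shared<CameraServiceProxyWrapper>(fakeProxy);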
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 2eb2d55..89e75b3 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
 #include "SessionConfigurationUtils.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -26,6 +28,7 @@
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "system/graphics-base-v1.1.h"
+#include <ui/PublicFormat.h>
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
@@ -70,11 +73,11 @@
 
     int32_t dynamicDepthKey =
             SessionConfigurationUtils::getAppropriateModeTag(
-                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+                    ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
 
     int32_t heicKey =
             SessionConfigurationUtils::getAppropriateModeTag(
-                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+                    ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
 
     getStreamConfigurations(staticInfo, scalerKey, scm);
     getStreamConfigurations(staticInfo, depthKey, scm);
@@ -127,7 +130,7 @@
 
 size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
         camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
-    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+    return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
             (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
 }
 
@@ -158,8 +161,13 @@
             getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
     const int32_t heicSizesTag =
             getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+    const int32_t jpegRSizesTag = getAppropriateModeTag(
+            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
 
+    bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
     camera_metadata_ro_entry streamConfigs =
+            (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
             (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
             (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
             info.find(heicSizesTag) :
@@ -193,6 +201,8 @@
 
     if (bestWidth == -1) {
         // Return false if no configurations for this format were listed
+        ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
+                __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
         return false;
     }
 
@@ -209,11 +219,18 @@
 }
 
 //check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
     switch(format) {
         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
         case HAL_PIXEL_FORMAT_YCBCR_P010:
             return true;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (dataSpace == static_cast<android_dataspace_t>(
+                        ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                return true;
+            }
+
+            return false;
         default:
             return false;
     }
@@ -282,6 +299,65 @@
     }
 }
 
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    for (size_t i = 0; i < entry.count; ++i) {
+        uint8_t capability = entry.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+        int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+    int64_t colorSpace64 = colorSpace;
+    int64_t format64 = format;
+
+    // Translate HAL format + data space to public format
+    if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+        format64 = 0x100; // JPEG
+    } else if (format == HAL_PIXEL_FORMAT_BLOB
+            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+        format64 = 0x48454946; // HEIC
+    } else if (format == HAL_PIXEL_FORMAT_BLOB
+            && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+        format64 = 0x69656963; // DEPTH_JPEG
+    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // DEPTH_POINT_CLOUD, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // DEPTH16, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // RAW_DEPTH, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+        return false; // RAW_DEPTH10, not applicable
+    } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+            static_cast<android_dataspace>(
+                ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+        format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+    }
+
+    camera_metadata_ro_entry_t entry =
+            staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+    for (size_t i = 0; i < entry.count; i += 3) {
+        bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+        bool isDynamicProfileCompatible =
+                (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+        if (colorSpace64 == entry.data.i64[i]
+                && isFormatCompatible
+                && isDynamicProfileCompatible) {
+            return true;
+        }
+    }
+
+    ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+            " combination not found", colorSpace, format64, dynamicRangeProfile);
+    return false;
+}
+
 bool isPublicFormat(int32_t format)
 {
     switch(format) {
@@ -309,6 +385,23 @@
     }
 }
 
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+    switch (colorSpace) {
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+            *dataSpace = HAL_DATASPACE_V0_SRGB;
+            return true;
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+            *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+            return true;
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+            *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+            return true;
+        default:
+            ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+            return false;
+    }
+}
+
 bool isStreamUseCaseSupported(int64_t streamUseCase,
         const CameraMetadata &deviceInfo) {
     camera_metadata_ro_entry_t availableStreamUseCases =
@@ -336,7 +429,8 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
-        int64_t streamUseCase, int timestampBase, int mirrorMode) {
+        int64_t streamUseCase, int timestampBase, int mirrorMode,
+        int32_t colorSpace) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -400,6 +494,16 @@
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
     }
 
+    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+            format != HAL_PIXEL_FORMAT_BLOB) {
+        if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+            String8 msg = String8::format("Camera %s: color space %d not supported, failed to "
+                    "convert to data space", logicalCameraId.string(), colorSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        }
+    }
+
     // FIXME: remove this override since the default format should be
     //       IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
     if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -411,7 +515,7 @@
     }
     std::unordered_set<int32_t> overriddenSensorPixelModes;
     if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
-            physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+            physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
         String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
                 "format %#x are not valid",logicalCameraId.string(), format);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -443,13 +547,23 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
-            !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+            !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
         String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
                 "format %#x defined and profile %" PRIx64 ", failed to create output stream",
                 logicalCameraId.string(), format, dynamicRangeProfile);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
+    if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+            SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+            !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+                    dynamicRangeProfile, physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+                "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+                logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
     if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
             physicalCameraMetadata)) {
         String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
@@ -458,7 +572,7 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
-            timestampBase > OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
+            timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
         String8 msg = String8::format("Camera %s: invalid timestamp base %d",
                 logicalCameraId.string(), timestampBase);
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -483,6 +597,7 @@
         streamInfo.streamUseCase = streamUseCase;
         streamInfo.timestampBase = timestampBase;
         streamInfo.mirrorMode = mirrorMode;
+        streamInfo.colorSpace = colorSpace;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
@@ -538,6 +653,7 @@
     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
     stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
     stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+    stream->colorSpace = streamInfo.colorSpace;
     stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
     stream->id = -1; // Invalid stream id
     stream->physicalCameraId = std::string(physicalId.string());
@@ -562,6 +678,7 @@
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+        bool isCompositeJpegRDisabled,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
@@ -635,6 +752,7 @@
         String8 physicalCameraId = String8(it.getPhysicalCameraId());
 
         int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+        int32_t colorSpace = it.getColorSpace();
         std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
         const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                 overrideForPerfClass);
@@ -672,7 +790,7 @@
             streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                     streamInfo.format, streamInfo.width,
-                    streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+                    streamInfo.height, metadataChosen,
                     &streamInfo.sensorPixelModesUsed) != OK) {
                         ALOGE("%s: Deferred surface sensor pixel modes not valid",
                                 __FUNCTION__);
@@ -693,7 +811,7 @@
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                     logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                    streamUseCase, timestampBase, mirrorMode);
+                    streamUseCase, timestampBase, mirrorMode, colorSpace);
 
             if (!res.isOk())
                 return res;
@@ -703,7 +821,10 @@
                         camera3::DepthCompositeStream::isDepthCompositeStream(surface);
                 bool isHeicCompositeStream =
                         camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                if (isDepthCompositeStream || isHeicCompositeStream) {
+                bool isJpegRCompositeStream =
+                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+                        !isCompositeJpegRDisabled;
+                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
                     // We need to take in to account that composite streams can have
                     // additional internal camera streams.
                     std::vector<OutputStreamInfo> compositeStreams;
@@ -711,10 +832,14 @@
                       // TODO: Take care of composite streams.
                         ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
                                 deviceInfo, &compositeStreams);
-                    } else {
+                    } else if (isHeicCompositeStream) {
                         ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
                             deviceInfo, &compositeStreams);
+                    } else {
+                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                            deviceInfo, &compositeStreams);
                     }
+
                     if (ret != OK) {
                         String8 msg = String8::format(
                                 "Camera %s: Failed adding composite streams: %s (%d)",
@@ -843,15 +968,17 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
 
     const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
             convertToSet(sensorPixelModesUsed);
-    if (!isUltraHighResolutionSensor(staticInfo)) {
+    if (!supportsUltraHighResolutionCapture(staticInfo)) {
         if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
                 sensorPixelModesUsedSet.end()) {
             // invalid value for non ultra high res sensors
+            ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+                    "support ultra high resolution capture", __FUNCTION__);
             return BAD_VALUE;
         }
         overriddenSensorPixelModesUsed->clear();
@@ -872,35 +999,40 @@
     // Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
     // size + format of the OutputConfiguration is found exclusively in 1.
     // If yes, add that sensorPixelMode to overriddenSensorPixelModes.
-    // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
-    // compatibility.
+    // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
+    // This maintains backwards compatibility and also tells the framework the stream
+    // might be used in either sensor pixel mode.
     if (sensorPixelModesUsedSet.size() == 0) {
-        // Ambiguous case, default to only 'DEFAULT' mode.
+        // Ambiguous case, override to include both cases.
         if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
-            return OK;
-        }
-        // We don't allow flexible consumer for max resolution mode.
-        if (isInMaximumResolutionStreamConfigurationMap) {
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
             return OK;
         }
-        if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+        if (isInMaximumResolutionStreamConfigurationMap) {
+            overriddenSensorPixelModesUsed->insert(
+                    ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+        } else {
             overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
-            return OK;
         }
-        return BAD_VALUE;
+        return OK;
     }
 
     // Case2: The app has set sensorPixelModesUsed, we need to verify that they
     // are valid / err out.
     if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
             sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+                " isn't present in default stream configuration map", __FUNCTION__, format, width,
+                height);
         return BAD_VALUE;
     }
 
    if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
             sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+        ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+                "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
+                format, width, height);
         return BAD_VALUE;
     }
     *overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
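isColorSpaceSupported() above reads ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP as flat (colorSpace, imageFormat, dynamicRangeProfileBitmap) triples: a request is accepted when the first two fields match exactly and the requested dynamic range profile bit is set in the third. A self-contained sketch of that matching rule (illustrative only):

#include <cstdint>
#include <vector>

// 'profileMap' stands in for the flattened triple list read from the static metadata entry.
static bool colorSpaceEntryMatches(const std::vector<int64_t>& profileMap,
                                   int64_t colorSpace, int64_t format,
                                   int64_t dynamicRangeProfile) {
    for (size_t i = 0; i + 2 < profileMap.size(); i += 3) {
        const bool sameColorSpace = (profileMap[i] == colorSpace);
        const bool sameFormat = (profileMap[i + 1] == format);
        const bool profileBitSet = (profileMap[i + 2] & dynamicRangeProfile) != 0;
        if (sameColorSpace && sameFormat && profileBitSet) {
            return true;
        }
    }
    return false;
}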
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index a127c7b..dc143da 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -98,10 +98,11 @@
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
-        int64_t streamUseCase, int timestampBase, int mirrorMode);
+        int64_t streamUseCase, int timestampBase, int mirrorMode,
+        int32_t colorSpace);
 
 //check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
 
 // check if the dynamic range requires 10-bit output
 bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
@@ -109,6 +110,13 @@
 // Check if the device supports a given dynamicRangeProfile
 bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
 
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+        int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
 bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -130,7 +138,7 @@
 binder::Status
 convertToHALStreamCombination(
     const SessionConfiguration& sessionConfiguration,
-    const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+    const String8 &logicalCameraId, const CameraMetadata &deviceInfo, bool isCompositeJpegRDisabled,
     metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, bool *earlyExit);
@@ -139,7 +147,7 @@
 
 status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        const CameraMetadata &staticInfo,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
 
 bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 4e6f832..111c1bf 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -50,7 +50,7 @@
     for (const auto &stream : aidl.streams) {
         if (static_cast<int>(stream.dynamicRangeProfile) !=
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
-            ALOGE("%s  Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
+            ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
                     stream.dynamicRangeProfile);
             return BAD_VALUE;
         }
@@ -111,8 +111,8 @@
         bool overrideForPerfClass, bool *earlyExit) {
     aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
     auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
-            getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
-            earlyExit);
+            false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
+            aidlStreamConfiguration, overrideForPerfClass, earlyExit);
     if (!ret.isOk()) {
         return ret;
     }
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..7d344f8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,12 +49,22 @@
             return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
             return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+            return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
         case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
             return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
         case ANDROID_LENS_INTRINSIC_CALIBRATION:
             return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
         case ANDROID_LENS_DISTORTION:
             return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+            return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+            return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
         default:
             ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
                     defaultTag);
@@ -63,7 +73,62 @@
     return -1;
 }
 
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+static bool isKeyPresentWithCount(const CameraMetadata &deviceInfo, uint32_t tag, uint32_t count) {
+    auto countFound = deviceInfo.find(tag).count;
+    return (countFound != 0) && (countFound % count == 0);
+}
+
+static bool supportsKeysForBasicUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
+    // Check whether the following conditions are satisfied for reduced ultra high
+    // resolution support:
+    // 1) SENSOR_PIXEL_MODE is advertised in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
+    // 2) The following keys are present in CameraCharacteristics for basic functionality
+    //        a) ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+    //        b) ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION
+    //        c) ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+    //        d) ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+    //        e) ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+    //        f) ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+    camera_metadata_ro_entry_t entryChar;
+    entryChar = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    bool supportsSensorPixelMode = false;
+    for (size_t i = 0; i < entryChar.count; i++) {
+        int32_t key = entryChar.data.i32[i];
+        if (key == ANDROID_SENSOR_PIXEL_MODE) {
+            supportsSensorPixelMode = true;
+            break;
+        }
+    }
+    if (!supportsSensorPixelMode) {
+        return false;
+    }
+
+    // Basic sensor array size information tags are present
+    if (!isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+            /*count*/2) ||
+            !isKeyPresentWithCount(deviceInfo,
+                    ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+                    /*count*/4) ||
+            !isKeyPresentWithCount(deviceInfo,
+                    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, /*count*/4) ||
+            !isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_BINNING_FACTOR, /*count*/2)) {
+        return false;
+    }
+
+    // Basic stream configuration tags are present
+    if (!isKeyPresentWithCount(deviceInfo,
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+            !isKeyPresentWithCount(deviceInfo,
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+            !isKeyPresentWithCount(deviceInfo,
+                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION, /*count*/ 4)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
     camera_metadata_ro_entry_t entryCap;
     entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
     // Go through the capabilities and check if it has
@@ -74,7 +139,10 @@
             return true;
         }
     }
-    return false;
+
+    // If not, then check that the keys which guarantee basic support for
+    // ultra high resolution capture are present.
+    return supportsKeysForBasicUltraHighResolutionCapture(deviceInfo);
 }
 
 bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
@@ -93,4 +161,4 @@
 
 } // namespace SessionConfigurationUtils
 } // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
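isKeyPresentWithCount() above treats a static metadata tag as usable when its entry is non-empty and its element count is a whole number of tuples: pixel-array sizes come as (width, height) pairs and the array-size tags as (xmin, ymin, width, height) quadruples. A tiny sketch of that check, with assumed example counts:

#include <cstddef>

// True when 'entryCount' holds at least one complete tuple of 'tupleSize' elements.
static bool hasCompleteTuples(std::size_t entryCount, std::size_t tupleSize) {
    return entryCount != 0 && entryCount % tupleSize == 0;
}

// hasCompleteTuples(4, 4) -> true   (one active-array rectangle)
// hasCompleteTuples(2, 2) -> true   (one width/height pair)
// hasCompleteTuples(0, 4) -> false  (tag missing entirely)
// hasCompleteTuples(3, 4) -> false  (truncated entry)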
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
index 45b1e91..dac1824 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -22,7 +22,7 @@
 namespace camera3 {
 namespace SessionConfigurationUtils {
 
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo);
 
 int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
 
@@ -33,4 +33,4 @@
 } // camera3
 } // android
 
-#endif
\ No newline at end of file
+#endif
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 461f5e9..d1e54ab 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -169,6 +169,22 @@
 
     camera_metadata_entry lastEntry = lastValues.find(tag);
 
+    // Monitor when the stream ids change; this makes it easier to see which
+    // monitored metadata values belong to capture requests that use different
+    // stream ids.
+    if (source == REQUEST) {
+        if (inputStreamId != mLastInputStreamId) {
+            mMonitoringEvents.emplace(source, frameNumber, timestamp, camera_metadata_ro_entry_t{},
+                                      cameraId, std::unordered_set<int>(), inputStreamId);
+            mLastInputStreamId = inputStreamId;
+        }
+
+        if (outputStreamIds != mLastStreamIds) {
+            mMonitoringEvents.emplace(source, frameNumber, timestamp, camera_metadata_ro_entry_t{},
+                                      cameraId, outputStreamIds, -1);
+            mLastStreamIds = outputStreamIds;
+        }
+    }
     if (entry.count > 0) {
         bool isDifferent = false;
         if (lastEntry.count > 0) {
@@ -190,22 +206,14 @@
             // No last entry, so always consider to be different
             isDifferent = true;
         }
-        // Also monitor when the stream ids change, this helps visually see what
-        // monitored metadata values are for capture requests with different
-        // stream ids.
-        if (source == REQUEST &&
-                (inputStreamId != mLastInputStreamId || outputStreamIds != mLastStreamIds)) {
-            mLastInputStreamId = inputStreamId;
-            mLastStreamIds = outputStreamIds;
-            isDifferent = true;
-        }
+
         if (isDifferent) {
             ALOGV("%s: Tag %s changed", __FUNCTION__,
                   get_local_camera_metadata_tag_name_vendor_id(
                           tag, mVendorTagId));
             lastValues.update(entry);
             mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId,
-                    outputStreamIds, inputStreamId);
+                                      std::unordered_set<int>(), -1);
         }
     } else if (lastEntry.count > 0) {
         // Value has been removed
@@ -219,8 +227,8 @@
         entry.count = 0;
         mLastInputStreamId = inputStreamId;
         mLastStreamIds = outputStreamIds;
-        mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId, outputStreamIds,
-                inputStreamId);
+        mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId,
+                                  std::unordered_set<int>(), -1);
     }
 }
 
@@ -261,23 +269,39 @@
 
     for (const auto& event : mMonitoringEvents) {
         int indentation = (event.source == REQUEST) ? 15 : 30;
-        String8 eventString = String8::format("f%d:%" PRId64 "ns:%*s%*s%s.%s: ",
+        String8 eventString = String8::format("f%d:%" PRId64 "ns:%*s%*s",
                 event.frameNumber, event.timestamp,
                 2, event.cameraId.c_str(),
                 indentation,
-                event.source == REQUEST ? "REQ:" : "RES:",
+                event.source == REQUEST ? "REQ:" : "RES:");
+
+        if (!event.outputStreamIds.empty()) {
+            eventString += " output stream ids:";
+            for (const auto& id : event.outputStreamIds) {
+                eventString.appendFormat(" %d", id);
+            }
+            eventString += "\n";
+            vec.emplace_back(eventString.string());
+            continue;
+        }
+
+        if (event.inputStreamId != -1) {
+            eventString.appendFormat(" input stream id: %d\n", event.inputStreamId);
+            vec.emplace_back(eventString.string());
+            continue;
+        }
+
+        eventString += String8::format(
+                "%s.%s: ",
                 get_local_camera_metadata_section_name_vendor_id(event.tag, mVendorTagId),
                 get_local_camera_metadata_tag_name_vendor_id(event.tag, mVendorTagId));
-        if (event.newData.size() == 0) {
-            eventString += " (Removed)";
+
+        if (event.newData.empty()) {
+            eventString += " (Removed)\n";
         } else {
-            eventString += getEventDataString(event.newData.data(),
-                                    event.tag,
-                                    event.type,
-                                    event.newData.size() / camera_metadata_type_size[event.type],
-                                    indentation + 18,
-                                    event.outputStreamIds,
-                                    event.inputStreamId);
+            eventString += getEventDataString(
+                    event.newData.data(), event.tag, event.type,
+                    event.newData.size() / camera_metadata_type_size[event.type], indentation + 18);
         }
         vec.emplace_back(eventString.string());
     }
@@ -285,13 +309,8 @@
 
 #define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 29
 
-String8 TagMonitor::getEventDataString(const uint8_t* data_ptr,
-                                    uint32_t tag,
-                                    int type,
-                                    int count,
-                                    int indentation,
-                                    const std::unordered_set<int32_t>& outputStreamIds,
-                                    int32_t inputStreamId) {
+String8 TagMonitor::getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type, int count,
+                                       int indentation) {
     static int values_per_line[NUM_TYPES] = {
         [TYPE_BYTE]     = 16,
         [TYPE_INT32]    = 8,
@@ -327,7 +346,7 @@
                         == OK) {
                         returnStr += value_string_tmp;
                     } else {
-                        returnStr.appendFormat("%hhu", *(data_ptr + index));
+                        returnStr.appendFormat("%hhu ", *(data_ptr + index));
                     }
                     break;
                 case TYPE_INT32:
@@ -344,7 +363,7 @@
                     }
                     break;
                 case TYPE_FLOAT:
-                    returnStr.appendFormat("%0.8f", *(float*)(data_ptr + index));
+                    returnStr.appendFormat("%0.8f ", *(float*)(data_ptr + index));
                     break;
                 case TYPE_INT64:
                     returnStr.appendFormat("%" PRId64 " ", *(int64_t*)(data_ptr + index));
@@ -362,17 +381,7 @@
                     returnStr += "??? ";
             }
         }
-        returnStr += "] ";
-        if (!outputStreamIds.empty()) {
-            returnStr += "output stream ids: ";
-            for (const auto &id : outputStreamIds) {
-                returnStr.appendFormat(" %d ", id);
-            }
-        }
-        if (inputStreamId != -1) {
-            returnStr.appendFormat("input stream id: %d", inputStreamId);
-        }
-        returnStr += "\n";
+        returnStr += "]\n";
     }
     return returnStr;
 }
@@ -385,11 +394,12 @@
         source(src),
         frameNumber(frameNumber),
         timestamp(timestamp),
+        cameraId(cameraId),
         tag(value.tag),
         type(value.type),
         newData(value.data.u8, value.data.u8 + camera_metadata_type_size[value.type] * value.count),
-        cameraId(cameraId), outputStreamIds(outputStreamIds), inputStreamId(inputStreamId) {
-}
+        outputStreamIds(outputStreamIds),
+        inputStreamId(inputStreamId) {}
 
 TagMonitor::MonitorEvent::~MonitorEvent() {
 }
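After this change a MonitorEvent is effectively one of two flavors: a tag-value event (stream-id fields left at their defaults) or a standalone stream-id event carrying an empty metadata entry, which is why the dump loop checks the stream-id fields before formatting any tag data. A shape-only sketch of that branching (not the real class):

#include <cstdint>
#include <string>
#include <unordered_set>

struct EventSketch {
    std::unordered_set<int32_t> outputStreamIds;  // non-empty => "output stream ids" line
    int32_t inputStreamId = -1;                   // != -1     => "input stream id" line
    std::string tagLine;                          // only meaningful for tag-value events
};

static std::string render(const EventSketch& e) {
    if (!e.outputStreamIds.empty()) {
        std::string line = "output stream ids:";
        for (int32_t id : e.outputStreamIds) line += " " + std::to_string(id);
        return line;
    }
    if (e.inputStreamId != -1) {
        return "input stream id: " + std::to_string(e.inputStreamId);
    }
    return e.tagLine;  // e.g. "android.control.aeMode: [1 ]"
}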
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 088d6fe..9ded15d 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -85,12 +85,8 @@
     // function.
     void dumpMonitoredTagEventsToVectorLocked(std::vector<std::string> &out);
 
-    static String8 getEventDataString(const uint8_t *data_ptr,
-                                       uint32_t tag, int type,
-                                       int count,
-                                       int indentation,
-                                       const std::unordered_set<int32_t> &outputStreamIds,
-                                       int32_t inputStreamId);
+    static String8 getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type, int count,
+                                      int indentation);
 
     void monitorSingleMetadata(TagMonitor::eventSource source, int64_t frameNumber,
             nsecs_t timestamp, const std::string& cameraId, uint32_t tag,
@@ -128,12 +124,15 @@
         eventSource source;
         uint32_t frameNumber;
         nsecs_t timestamp;
+        std::string cameraId;
         uint32_t tag;
         uint8_t type;
         std::vector<uint8_t> newData;
-        std::string cameraId;
+        // NOTE: We want to print changes to outputStreamIds and inputStreamId on their own lines.
+        // So any MonitorEvent where these fields are not at their defaults will have garbage
+        // values for all fields other than source, frameNumber, timestamp, and cameraId.
         std::unordered_set<int32_t> outputStreamIds;
-        int32_t inputStreamId = 1;
+        int32_t inputStreamId = -1;
     };
 
     // A ring buffer for tracking the last kMaxMonitorEvents metadata changes
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index 4488efb..a2f17c2 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -54,6 +54,9 @@
         arm64: {
             src: "seccomp_policy/mediaswcodec-arm64.policy",
         },
+        riscv64: {
+            src: "seccomp_policy/mediaswcodec-riscv64.policy",
+        },
         x86: {
             src: "seccomp_policy/mediaswcodec-x86.policy",
         },
@@ -144,6 +147,9 @@
         arm64: {
             src: "seccomp_policy/mediacodec-arm64.policy",
         },
+        riscv64: {
+            enabled: false,
+        },
         x86: {
             src: "seccomp_policy/mediacodec-x86.policy",
         },
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
new file mode 100644
index 0000000..a55c3eb
--- /dev/null
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-riscv64.policy
@@ -0,0 +1,60 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+read: 1
+mprotect: 1
+prctl: 1
+openat: 1
+getuid: 1
+getrlimit: 1
+writev: 1
+ioctl: 1
+close: 1
+mmap: 1
+munmap: 1
+fstat: 1
+madvise: 1
+newfstatat: 1
+futex: 1
+faccessat: 1
+lseek: 1
+clone: 1
+sigaltstack: 1
+rt_sigprocmask: 1
+setpriority: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+readlinkat: 1
+fstatfs: 1
+pread64: 1
+mremap: 1
+dup: 1
+set_tid_address: 1
+write: 1
+nanosleep: 1
+sched_setscheduler: 1
+uname: 1
+memfd_create: 1
+ftruncate: 1
+getdents64: 1
+ppoll: 1
+
+# Required by AddressSanitizer
+gettid: 1
+sched_yield: 1
+getpid: 1
+
+@include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.riscv64.policy
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index 85ce110..acafe56 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -74,6 +74,9 @@
         arm64: {
             src: "seccomp_policy/mediaextractor-arm64.policy",
         },
+        riscv64: {
+            src: "seccomp_policy/mediaextractor-riscv64.policy",
+        },
         x86: {
             src: "seccomp_policy/mediaextractor-x86.policy",
         },
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy
new file mode 100644
index 0000000..df143dd
--- /dev/null
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-riscv64.policy
@@ -0,0 +1,48 @@
+# Organized by frequency of system call - in descending order for
+# best performance.
+ioctl: 1
+futex: 1
+prctl: 1
+write: 1
+getpriority: 1
+close: 1
+dup: 1
+mmap: 1
+munmap: 1
+openat: 1
+mprotect: 1
+madvise: 1
+getuid: 1
+fstat: 1
+fstatfs: 1
+read: 1
+setpriority: 1
+sigaltstack: 1
+clone: 1
+sched_setscheduler: 1
+lseek: 1
+newfstatat: 1
+faccessat: 1
+restart_syscall: 1
+exit: 1
+exit_group: 1
+rt_sigreturn: 1
+rt_sigprocmask: 1
+getrlimit: 1
+nanosleep: 1
+getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
+
+# for dynamically loading extractors
+getdents64: 1
+readlinkat: 1
+pread64: 1
+mremap: 1
+
+# Required by Sanitizers
+sched_yield: 1
+
+@include /apex/com.android.media/etc/seccomp_policy/crash_dump.riscv64.policy
+@include /apex/com.android.media/etc/seccomp_policy/code_coverage.riscv64.policy
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index 9ff0ce4..c96c37b 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -38,5 +38,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of the libmedialogservice library",
+        vector: "local_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "future_version",
     },
 }
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 11534bb..c90488f 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -138,6 +138,7 @@
         "AudioTypes.cpp",
         "cleaner.cpp",
         "iface_statsd.cpp",
+        "MediaDrmStatsdHelper.cpp",
         "MediaMetricsService.cpp",
         "statsd_audiopolicy.cpp",
         "statsd_audiorecord.cpp",
@@ -169,7 +170,7 @@
         "libmemunreachable",
         "libprotobuf-cpp-lite",
         "libstagefright_foundation",
-        "libstatslog",
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
         "libutils",
@@ -177,6 +178,7 @@
     ],
 
     export_shared_lib_headers: [
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
     ],
@@ -200,3 +202,33 @@
         "libaudioutils_headers",
     ],
 }
+
+cc_library {
+    name: "libstats_media_metrics",
+    generated_sources: ["stats_media_metrics.cpp"],
+    generated_headers: ["stats_media_metrics.h"],
+    export_generated_headers: ["stats_media_metrics.h"],
+    shared_libs: [
+        "libcutils",
+        "libstatspull",
+        "libstatssocket",
+    ],
+}
+
+genrule {
+    name: "stats_media_metrics.h",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --header $(genDir)/stats_media_metrics.h --module media_metrics --namespace android,stats,media_metrics",
+    out: [
+        "stats_media_metrics.h",
+    ],
+}
+
+genrule {
+    name: "stats_media_metrics.cpp",
+    tools: ["stats-log-api-gen"],
+    cmd: "$(location stats-log-api-gen) --cpp $(genDir)/stats_media_metrics.cpp --module media_metrics --namespace android,stats,media_metrics --importHeader stats_media_metrics.h",
+    out: [
+        "stats_media_metrics.cpp",
+    ],
+}
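
For orientation (a hedged sketch, not part of the patch): the two genrules above produce a typed logging API in the android::stats::media_metrics namespace, which the mediametrics sources below adopt in place of the old android::util API from libstatslog. The sketch assumes the generated header exposes per-atom stats_write() overloads and atom-id constants, as the AudioAnalytics.cpp hunks that follow suggest; the helper name is illustrative, not a helper from the patch.

#include <utility>

#include <stats_media_metrics.h>  // generated by stats-log-api-gen (see genrules above)

// Forward an atom-id constant plus its fields to the generated writer,
// compiling the call out entirely when statsd support is disabled.
template <typename... Args>
int writeAtomIfEnabled(Args&&... args) {  // illustrative name only
#ifdef STATSD_ENABLE
    return android::stats::media_metrics::stats_write(std::forward<Args>(args)...);
#else
    (void)sizeof...(args);  // statsd disabled: drop the atom and report 0
    return 0;
#endif
}
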
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 99e3691..59d1ae4 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -24,7 +24,7 @@
 #include <aaudio/AAudio.h>        // error codes
 #include <audio_utils/clock.h>    // clock conversions
 #include <cutils/properties.h>
-#include <statslog.h>             // statsd
+#include <stats_media_metrics.h>  // statsd
 #include <system/audio.h>
 
 #include "AudioTypes.h"           // string to int conversions
@@ -238,6 +238,57 @@
     "sample_rate",
     "content_type",
     "sharing_requested",
+    "format_hardware",
+    "channel_count_hardware",
+    "sample_rate_hardware",
+    "uid",
+};
+
+static constexpr const char * HeadTrackerDeviceEnabledFields[] {
+    "mediametrics_headtrackerdeviceenabled_reported",
+    "type",
+    "event",
+    "enabled",
+};
+
+static constexpr const char * HeadTrackerDeviceSupportedFields[] {
+    "mediametrics_headtrackerdevicesupported_reported",
+    "type",
+    "event",
+    "supported",
+};
+
+static constexpr const char * SpatializerCapabilitiesFields[] {
+    "mediametrics_spatializer_reported",
+    "head_tracking_modes",
+    "spatializer_levels",
+    "spatializer_modes",
+    "channel_masks",
+};
+
+static constexpr const char * SpatializerDeviceEnabledFields[] {
+    "mediametrics_spatializerdeviceenabled_reported",
+    "type",
+    "event",
+    "enabled",
+};
+
+static constexpr const char * const MidiDeviceCloseFields[] {
+    "mediametrics_midi_device_close_reported",
+    "uid",
+    "midi_device_id",
+    "input_port_count",
+    "output_port_count",
+    "device_type",
+    "is_shared",
+    "supports_ump",
+    "using_alsa",
+    "duration_ns",
+    "opened_count",
+    "closed_count",
+    "device_disconnected",
+    "total_input_bytes",
+    "total_output_bytes",
 };
 
 /**
@@ -263,7 +314,7 @@
     int result = 0;
 
 #ifdef STATSD_ENABLE
-    result = android::util::stats_write(args...);
+    result = stats::media_metrics::stats_write(args...);
 #endif
     return result;
 }
@@ -279,7 +330,7 @@
     std::stringstream ss;
 
 #ifdef STATSD_ENABLE
-    result = android::util::stats_write(args...);
+    result = stats::media_metrics::stats_write(args...);
     ss << "result:" << result;
 #endif
     ss << " { ";
@@ -459,6 +510,24 @@
             [this](const std::shared_ptr<const android::mediametrics::Item> &item){
                 mAudioPowerUsage.checkCreatePatch(item);
             }));
+
+    // Handle Spatializer - these keys are prefixed by "audio.spatializer."
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "*." AMEDIAMETRICS_PROP_EVENT,
+        std::monostate{}, /* match any event */
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mSpatializer.onEvent(item);
+            }));
+
+    // Handle MIDI
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_MIDI "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+                mMidiLogging.onEvent(item);
+            }));
 }
 
 AudioAnalytics::~AudioAnalytics()
@@ -569,7 +638,7 @@
         const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
 
         // currently we only send create status events.
-        const int32_t event = android::util::
+        const int32_t event = stats::media_metrics::
                 MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
 
         // The following fields should all be present in a create event.
@@ -609,7 +678,7 @@
                 __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
 
         const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
                 , atom_status
                 , message.c_str()
                 , subCode
@@ -623,7 +692,7 @@
                 , sampleRate
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+        mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
         return true;
     }
     return false;
@@ -641,7 +710,7 @@
         const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
 
         // currently we only send create status events.
-        const int32_t event = android::util::
+        const int32_t event = stats::media_metrics::
                 MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__EVENT__AUDIO_TRACK_EVENT_CREATE;
 
         // The following fields should all be present in a create event.
@@ -696,7 +765,7 @@
                 __func__,
                 AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
         const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
                 , atom_status
                 , message.c_str()
                 , subCode
@@ -713,7 +782,7 @@
                 , (float)pitch
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+        mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
         return true;
     }
     return false;
@@ -750,15 +819,9 @@
     int32_t frameCount = 0;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount);
-    std::string inputDevicePairs;
-    mAudioAnalytics.mAnalyticsState->timeMachine().get(
-            key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevicePairs);
     int32_t intervalCount = 0;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_INTERVALCOUNT, &intervalCount);
-    std::string outputDevicePairs;
-    mAudioAnalytics.mAnalyticsState->timeMachine().get(
-            key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevicePairs);
     int32_t sampleRate = 0;
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate);
@@ -766,53 +829,16 @@
     mAudioAnalytics.mAnalyticsState->timeMachine().get(
             key, AMEDIAMETRICS_PROP_FLAGS, &flags);
 
-    // We may have several devices.
-    // Accumulate the bit flags for input and output devices.
-    std::stringstream oss;
-    long_enum_type_t outputDeviceBits{};
-    {   // compute outputDevices
-        const auto devaddrvec = stringutils::getDeviceAddressPairs(outputDevicePairs);
-        for (const auto& [device, addr] : devaddrvec) {
-            if (oss.tellp() > 0) oss << "|";  // delimit devices with '|'.
-            oss << device;
-            outputDeviceBits += types::lookup<types::OUTPUT_DEVICE, long_enum_type_t>(device);
-        }
-    }
-    const std::string outputDevices = oss.str();
-
-    std::stringstream iss;
-    long_enum_type_t inputDeviceBits{};
-    {   // compute inputDevices
-        const auto devaddrvec = stringutils::getDeviceAddressPairs(inputDevicePairs);
-        for (const auto& [device, addr] : devaddrvec) {
-            if (iss.tellp() > 0) iss << "|";  // delimit devices with '|'.
-            iss << device;
-            inputDeviceBits += types::lookup<types::INPUT_DEVICE, long_enum_type_t>(device);
-        }
-    }
-    const std::string inputDevices = iss.str();
-
-    // Get connected device name if from bluetooth.
-    bool isBluetooth = false;
-
-    std::string inputDeviceNames;  // not filled currently.
-    std::string outputDeviceNames;
-    if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
-        isBluetooth = true;
-        outputDeviceNames = SUPPRESSED;
-#if 0   // TODO(b/161554630) sanitize name
-        mAudioAnalytics.mAnalyticsState->timeMachine().get(
-            "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &outputDeviceNames);
-        // Remove | if present
-        stringutils::replace(outputDeviceNames, "|", '?');
-        if (outputDeviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
-            outputDeviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
-        }
-#endif
-    }
-
     switch (itemType) {
     case RECORD: {
+        std::string inputDevicePairs;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_INPUTDEVICES, &inputDevicePairs);
+
+        const auto [ inputDeviceStatsd, inputDevices ] =
+                stringutils::parseInputDevicePairs(inputDevicePairs);
+        const std::string inputDeviceNames;  // not filled currently.
+
         std::string callerName;
         const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
@@ -848,7 +874,7 @@
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
-              << " inputDevices:" << inputDevices << "(" << inputDeviceBits
+              << " inputDevices:" << inputDevices << "(" << inputDeviceStatsd
               << ") inputDeviceNames:" << inputDeviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
@@ -865,8 +891,8 @@
         if (clientCalled  // only log if client app called AudioRecord.
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
-                    , ENUM_EXTRACT(inputDeviceBits)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
+                    , ENUM_EXTRACT(inputDeviceStatsd)
                     , inputDeviceNames.c_str()
                     , deviceTimeNs
                     , ENUM_EXTRACT(encodingForStats)
@@ -883,7 +909,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case THREAD: {
@@ -895,18 +921,35 @@
                 key, AMEDIAMETRICS_PROP_UNDERRUN, &underrun);
 
         const bool isInput = types::isInputThreadType(type);
+
+        // get device information
+        std::string devicePairs;
+        std::string deviceStatsd;
+        std::string devices;
+        std::string deviceNames;
+        if (isInput) {
+            // Note we get the "last" device which is the one associated with group.
+            item->get(AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_INPUTDEVICES,
+                    &devicePairs);
+            std::tie(deviceStatsd, devices) = stringutils::parseInputDevicePairs(devicePairs);
+        } else {
+            // Note we get the "last" device which is the one associated with group.
+            item->get(AMEDIAMETRICS_PROP_PREFIX_LAST AMEDIAMETRICS_PROP_OUTPUTDEVICES,
+                    &devicePairs);
+            std::tie(deviceStatsd, devices) = stringutils::parseOutputDevicePairs(devicePairs);
+            deviceNames = mAudioAnalytics.getDeviceNamesFromOutputDevices(devices);
+        }
+
         const auto encodingForStats = types::lookup<types::ENCODING, short_enum_type_t>(encoding);
         const auto flagsForStats =
                 (isInput ? types::lookup<types::INPUT_FLAG, short_enum_type_t>(flags)
                         : types::lookup<types::OUTPUT_FLAG, short_enum_type_t>(flags));
         const auto typeForStats = types::lookup<types::THREAD_TYPE, short_enum_type_t>(type);
 
-        LOG(LOG_LEVEL) << "key:" << key
+        LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
-              << " inputDevices:" << inputDevices << "(" << inputDeviceBits
-              << ") outputDevices:" << outputDevices << "(" << outputDeviceBits
-              << ") inputDeviceNames:" << inputDeviceNames
-              << " outputDeviceNames:" << outputDeviceNames
+              << " devices:" << devices << "(" << deviceStatsd
+              << ") deviceNames:" << deviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
               << ") frameCount:" << frameCount
@@ -918,9 +961,9 @@
               << ")";
         if (mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
-                , isInput ? ENUM_EXTRACT(inputDeviceBits) : ENUM_EXTRACT(outputDeviceBits)
-                , isInput ? inputDeviceNames.c_str() : outputDeviceNames.c_str()
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+                , ENUM_EXTRACT(deviceStatsd)
+                , deviceNames.c_str()
                 , deviceTimeNs
                 , ENUM_EXTRACT(encodingForStats)
                 , frameCount
@@ -932,10 +975,19 @@
             );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
         }
     } break;
     case TRACK: {
+        std::string outputDevicePairs;
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+                key, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevicePairs);
+
+        const auto [ outputDeviceStatsd, outputDevices ] =
+                stringutils::parseOutputDevicePairs(outputDevicePairs);
+        const std::string outputDeviceNames =
+                mAudioAnalytics.getDeviceNamesFromOutputDevices(outputDevices);
+
         std::string callerName;
         const bool clientCalled = mAudioAnalytics.mAnalyticsState->timeMachine().get(
                 key, AMEDIAMETRICS_PROP_CALLERNAME, &callerName) == OK;
@@ -1003,7 +1055,7 @@
 
         LOG(LOG_LEVEL) << "key:" << key
               << " id:" << id
-              << " outputDevices:" << outputDevices << "(" << outputDeviceBits
+              << " outputDevices:" << outputDevices << "(" << outputDeviceStatsd
               << ") outputDeviceNames:" << outputDeviceNames
               << " deviceTimeNs:" << deviceTimeNs
               << " encoding:" << encoding << "(" << encodingForStats
@@ -1029,8 +1081,8 @@
         if (clientCalled // only log if client app called AudioTracks
                 && mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
-                    , ENUM_EXTRACT(outputDeviceBits)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
+                    , ENUM_EXTRACT(outputDeviceStatsd)
                     , outputDeviceNames.c_str()
                     , deviceTimeNs
                     , ENUM_EXTRACT(encodingForStats)
@@ -1053,15 +1105,10 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
         }
         } break;
     }
-
-    // Report this as needed.
-    if (isBluetooth) {
-        // report this for Bluetooth
-    }
 }
 
 // DeviceConnection helper class.
@@ -1120,7 +1167,7 @@
             const long_enum_type_t inputDeviceBits{};
 
             const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
                     , mA2dpDeviceName.c_str()
@@ -1130,7 +1177,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
     }
 }
@@ -1174,7 +1221,7 @@
                 << " deviceName:" << mA2dpDeviceName;
         if (mAudioAnalytics.mDeliverStatistics) {
             const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                    CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                    CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                     , ENUM_EXTRACT(inputDeviceBits)
                     , ENUM_EXTRACT(outputDeviceBits)
                     , mA2dpDeviceName.c_str()
@@ -1184,7 +1231,7 @@
                     );
             ALOGV("%s: statsd %s", __func__, str.c_str());
             mAudioAnalytics.mStatsdLog->log(
-                    android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                    stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
         }
         return;
     }
@@ -1201,7 +1248,7 @@
             << " deviceName:" << mA2dpDeviceName;
     if (mAudioAnalytics.mDeliverStatistics) {
         const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
-                CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
                 , ENUM_EXTRACT(inputDeviceBits)
                 , ENUM_EXTRACT(outputDeviceBits)
                 , mA2dpDeviceName.c_str()
@@ -1211,7 +1258,7 @@
                 );
         ALOGV("%s: statsd %s", __func__, str.c_str());
         mAudioAnalytics.mStatsdLog->log(
-                android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+                stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
     }
 }
 
@@ -1317,6 +1364,21 @@
     const auto sharingModeRequested =
             types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeRequestedStr);
 
+    std::string formatHardwareStr;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ENCODINGHARDWARE, &formatHardwareStr);
+    const auto formatHardware = types::lookup<types::ENCODING, int32_t>(formatHardwareStr);
+
+    int32_t channelCountHardware = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, &channelCountHardware);
+
+    int32_t sampleRateHardware = 0;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, &sampleRateHardware);
+
+    const auto uid = item->getUid();
+
     LOG(LOG_LEVEL) << "key:" << key
             << " path:" << path
             << " direction:" << direction << "(" << directionStr << ")"
@@ -1336,13 +1398,17 @@
             << " sample_rate: " << sampleRate
             << " content_type: " << contentType << "(" << contentTypeStr << ")"
             << " sharing_requested:" << sharingModeRequested
-                    << "(" << sharingModeRequestedStr << ")";
+                    << "(" << sharingModeRequestedStr << ")"
+            << " format_hardware:" << formatHardware << "(" << formatHardwareStr << ")"
+            << " channel_count_hardware:" << channelCountHardware
+            << " sample_rate_hardware: " << sampleRateHardware
+            << " uid: " << uid;
 
     if (mAudioAnalytics.mDeliverStatistics) {
-        android::util::BytesField bf_serialized(
+        const stats::media_metrics::BytesField bf_serialized(
             serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
         const auto result = sendToStatsd(
-                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
                 , path
                 , direction
                 , framesPerBurst
@@ -1361,11 +1427,15 @@
                 , sampleRate
                 , contentType
                 , sharingModeRequested
+                , formatHardware
+                , channelCountHardware
+                , sampleRateHardware
+                , uid
                 );
         std::stringstream ss;
         ss << "result:" << result;
         const auto fieldsStr = printFields(AAudioStreamFields,
-                CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+                CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
                 , path
                 , direction
                 , framesPerBurst
@@ -1384,11 +1454,15 @@
                 , sampleRate
                 , contentType
                 , sharingModeRequested
+                , formatHardware
+                , channelCountHardware
+                , sampleRateHardware
+                , uid
                 );
         ss << " " << fieldsStr;
         std::string str = ss.str();
         ALOGV("%s: statsd %s", __func__, str.c_str());
-        mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
+        mAudioAnalytics.mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
     }
 }
 
@@ -1525,5 +1599,312 @@
     return { s, n };
 }
 
+// Classifies the setting event for statsd (uses the generated statsd enums.proto constants).
+static int32_t classifySettingEvent(bool isSetAlready, bool withinBoot) {
+    if (isSetAlready) {
+        return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
+    }
+    if (withinBoot) {
+        return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
+    }
+    return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
+}
+
+void AudioAnalytics::Spatializer::onEvent(
+        const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+    const auto key = item->getKey();
+
+    if (!startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER)) return;
+
+    const std::string suffix =
+            key.substr(std::size(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER) - 1);
+
+    std::string eventStr; // optional - find the actual event string.
+    (void)item->get(AMEDIAMETRICS_PROP_EVENT, &eventStr);
+
+    const size_t delim = suffix.find('.'); // note: could use split() instead.
+    if (delim == suffix.npos) {
+        // On create, the suffix is "0" for the first spatializer effect.
+
+        std::string headTrackingModes;
+        (void)item->get(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, &headTrackingModes);
+
+        std::string levels;
+        (void)item->get(AMEDIAMETRICS_PROP_LEVELS, &levels);
+
+        std::string modes;
+        (void)item->get(AMEDIAMETRICS_PROP_MODES, &modes);
+
+        std::string channelMasks;
+        (void)item->get(AMEDIAMETRICS_PROP_CHANNELMASKS, &channelMasks);
+
+        LOG(LOG_LEVEL) << "key:" << key
+                << " headTrackingModes:" << headTrackingModes
+                << " levels:" << levels
+                << " modes:" << modes
+                << " channelMasks:" << channelMasks
+                ;
+
+        const std::vector<int32_t> headTrackingModesVector =
+                types::vectorFromMap(headTrackingModes, types::getHeadTrackingModeMap());
+        const std::vector<int32_t> levelsVector =
+                types::vectorFromMap(levels, types::getSpatializerLevelMap());
+        const std::vector<int32_t> modesVector =
+                types::vectorFromMap(modes, types::getSpatializerModeMap());
+        const std::vector<int64_t> channelMasksVector =
+                types::channelMaskVectorFromString(channelMasks);
+
+        const auto [ result, str ] = sendToStatsd(SpatializerCapabilitiesFields,
+                CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
+                , headTrackingModesVector
+                , levelsVector
+                , modesVector
+                , channelMasksVector
+                );
+
+        mAudioAnalytics.mStatsdLog->log(
+                stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
+
+        std::lock_guard lg(mLock);
+        if (mFirstCreateTimeNs == 0) {
+            // Only update the create time once to prevent audioserver restart
+            // from looking like a boot.
+            mFirstCreateTimeNs = item->getTimestamp();
+        }
+        mSimpleLog.log("%s suffix: %s item: %s",
+                __func__, suffix.c_str(), item->toString().c_str());
+    } else {
+        std::string subtype = suffix.substr(0, delim);
+        if (subtype != "device") return; // not a device.
+
+        const std::string deviceType = suffix.substr(std::size("device.") - 1);
+
+        const int32_t deviceTypeStatsd =
+                types::lookup<types::AUDIO_DEVICE_INFO_TYPE, int32_t>(deviceType);
+
+        std::string address;
+        (void)item->get(AMEDIAMETRICS_PROP_ADDRESS, &address);
+        std::string enabled;
+        (void)item->get(AMEDIAMETRICS_PROP_ENABLED, &enabled);
+        std::string hasHeadTracker;
+        (void)item->get(AMEDIAMETRICS_PROP_HASHEADTRACKER, &hasHeadTracker);
+        std::string headTrackerEnabled;
+        (void)item->get(AMEDIAMETRICS_PROP_HEADTRACKERENABLED, &headTrackerEnabled);
+
+        std::lock_guard lg(mLock);
+
+        // Validate from our cached state
+
+        // Our deviceKey takes the device type and appends the address if any.
+        // This distinguishes different wireless devices for the purposes of tracking.
+        std::string deviceKey(deviceType);
+        deviceKey.append("_").append(address);
+        DeviceState& deviceState = mDeviceStateMap[deviceKey];
+
+        // check whether the settings event is within a certain time of spatializer creation.
+        const bool withinBoot =
+                item->getTimestamp() - mFirstCreateTimeNs < kBootDurationThreshold;
+
+        if (!enabled.empty()) {
+            if (enabled != deviceState.enabled) {
+                const int32_t settingEventStatsd =
+                        classifySettingEvent(!deviceState.enabled.empty(), withinBoot);
+                deviceState.enabled = enabled;
+                const bool enabledStatsd = enabled == "true";
+                const auto [ result, str ] = sendToStatsd(SpatializerDeviceEnabledFields,
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
+                        , deviceTypeStatsd
+                        , settingEventStatsd
+                        , enabledStatsd
+                        );
+                mAudioAnalytics.mStatsdLog->log(
+                        stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
+            }
+        }
+        if (!hasHeadTracker.empty()) {
+            if (hasHeadTracker != deviceState.hasHeadTracker) {
+                const int32_t settingEventStatsd =
+                        classifySettingEvent(!deviceState.hasHeadTracker.empty(), withinBoot);
+                deviceState.hasHeadTracker = hasHeadTracker;
+                const bool supportedStatsd = hasHeadTracker == "true";
+                const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceSupportedFields,
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
+                        , deviceTypeStatsd
+                        , settingEventStatsd
+                        , supportedStatsd
+                        );
+                mAudioAnalytics.mStatsdLog->log(
+                        stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
+            }
+        }
+        if (!headTrackerEnabled.empty()) {
+            if (headTrackerEnabled != deviceState.headTrackerEnabled) {
+                const int32_t settingEventStatsd =
+                        classifySettingEvent(!deviceState.headTrackerEnabled.empty(), withinBoot);
+                deviceState.headTrackerEnabled = headTrackerEnabled;
+                const bool enabledStatsd = headTrackerEnabled == "true";
+                const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceEnabledFields,
+                        CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
+                        , deviceTypeStatsd
+                        , settingEventStatsd
+                        , enabledStatsd
+                        );
+                mAudioAnalytics.mStatsdLog->log(
+                        stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
+            }
+        }
+        mSimpleLog.log("%s deviceKey: %s item: %s",
+                __func__, deviceKey.c_str(), item->toString().c_str());
+    }
+}
+
+std::pair<std::string, int32_t> AudioAnalytics::Spatializer::dump(
+        int32_t lines, const char *prefix) const
+{
+    std::lock_guard lg(mLock);
+    std::string s = mSimpleLog.dumpToString(prefix == nullptr ? "" : prefix, lines);
+    size_t n = std::count(s.begin(), s.end(), '\n');
+    return { s, n };
+}
+
+void AudioAnalytics::MidiLogging::onEvent(
+        const std::shared_ptr<const android::mediametrics::Item> &item) const {
+    const std::string& key = item->getKey();
+
+    const auto uid = item->getUid();
+
+    int32_t deviceId = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DEVICEID, &deviceId);
+
+    int32_t inputPortCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_INPUTPORTCOUNT, &inputPortCount);
+
+    int32_t outputPortCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, &outputPortCount);
+
+    int32_t hardwareType = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_HARDWARETYPE, &hardwareType);
+
+    std::string isSharedString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_ISSHARED, &isSharedString);
+    const bool isShared = (isSharedString == "true");
+
+    std::string supportsMidiUmpString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SUPPORTSMIDIUMP, &supportsMidiUmpString);
+    const bool supportsMidiUmp = (supportsMidiUmpString == "true");
+
+    std::string usingAlsaString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_USINGALSA, &usingAlsaString);
+    const bool usingAlsa = (usingAlsaString == "true");
+
+    int64_t durationNs = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DURATIONNS, &durationNs);
+
+    int32_t openedCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_OPENEDCOUNT, &openedCount);
+
+    int32_t closedCount = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_CLOSEDCOUNT, &closedCount);
+
+    std::string deviceDisconnectedString;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_DEVICEDISCONNECTED, &deviceDisconnectedString);
+    const bool deviceDisconnected = (deviceDisconnectedString == "true");
+
+    int32_t totalInputBytes = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_TOTALINPUTBYTES, &totalInputBytes);
+
+    int32_t totalOutputBytes = -1;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES, &totalOutputBytes);
+
+    LOG(LOG_LEVEL) << "key:" << key
+            << " uid:" << uid
+            << " id:" << deviceId
+            << " input_port_count:" << inputPortCount
+            << " output_port_count:" << outputPortCount
+            << " device_type:" << hardwareType
+            << " is_shared:" << isSharedString
+            << " supports_ump:" << supportsMidiUmpString
+            << " using_alsa:" << usingAlsaString
+            << " duration_opened_ms:" << durationNs
+            << " opened_count:" << openedCount
+            << " closed_count:" << closedCount
+            << " device_disconnected:" << deviceDisconnectedString
+            << " total_input_bytes:" << totalInputBytes
+            << " total_output_bytes:" << totalOutputBytes;
+
+    if (mAudioAnalytics.mDeliverStatistics) {
+        const auto result = sendToStatsd(
+                CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+                , uid
+                , deviceId
+                , inputPortCount
+                , outputPortCount
+                , hardwareType
+                , isShared
+                , supportsMidiUmp
+                , usingAlsa
+                , durationNs
+                , openedCount
+                , closedCount
+                , deviceDisconnected
+                , totalInputBytes
+                , totalOutputBytes);
+        std::stringstream ss;
+        ss << "result:" << result;
+        const auto fieldsStr = printFields(MidiDeviceCloseFields,
+                CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+                , uid
+                , deviceId
+                , inputPortCount
+                , outputPortCount
+                , hardwareType
+                , isShared
+                , supportsMidiUmp
+                , usingAlsa
+                , durationNs
+                , openedCount
+                , closedCount
+                , deviceDisconnected
+                , totalInputBytes
+                , totalOutputBytes);
+        ss << " " << fieldsStr;
+        std::string str = ss.str();
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mAudioAnalytics.mStatsdLog->log(
+                stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED, str);
+    }
+}
+
+// This method currently suppresses the device name (Bluetooth output names are reported as SUPPRESSED).
+std::string AudioAnalytics::getDeviceNamesFromOutputDevices(std::string_view devices) const {
+    std::string deviceNames;
+    if (stringutils::hasBluetoothOutputDevice(devices)) {
+        deviceNames = SUPPRESSED;
+#if 0   // TODO(b/161554630) sanitize name
+        mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            "audio.device.bt_a2dp", AMEDIAMETRICS_PROP_NAME, &deviceNames);
+        // Remove | if present
+        stringutils::replace(deviceNames, "|", '?');
+        if (deviceNames.size() > STATSD_DEVICE_NAME_MAX_LENGTH) {
+            deviceNames.resize(STATSD_DEVICE_NAME_MAX_LENGTH); // truncate
+        }
+#endif
+    }
+    return deviceNames;
+}
 
 } // namespace android::mediametrics
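
A hedged, self-contained sketch of the key routing performed by Spatializer::onEvent() above: keys carry the "audio.spatializer." prefix, a bare numeric suffix (e.g. "0") marks the first effect instance whose capabilities get reported, and a "device.<type>" suffix marks per-device setting changes. The prefix literal and function name below are illustrative stand-ins for the AMEDIAMETRICS_* constants used in the patch.

#include <cstddef>
#include <string>

// Returns true when the key is a spatializer item this handler would act on.
bool routeSpatializerKeySketch(const std::string& key) {
    static constexpr char kPrefix[] = "audio.spatializer.";  // AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER
    if (key.rfind(kPrefix, 0) != 0) return false;            // not a spatializer key
    const std::string suffix = key.substr(sizeof(kPrefix) - 1);
    const size_t delim = suffix.find('.');
    if (delim == std::string::npos) {
        // e.g. suffix == "0": first spatializer instance, capabilities atom.
        return true;
    }
    if (suffix.substr(0, delim) != "device") return false;   // unknown subtype, ignored
    const std::string deviceType = suffix.substr(sizeof("device.") - 1);
    return !deviceType.empty();                               // per-device enable/head-tracker settings
}
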
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 5787e9e..630a436 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -26,7 +26,7 @@
 #include <string>
 #include <audio_utils/clock.h>
 #include <cutils/properties.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
 #include <sys/timerfd.h>
 #include <system/audio.h>
 
@@ -164,7 +164,7 @@
     const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
     const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
     const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
-    const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+    const int result = stats::media_metrics::stats_write(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
                                          audio_device,
                                          duration_secs,
                                          (float)volume,
@@ -177,7 +177,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audio_power_usage_data_reported:"
-            << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+            << stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED
             << " audio_device:" << audio_device
             << " duration_secs:" << duration_secs
             << " average_volume:" << (float)volume
@@ -187,7 +187,7 @@
             << " max_volume_duration_secs:" << max_volume_duration_secs
             << " max_volume:" << (float)max_volume
             << " }";
-    mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
+    mStatsdLog->log(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
 void AudioPowerUsage::updateMinMaxVolumeAndDuration(
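
A minimal hedged sketch of the unit handling in the AudioPowerUsage hunk above: durations are accumulated in nanoseconds and converted to whole seconds only when the atom is written. The constant name below is illustrative; the file uses NANOS_PER_SECOND.

#include <cstdint>

constexpr int64_t kNanosPerSecond = 1'000'000'000LL;  // stand-in for NANOS_PER_SECOND

// Truncating conversion used at reporting time, mirroring (int32_t)(ns / NANOS_PER_SECOND).
constexpr int32_t toReportedSeconds(int64_t durationNs) {
    return static_cast<int32_t>(durationNs / kNanosPerSecond);
}

static_assert(toReportedSeconds(2'500'000'000LL) == 2, "fractional seconds are truncated");
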
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 6e24a58..353ae12 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -18,7 +18,7 @@
 #include "MediaMetricsConstants.h"
 #include "StringUtils.h"
 #include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
-#include <statslog.h>            // statsd
+#include <stats_media_metrics.h> // statsd
 
 namespace android::mediametrics::types {
 
@@ -135,6 +135,94 @@
     return map;
 }
 
+// A map from the Java AudioDeviceInfo type names to internal (native) output devices.
+const std::unordered_map<std::string, int32_t>& getAudioDeviceOutCompactMap() {
+    // DO NOT MODIFY VALUES (OK to add new ones).
+    static std::unordered_map<std::string, int32_t> map{
+        // should "unknown" go to AUDIO_DEVICE_NONE?
+        {"earpiece", AUDIO_DEVICE_OUT_EARPIECE},
+        {"speaker", AUDIO_DEVICE_OUT_SPEAKER},
+        {"headset", AUDIO_DEVICE_OUT_WIRED_HEADSET},
+        {"headphone", AUDIO_DEVICE_OUT_WIRED_HEADPHONE},
+        {"bt_sco", AUDIO_DEVICE_OUT_BLUETOOTH_SCO},
+        {"bt_sco_hs", AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET},
+        {"bt_sco_carkit", AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT},
+        {"bt_a2dp", AUDIO_DEVICE_OUT_BLUETOOTH_A2DP},
+        {"bt_a2dp_hp", AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES},
+        {"bt_a2dp_spk", AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER},
+        {"aux_digital", AUDIO_DEVICE_OUT_AUX_DIGITAL},
+        {"hdmi", AUDIO_DEVICE_OUT_HDMI},
+        {"analog_dock", AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET},
+        {"digital_dock", AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET},
+        {"usb_accessory", AUDIO_DEVICE_OUT_USB_ACCESSORY},
+        {"usb_device", AUDIO_DEVICE_OUT_USB_DEVICE},
+        {"remote_submix", AUDIO_DEVICE_OUT_REMOTE_SUBMIX},
+        {"telephony_tx", AUDIO_DEVICE_OUT_TELEPHONY_TX},
+        {"line", AUDIO_DEVICE_OUT_LINE},
+        {"hdmi_arc", AUDIO_DEVICE_OUT_HDMI_ARC},
+        {"hdmi_earc", AUDIO_DEVICE_OUT_HDMI_EARC},
+        {"spdif", AUDIO_DEVICE_OUT_SPDIF},
+        {"fm_transmitter", AUDIO_DEVICE_OUT_FM},
+        {"aux_line", AUDIO_DEVICE_OUT_AUX_LINE},
+        {"speaker_safe", AUDIO_DEVICE_OUT_SPEAKER_SAFE},
+        {"ip", AUDIO_DEVICE_OUT_IP},
+        {"bus", AUDIO_DEVICE_OUT_BUS},
+        {"proxy", AUDIO_DEVICE_OUT_PROXY},
+        {"usb_headset", AUDIO_DEVICE_OUT_USB_HEADSET},
+        {"hearing_aid_out", AUDIO_DEVICE_OUT_HEARING_AID},
+        {"echo_canceller", AUDIO_DEVICE_OUT_ECHO_CANCELLER},
+        // default does not exist
+        {"ble_headset", AUDIO_DEVICE_OUT_BLE_HEADSET},
+        {"ble_speaker", AUDIO_DEVICE_OUT_BLE_SPEAKER},
+        {"ble_broadcast", AUDIO_DEVICE_OUT_BLE_BROADCAST},
+    };
+    return map;
+}
+
+// A map for the Java AudioDeviceInfo types.
+// This uses generated statsd enums.proto constants.
+const std::unordered_map<std::string, int32_t>& getAudioDeviceInfoTypeMap() {
+    // DO NOT MODIFY VALUES (OK to add new ones).
+    static std::unordered_map<std::string, int32_t> map{
+        {"unknown", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
+        {"earpiece", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
+        {"speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
+        {"headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
+        {"headphone", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
+        {"bt_sco", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_sco_hs", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_sco_carkit", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+        {"bt_a2dp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"bt_a2dp_hp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"bt_a2dp_spk", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+        {"aux_digital", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+        {"hdmi", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+        {"analog_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+        {"digital_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+        {"usb_accessory", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
+        {"usb_device", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
+        {"usb_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
+        {"remote_submix", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
+        {"telephony_tx", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
+        {"line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
+        {"hdmi_arc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
+        {"hdmi_earc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
+        {"spdif", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
+        {"fm_transmitter", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
+        {"aux_line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
+        {"speaker_safe", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
+        {"ip", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
+        {"bus", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
+        {"proxy", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
+        {"hearing_aid_out", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
+        {"echo_canceller", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
+        {"ble_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
+        {"ble_speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
+        {"ble_broadcast", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
+    };
+    return map;
+}
+
 const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap() {
     // DO NOT MODIFY VALUES (OK to add new ones).
     // This may be found in frameworks/av/services/audioflinger/Threads.h
@@ -197,27 +285,62 @@
     return map;
 }
 
+const std::unordered_map<std::string, int32_t>& getHeadTrackingModeMap() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // frameworks/base/media/java/android/media/Spatializer.java
+    // frameworks/av/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
+    static std::unordered_map<std::string, int32_t> map {
+        {"OTHER", 0},
+        {"DISABLED", -1},
+        {"RELATIVE_WORLD", 1},
+        {"RELATIVE_SCREEN", 2},
+    };
+    return map;
+}
+
+const std::unordered_map<std::string, int32_t>& getSpatializerLevelMap() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // frameworks/base/media/java/android/media/Spatializer.java
+    // frameworks/av/media/libaudioclient/aidl/android/media/SpatializerHeadTrackingMode.aidl
+    static std::unordered_map<std::string, int32_t> map {
+        {"NONE", 0},
+        {"SPATIALIZER_MULTICHANNEL", 1},
+        {"SPATIALIZER_MCHAN_BED_PLUS_OBJECTS", 2},
+    };
+    return map;
+}
+
+const std::unordered_map<std::string, int32_t>& getSpatializerModeMap() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    // frameworks/av/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
+    static std::unordered_map<std::string, int32_t> map {
+        {"SPATIALIZER_BINAURAL", 0},
+        {"SPATIALIZER_TRANSAURAL", 1},
+    };
+    return map;
+}
+
 const std::unordered_map<std::string, int32_t>& getStatusMap() {
     // DO NOT MODIFY VALUES(OK to add new ones).
     static std::unordered_map<std::string, int32_t> map {
         {"",
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_OK,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_ARGUMENT,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_IO,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_MEMORY,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_SECURITY,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_STATE,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_TIMEOUT,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
         {AMEDIAMETRICS_PROP_STATUS_VALUE_UNKNOWN,
-            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
+            stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
     };
     return map;
 }
@@ -286,6 +409,35 @@
     return value;
 }
 
+std::vector<int32_t> vectorFromMap(
+        const std::string &str, const std::unordered_map<std::string, int32_t>& map)
+{
+    std::vector<int32_t> v;
+
+    if (str.empty()) return v;
+
+    const auto result = stringutils::split(str, "|");
+    for (const auto &s : result) {
+        auto it = map.find(s);
+        if (it == map.end()) continue;
+        v.push_back(it->second);
+    }
+    return v;
+}
+
+std::vector<int64_t> channelMaskVectorFromString(const std::string &s)
+{
+    std::vector<int64_t> v;
+
+    const auto result = stringutils::split(s, "|");
+    for (const auto &mask : result) {
+        // strtoll returns 0 if the mask cannot be parsed (or if it is genuinely 0).
+        int64_t int64Mask = strtoll(mask.c_str(), nullptr, 0);
+        v.push_back(int64Mask);
+    }
+    return v;
+}
+
 template <>
 int32_t lookup<CONTENT_TYPE>(const std::string &contentType)
 {
@@ -441,6 +593,17 @@
 }
 
 template <>
+int32_t lookup<AUDIO_DEVICE_INFO_TYPE>(const std::string& audioDeviceInfoType)
+{
+    auto& map = getAudioDeviceInfoTypeMap();
+    auto it = map.find(audioDeviceInfoType);
+    if (it == map.end()) {
+        return 0;
+    }
+    return it->second;
+}
+
+template <>
 int32_t lookup<CALLER_NAME>(const std::string &callerName)
 {
     auto& map = getAudioCallerNameMap();
@@ -463,12 +626,45 @@
 }
 
 template <>
+int32_t lookup<HEAD_TRACKING_MODE>(const std::string& headTrackingMode)
+{
+    auto& map = getHeadTrackingModeMap();
+    auto it = map.find(headTrackingMode);
+    if (it == map.end()) {
+        return 0;
+    }
+    return it->second;
+}
+
+template <>
+int32_t lookup<SPATIALIZER_LEVEL>(const std::string& spatializerLevel)
+{
+    auto& map = getSpatializerLevelMap();
+    auto it = map.find(spatializerLevel);
+    if (it == map.end()) {
+        return 0;
+    }
+    return it->second;
+}
+
+template <>
+int32_t lookup<SPATIALIZER_MODE>(const std::string& spatializerMode)
+{
+    auto& map = getSpatializerModeMap();
+    auto it = map.find(spatializerMode);
+    if (it == map.end()) {
+        return 0;
+    }
+    return it->second;
+}
+
+template <>
 int32_t lookup<STATUS>(const std::string &status)
 {
     auto& map = getStatusMap();
     auto it = map.find(status);
     if (it == map.end()) {
-        return util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
+        return stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
     }
     return it->second;
 }
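
A hedged, self-contained sketch of the parsing behavior added to AudioTypes.cpp above: both helpers split their input on '|'; vectorFromMap() drops tokens missing from the lookup map, while channelMaskVectorFromString() feeds each token to strtoll(..., 0) so "0x"-prefixed masks parse as hex and unparsable tokens become 0. The splitter and names below are illustrative (the patch uses stringutils::split).

#include <cstdint>
#include <cstdlib>
#include <string>
#include <unordered_map>
#include <vector>

// Minimal '|' tokenizer standing in for stringutils::split(str, "|").
static std::vector<std::string> splitOnBar(const std::string& s) {
    std::vector<std::string> out;
    size_t start = 0;
    for (size_t pos; (pos = s.find('|', start)) != std::string::npos; start = pos + 1) {
        out.push_back(s.substr(start, pos - start));
    }
    out.push_back(s.substr(start));
    return out;
}

void parseSketch() {
    const std::unordered_map<std::string, int32_t> levels{
        {"NONE", 0}, {"SPATIALIZER_MULTICHANNEL", 1}};
    std::vector<int32_t> levelValues;  // "NONE|SPATIALIZER_MULTICHANNEL" -> {0, 1}
    for (const auto& token : splitOnBar("NONE|SPATIALIZER_MULTICHANNEL")) {
        const auto it = levels.find(token);
        if (it != levels.end()) levelValues.push_back(it->second);  // unknown names skipped
    }
    std::vector<int64_t> masks;        // "0x3|0xf" -> {3, 15}
    for (const auto& token : splitOnBar("0x3|0xf")) {
        masks.push_back(std::strtoll(token.c_str(), nullptr, 0));  // base 0 handles 0x prefixes
    }
    (void)levelValues;
    (void)masks;
}
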
diff --git a/services/mediametrics/MediaDrmStatsdHelper.cpp b/services/mediametrics/MediaDrmStatsdHelper.cpp
new file mode 100644
index 0000000..d762672
--- /dev/null
+++ b/services/mediametrics/MediaDrmStatsdHelper.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MediaDrmStatsdHelper.h"
+#include <tuple>
+#include <map>
+#include <unordered_map>
+
+namespace {
+
+struct UUID {
+    uint64_t msb, lsb; // NOLINT(misc-non-private-member-variables-in-classes)
+    bool operator < (const UUID& that) const {
+        return std::tie(msb, lsb) < std::tie(that.msb, that.lsb);
+    }
+};
+
+// KEEP IN SYNC WITH frameworks/proto_logging/stats/enums/media/drm/enums.proto
+std::map<UUID, int32_t> const kUuidSchemeEnumMap {
+    {{.msb = 0x6DD8B3C345F44A68, .lsb = 0xBF3A64168D01A4A6}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__ABV_MODRM             }, // ABV_MODRM
+    {{.msb = 0xF239E769EFA34850, .lsb = 0x9C16A903C6932EFB}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__ADOBE_PRIMETIME       }, // ADOBE_PRIMETIME
+    {{.msb = 0x616C746963617374, .lsb = 0x2D50726F74656374}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__ALTICAST              }, // ALTICAST
+    {{.msb = 0x94CE86FB07FF4F43, .lsb = 0xADB893D2FA968CA2}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__APPLE_FAIRPLAY        }, // APPLE_FAIRPLAY
+    {{.msb = 0x279FE473512C48FE, .lsb = 0xADE8D176FEE6B40F}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__ARRIS_TITANIUM        }, // ARRIS_TITANIUM
+    {{.msb = 0x3D5E6D359B9A41E8, .lsb = 0xB843DD3C6E72C42C}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CHINADRM              }, // CHINADRM
+    {{.msb = 0x3EA8778F77424BF9, .lsb = 0xB18BE834B2ACBD47}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CLEAR_KEY_AES_128     }, // CLEAR_KEY_AES_128
+    {{.msb = 0xBE58615B19C44684, .lsb = 0x88B3C8C57E99E957}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CLEAR_KEY_SAMPLE_AES  }, // CLEAR_KEY_SAMPLE_AES
+    {{.msb = 0xE2719D58A985B3C9, .lsb = 0x781AB030AF78D30E}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CLEAR_KEY_DASH_IF     }, // CLEAR_KEY_DASH_IF
+    {{.msb = 0x644FE7B5260F4FAD, .lsb = 0x949A0762FFB054B4}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CMLA_OMA              }, // CMLA_OMA
+    {{.msb = 0x37C332587B994C7E, .lsb = 0xB15D19AF74482154}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__COMMSCOPE_TITANIUM    }, // COMMSCOPE_TITANIUM
+    {{.msb = 0x45D481CB8FE049C0, .lsb = 0xADA9AB2D2455B2F2}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__CORECRYPT             }, // CORECRYPT
+    {{.msb = 0xDCF4E3E362F15818, .lsb = 0x7BA60A6FE33FF3DD}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__DIGICAP_SMARTXESS     }, // DIGICAP_SMARTXESS
+    {{.msb = 0x35BF197B530E42D7, .lsb = 0x8B651B4BF415070F}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__DIVX                  }, // DIVX
+    {{.msb = 0x80A6BE7E14484C37, .lsb = 0x9E70D5AEBE04C8D2}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__IRDETO                }, // IRDETO
+    {{.msb = 0x5E629AF538DA4063, .lsb = 0x897797FFBD9902D4}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__MARLIN                }, // MARLIN
+    {{.msb = 0x9A04F07998404286, .lsb = 0xAB92E65BE0885F95}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__MICROSOFT_PLAYREADY   }, // MICROSOFT_PLAYREADY
+    {{.msb = 0x6A99532D869F5922, .lsb = 0x9A91113AB7B1E2F3}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__MOBITV                }, // MOBITV
+    {{.msb = 0xADB41C242DBF4A6D, .lsb = 0x958B4457C0D27B95}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__NAGRA_MEDIAACCESS     }, // NAGRA_MEDIAACCESS
+    {{.msb = 0x1F83E1E86EE94F0D, .lsb = 0xBA2F5EC4E3ED1A66}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__SECUREMEDIA           }, // SECUREMEDIA
+    {{.msb = 0x992C46E6C4374899, .lsb = 0xB6A050FA91AD0E39}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__SECUREMEDIA_STEELKNOT }, // SECUREMEDIA_STEELKNOT
+    {{.msb = 0xA68129D3575B4F1A, .lsb = 0x9CBA3223846CF7C3}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__SYNAMEDIA_VIDEOGUARD  }, // SYNAMEDIA_VIDEOGUARD
+    {{.msb = 0xAA11967FCC014A4A, .lsb = 0x8E99C5D3DDDFEA2D}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__UNITEND_UDRM          }, // UNITEND_UDRM
+    {{.msb = 0x9A27DD82FDE24725, .lsb = 0x8CBC4234AA06EC09}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__VERIMATRIX_VCAS       }, // VERIMATRIX_VCAS
+    {{.msb = 0xB4413586C58CFFB0, .lsb = 0x94A5D4896C1AF6C3}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__VIACCESS_ORCA         }, // VIACCESS_ORCA
+    {{.msb = 0x793B79569F944946, .lsb = 0xA94223E7EF7E44B4}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__VISIONCRYPT           }, // VISIONCRYPT
+    {{.msb = 0x1077EFECC0B24D02, .lsb = 0xACE33C1E52E2FB4B}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__W3C_COMMON            }, // W3C_COMMON
+    {{.msb = 0xEDEF8BA979D64ACE, .lsb = 0xA3C827DCD51D21ED}, android::stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__WIDEVINE              }, // WIDEVINE
+};
+
+// KEEP IN SYNC WITH frameworks/av/drm/libmediadrm/include/mediadrm/IDrm.h
+// KEEP IN SYNC WITH frameworks/proto_logging/stats/enums/media/drm/enums.proto
+std::unordered_map<std::string, int32_t> const kDrmApiEnumMap {
+    {"initCheck"                  , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_INIT_CHECK                      },
+    {"isCryptoSchemeSupported"    , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_IS_CRYPTO_SCHEME_SUPPORTED      },
+    {"createPlugin"               , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_CREATE_PLUGIN                   },
+    {"destroyPlugin"              , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_DESTROY_PLUGIN                  },
+    {"openSession"                , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_OPEN_SESSION                    },
+    {"closeSession"               , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_CLOSE_SESSION                   },
+    {"getKeyRequest"              , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_KEY_REQUEST                 },
+    {"provideKeyResponse"         , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_PROVIDE_KEY_RESPONSE            },
+    {"removeKeys"                 , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REMOVE_KEYS                     },
+    {"restoreKeys"                , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_RESTORE_KEYS                    },
+    {"queryKeyStatus"             , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_QUERY_KEY_STATUS                },
+    {"getProvisionRequest"        , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_PROVISION_REQUEST           },
+    {"provideProvisionResponse"   , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_PROVIDE_PROVISION_RESPONSE      },
+    {"getSecureStops"             , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_SECURE_STOPS                },
+    {"getSecureStopIds"           , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_SECURE_STOP_IDS             },
+    {"getSecureStop"              , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_SECURE_STOP                 },
+    {"releaseSecureStops"         , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_RELEASE_SECURE_STOPS            },
+    {"removeSecureStop"           , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REMOVE_SECURE_STOP              },
+    {"removeAllSecureStops"       , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REMOVE_ALL_SECURE_STOPS         },
+    {"getHdcpLevels"              , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_HDCP_LEVELS                 },
+    {"getNumberOfSessions"        , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_NUMBER_OF_SESSIONS          },
+    {"getSecurityLevel"           , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_SECURITY_LEVEL              },
+    {"getOfflineLicenseKeySetIds" , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_OFFLINE_LICENSE_KEY_SET_IDS },
+    {"removeOfflineLicense"       , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REMOVE_OFFLINE_LICENSE          },
+    {"getOfflineLicenseState"     , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_OFFLINE_LICENSE_STATE       },
+    {"getPropertyString"          , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_PROPERTY_STRING             },
+    {"getPropertyByteArray"       , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_PROPERTY_BYTE_ARRAY         },
+    {"setPropertyString"          , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_PROPERTY_STRING             },
+    {"setPropertyByteArray"       , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_PROPERTY_BYTE_ARRAY         },
+    {"getMetrics"                 , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_METRICS                     },
+    {"setCipherAlgorithm"         , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_CIPHER_ALGORITHM            },
+    {"setMacAlgorithm"            , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_MAC_ALGORITHM               },
+    {"encrypt"                    , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GENERIC_ENCRYPT                 },
+    {"decrypt"                    , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GENERIC_DECRYPT                 },
+    {"sign"                       , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GENERIC_SIGN                    },
+    {"verify"                     , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GENERIC_VERIFY                  },
+    {"signRSA"                    , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SIGN_RSA                        },
+    {"setListener"                , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_LISTENER                    },
+    {"requiresSecureDecoder"      , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REQUIRES_SECURE_DECODER         },
+    {"requiresSecureDecoderLevel" , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_REQUIRES_SECURE_DECODER_LEVEL   },
+    {"setPlaybackId"              , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_SET_PLAYBACK_ID                 },
+    {"getLogMessages"             , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_LOG_MESSAGES                },
+    {"getSupportedSchemes"        , android::stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_GET_SUPPORTED_SCHEMES           },
+};
+
+} // anonymous namespace
+
+namespace android {
+
+int32_t MediaDrmStatsdHelper::findDrmScheme(const int64_t msb, const int64_t lsb) {
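+    // The UUID halves arrive as signed int64_t; cast to unsigned so they compare
+    // correctly against the table keys.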
+    auto it = kUuidSchemeEnumMap.find({.msb = static_cast<uint64_t>(msb), .lsb = static_cast<uint64_t>(lsb)});
+    if (it == kUuidSchemeEnumMap.end()) return stats::media_metrics::MEDIA_DRM_SESSION_OPENED__SCHEME__DRM_SCHEME_OTHER;
+    return it->second;
+}
+
+int32_t MediaDrmStatsdHelper::findDrmApi(const std::string& api) {
+    auto it = kDrmApiEnumMap.find(api);
+    if (it == kDrmApiEnumMap.end()) return stats::media_metrics::MEDIA_DRM_ERRORED__API__DRM_API_UNKNOWN;
+    return it->second;
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index ff16b9e..af1372b 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -33,7 +33,7 @@
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <memunreachable/memunreachable.h>
 #include <private/android_filesystem_config.h> // UID
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include <set>
 
@@ -72,6 +72,7 @@
 bool MediaMetricsService::useUidForPackage(
         const std::string& package, const std::string& installer)
 {
+    // NOLINTBEGIN(bugprone-branch-clone)
     if (strchr(package.c_str(), '.') == nullptr) {
         return false;  // not of form 'com.whatever...'; assume internal and ok
     } else if (strncmp(package.c_str(), "android.", 8) == 0) {
@@ -85,6 +86,7 @@
     } else {
         return true;  // we're not sure where it came from, use uid only.
     }
+    // NOLINTEND(bugprone-branch-clone)
 }
 
 /* static */
@@ -336,6 +338,15 @@
                 result << "-- some lines may be truncated --\n";
             }
 
+            const int32_t spatializerLinesToDump = all ? INT32_MAX : 15;
+            result << "\nSpatializer Message Log:";
+            const auto [ spatializerDumpString, spatializerLines ] =
+                    mAudioAnalytics.dumpSpatializer(spatializerLinesToDump);
+            result << "\n" << spatializerDumpString;
+            if (spatializerLines == spatializerLinesToDump) {
+                result << "-- some lines may be truncated --\n";
+            }
+
             result << "\nLogSessionId:\n"
                    << mediametrics::ValidateId::get()->dump();
 
@@ -500,6 +511,8 @@
     const std::string &key = item->getKey();
     if (startsWith(key, "audio.")) return true;
     if (startsWith(key, "drm.vendor.")) return true;
+    if (startsWith(key, "mediadrm.")) return true;
+
     // the list of allowedKey uses statsd_handlers
     // in iface_statsd.cpp as reference
     // drmmanager is from a trusted uid, therefore not needed here
@@ -511,6 +524,8 @@
                                      "audiotrack",
                                      // other media
                                      "codec",
+                                     "freeze",
+                                     "judder",
                                      "extractor",
                                      "mediadrm",
                                      "mediaparser",
@@ -535,7 +550,7 @@
     if (mStatsdRegistered.test_and_set()) {
         return;
     }
-    auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+    auto tag = stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO;
     auto cb = MediaMetricsService::pullAtomCallback;
     AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
 }
@@ -553,7 +568,7 @@
 std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
 {
     switch (atomTag) {
-    case android::util::MEDIA_DRM_ACTIVITY_INFO:
+    case stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO:
         return "mediadrm";
     }
     return {};
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index 50525bc..5766f1c 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -15,11 +15,15 @@
  */
 
 //#define LOG_NDEBUG 0
-#define LOG_TAG "MediaMetricsService::stringutils"
+#define LOG_TAG "mediametrics::stringutils"
 #include <utils/Log.h>
 
 #include "StringUtils.h"
 
+#include <charconv>
+
+#include "AudioTypes.h"
+
 namespace android::mediametrics::stringutils {
 
 std::string tokenizer(std::string::const_iterator& it,
@@ -52,6 +56,26 @@
     }
 }
 
+bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
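+    // Accepts input such as "{1,2,3}" (no whitespace between elements); a single
+    // ',', '{' or '}' is skipped before each number, which is then parsed with
+    // std::from_chars.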
+    std::vector<int32_t> values;
+    const char *p = str.c_str();
+    const char *last = p + str.size();
+    while (p != last) {
+        if (*p == ',' || *p == '{' || *p == '}') {
+            p++;
+        }
+        int32_t value = -1;
+        auto [ptr, error] = std::from_chars(p, last, value);
+        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+            return false;
+        }
+        p = ptr;
+        values.push_back(value);
+    }
+    *vector = std::move(values);
+    return true;
+}
+
 std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
 {
     std::vector<std::pair<std::string, std::string>> result;
@@ -99,4 +123,30 @@
     return replaced;
 }
 
+template <types::AudioEnumCategory CATEGORY>
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseDevicePairs(const std::string& devicePairs) {
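+    // Builds two parallel '|'-delimited strings from the (device, address) pairs:
+    // .first holds the statsd-facing names from types::lookup, .second the raw
+    // internal device names.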
+    std::pair<std::string, std::string> result{};
+    const auto devaddrvec = stringutils::getDeviceAddressPairs(devicePairs);
+    for (const auto& [device, addr] : devaddrvec) { // addr ignored for now.
+        if (!result.second.empty()) {
+            result.second.append("|"); // delimit devices with '|'.
+            result.first.append("|");
+        }
+        result.second.append(device);
+        result.first.append(types::lookup<CATEGORY, std::string>(device));
+    }
+    return result;
+}
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseOutputDevicePairs(const std::string& devicePairs) {
+    return parseDevicePairs<types::OUTPUT_DEVICE>(devicePairs);
+}
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseInputDevicePairs(const std::string& devicePairs) {
+    return parseDevicePairs<types::INPUT_DEVICE>(devicePairs);
+}
+
 } // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 84d494e..20a6378 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -51,7 +51,7 @@
         "libprotobuf-cpp-lite",
         "libstagefright",
         "libstagefright_foundation",
-        "libstatslog",
+        "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
         "libutils",
@@ -68,5 +68,13 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediametricsservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 776f878..7f4e6e8 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -37,8 +37,6 @@
 #include "MediaMetricsService.h"
 #include "iface_statsd.h"
 
-#include <statslog.h>
-
 namespace android {
 
 // set of routines that crack a mediametrics::Item
@@ -85,6 +83,9 @@
         { "drmmanager", statsd_drmmanager },
         { "extractor", statsd_extractor },
         { "mediadrm", statsd_mediadrm },
+        { "mediadrm.created", statsd_mediadrm_created },
+        { "mediadrm.errored", statsd_mediadrm_errored },
+        { "mediadrm.session_opened", statsd_mediadrm_session_opened },
         { "mediaparser", statsd_mediaparser },
         { "nuplayer", statsd_nuplayer },
         { "nuplayer2", statsd_nuplayer },
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 5ee8c30..f0a4ac8 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -92,6 +92,15 @@
         return mHealth.dump(lines);
     }
 
+    /**
+     * Returns a pair consisting of the dump string and the number of lines in the string.
+     *
+     * Spatializer dump.
+     */
+    std::pair<std::string, int32_t> dumpSpatializer(int32_t lines = INT32_MAX) const {
+        return mSpatializer.dump(lines);
+    }
+
     void clear() {
         // underlying state is locked.
         mPreviousAnalyticsState->clear();
@@ -152,6 +161,13 @@
      */
     std::string getThreadFromTrack(const std::string& track) const;
 
+    /**
+     * Returns the device names, if present, for the given output devices string.
+     *
+     * This is currently enabled only for Bluetooth output devices.
+     */
+    std::string getDeviceNamesFromOutputDevices(std::string_view devices) const;
+
     const bool mDeliverStatistics;
 
     // Actions is individually locked
@@ -317,6 +333,50 @@
         SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
     } mHealth{*this};
 
+    // Spatializer is a nested class that tracks spatializer-related messages.
+    class Spatializer {
+    public:
+        explicit Spatializer(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        // an item that starts with "audio.spatializer"
+        void onEvent(const std::shared_ptr<const android::mediametrics::Item> &item);
+
+        std::pair<std::string, int32_t> dump(
+                int32_t lines = INT32_MAX, const char *prefix = nullptr) const;
+
+    private:
+
+        // Current device state as strings:
+        // "" means unknown, "true" or "false".
+        struct DeviceState {
+            std::string enabled;
+            std::string hasHeadTracker;
+            std::string headTrackerEnabled;
+        };
+
+        AudioAnalytics& mAudioAnalytics;
+        static constexpr int64_t kBootDurationThreshold = 120 /* seconds */ * 1e9;
+        mutable std::mutex mLock;
+        int64_t mFirstCreateTimeNs GUARDED_BY(mLock) = 0;
+        std::map<std::string, DeviceState> mDeviceStateMap GUARDED_BY(mLock);
+        SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
+    } mSpatializer{*this};
+
+    // MidiLogging collects info whenever a MIDI device is closed.
+    class MidiLogging {
+    public:
+        explicit MidiLogging(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        void onEvent(
+                const std::shared_ptr<const android::mediametrics::Item> &item) const;
+
+    private:
+
+        AudioAnalytics &mAudioAnalytics;
+    } mMidiLogging{*this};
+
     AudioPowerUsage mAudioPowerUsage;
 };
 
diff --git a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
index b7215e6..6e5a5cf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
+++ b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
@@ -50,7 +50,7 @@
      */
     std::pair<std::string, int32_t> dump(int32_t lines = INT32_MAX) const;
 
-    // align with message AudioUsageDataReported in frameworks/base/cmds/statsd/src/atoms.proto
+    // align with message AudioPowerUsageDataReported in frameworks/proto_logging/stats/atoms.proto
     enum AudioType {
         UNKNOWN_TYPE = 0,
         VOICE_CALL_TYPE = 1,            // voice call
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index 5dbff9b..b5fe28b 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -29,6 +29,14 @@
 const std::unordered_map<std::string, int64_t>& getAudioDeviceOutMap();
 const std::unordered_map<std::string, int32_t>& getAudioThreadTypeMap();
 const std::unordered_map<std::string, int32_t>& getAudioTrackTraitsMap();
+const std::unordered_map<std::string, int32_t>& getHeadTrackingModeMap();
+const std::unordered_map<std::string, int32_t>& getSpatializerLevelMap();
+const std::unordered_map<std::string, int32_t>& getSpatializerModeMap();
+
+std::vector<int32_t> vectorFromMap(
+        const std::string &str, const std::unordered_map<std::string, int32_t>& map);
+
+std::vector<int64_t> channelMaskVectorFromString(const std::string &s);
 
 // Enumeration for the device connection results.
 enum DeviceConnectionResult : int32_t {
@@ -47,14 +55,18 @@
     AAUDIO_DIRECTION,
     AAUDIO_PERFORMANCE_MODE,
     AAUDIO_SHARING_MODE,
+    AUDIO_DEVICE_INFO_TYPE,
     CALLER_NAME,
     CONTENT_TYPE,
     ENCODING,
+    HEAD_TRACKING_MODE,
     INPUT_DEVICE,  // int64_t
     INPUT_FLAG,
     OUTPUT_DEVICE, // int64_t
     OUTPUT_FLAG,
     SOURCE_TYPE,
+    SPATIALIZER_LEVEL,
+    SPATIALIZER_MODE,
     STATUS,
     STREAM_TYPE,
     THREAD_TYPE,
diff --git a/services/mediametrics/include/mediametricsservice/MediaDrmStatsdHelper.h b/services/mediametrics/include/mediametricsservice/MediaDrmStatsdHelper.h
new file mode 100644
index 0000000..2e5e7ff
--- /dev/null
+++ b/services/mediametrics/include/mediametricsservice/MediaDrmStatsdHelper.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_DRM_STATSD_HELPER_H
+#define MEDIA_DRM_STATSD_HELPER_H
+
+#include <cstdint>
+#include <stats_media_metrics.h>
+#include <string>
+namespace android {
+
+class MediaDrmStatsdHelper {
+public:
+    static int32_t findDrmScheme(const int64_t msb, const int64_t lsb);
+    static int32_t findDrmApi(const std::string& api);
+};
+
+} // namespace android
+#endif  // MEDIA_DRM_STATSD_HELPER_H
\ No newline at end of file
diff --git a/services/mediametrics/include/mediametricsservice/MediaMetricsService.h b/services/mediametrics/include/mediametricsservice/MediaMetricsService.h
index 8d0b1cf..3ec5ac7 100644
--- a/services/mediametrics/include/mediametricsservice/MediaMetricsService.h
+++ b/services/mediametrics/include/mediametricsservice/MediaMetricsService.h
@@ -125,7 +125,7 @@
     std::atomic<int64_t> mItemsSubmitted{}; // accessed outside of lock.
 
     // mStatsdLog is locked internally (thread-safe) and shows the last atoms logged
-    static constexpr size_t STATSD_LOG_LINES_MAX = 30; // recent log lines to keep
+    static constexpr size_t STATSD_LOG_LINES_MAX = 48; // recent log lines to keep
     static constexpr size_t STATSD_LOG_LINES_DUMP = 4; // normal amount of lines to dump
     const std::shared_ptr<mediametrics::StatsdLog> mStatsdLog{
             std::make_shared<mediametrics::StatsdLog>(STATSD_LOG_LINES_MAX)};
diff --git a/services/mediametrics/include/mediametricsservice/StatsdLog.h b/services/mediametrics/include/mediametricsservice/StatsdLog.h
index e207bac..5d5009e 100644
--- a/services/mediametrics/include/mediametricsservice/StatsdLog.h
+++ b/services/mediametrics/include/mediametricsservice/StatsdLog.h
@@ -16,11 +16,13 @@
 
 #pragma once
 
-#include <audio_utils/SimpleLog.h>
 #include <map>
 #include <mutex>
 #include <sstream>
 
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+
 namespace android::mediametrics {
 
 class StatsdLog {
@@ -61,9 +63,9 @@
    }
 
 private:
+    mutable std::mutex mLock;
     SimpleLog mSimpleLog; // internally locked
     std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
-    mutable std::mutex mLock;
 };
 
 } // namespace android::mediametrics
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index a56f5b8..ed2cf2e 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -23,6 +23,19 @@
 
 namespace android::mediametrics::stringutils {
 
+// Defines a stream output operator for printing a vector; this
+// is used for proto repeated arguments.
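+// For example, the vector {1, 2, 3} prints as "{ 1 2 3 }".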
+template <typename T>
+inline std::ostream & operator<< (std::ostream& s,
+                           std::vector<T> const& v) {
+    s << "{ ";
+    for (const auto& e : v) {
+        s << e << " ";
+    }
+    s << "}";
+    return s;
+}
+
 /**
  * fieldPrint is a helper method that logs to a stringstream a sequence of
  * field names (in a fixed size array) together with a variable number of arg parameters.
@@ -59,6 +72,12 @@
 std::vector<std::string> split(const std::string& flags, const char *delim);
 
 /**
+ * Parses a vector of integers using ',', '{' and '}' as delimiters. Leaves
+ * vector unmodified if the parsing fails.
+ */
+bool parseVector(const std::string &str, std::vector<int32_t> *vector);
+
+/**
  * Parse the devices string and return a vector of device address pairs.
  *
  * A failure to parse returns early with the contents that were able to be parsed.
@@ -204,4 +223,14 @@
     return { key, "" };
 }
 
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseOutputDevicePairs(const std::string& outputDevicePairs);
+
+std::pair<std::string /* external statsd */, std::string /* internal */>
+parseInputDevicePairs(const std::string& inputDevicePairs);
+
+inline bool hasBluetoothOutputDevice(std::string_view devices) {
+    return devices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos;
+}
+
 } // namespace android::mediametrics::stringutils
diff --git a/services/mediametrics/include/mediametricsservice/TimeMachine.h b/services/mediametrics/include/mediametricsservice/TimeMachine.h
index ce579b3..1445c7c 100644
--- a/services/mediametrics/include/mediametricsservice/TimeMachine.h
+++ b/services/mediametrics/include/mediametricsservice/TimeMachine.h
@@ -143,6 +143,7 @@
             if (mPropertyMap.size() >= kKeyMaxProperties &&
                     !mPropertyMap.count(property)) {
                 ALOGV("%s: too many properties, rejecting %s", __func__, property.c_str());
+                mRejectedPropertiesCount++;
                 return;
             }
             auto& timeSequence = mPropertyMap[property];
@@ -172,6 +173,10 @@
                     ss << s;
                 }
             }
+            if (ll > 0 && mRejectedPropertiesCount > 0) {
+                ss << "Rejected properties: " << mRejectedPropertiesCount << "\n";
+                ll--;
+            }
             return { ss.str(), lines - ll };
         }
 
@@ -214,6 +219,7 @@
         const uid_t mAllowUid;
         const int64_t mCreationTime;
 
+        unsigned int mRejectedPropertiesCount = 0;
         int64_t mLastModificationTime;
         std::map<std::string /* property */, PropertyHistory> mPropertyMap;
     };
@@ -221,7 +227,7 @@
     using History = std::map<std::string /* key */, std::shared_ptr<KeyHistory>>;
 
     static inline constexpr size_t kTimeSequenceMaxElements = 50;
-    static inline constexpr size_t kKeyMaxProperties = 50;
+    static inline constexpr size_t kKeyMaxProperties = 128;
     static inline constexpr size_t kKeyLowWaterMark = 400;
     static inline constexpr size_t kKeyHighWaterMark = 500;
 
diff --git a/services/mediametrics/include/mediametricsservice/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
index c2a8b3c..34d71ba 100644
--- a/services/mediametrics/include/mediametricsservice/iface_statsd.h
+++ b/services/mediametrics/include/mediametricsservice/iface_statsd.h
@@ -15,7 +15,9 @@
  */
 
 #include <memory>
+
 #include <stats_event.h>
+#include <StatsdLog.h>
 
 namespace android {
 namespace mediametrics {
@@ -30,13 +32,15 @@
 extern statsd_pusher statsd_audiothread;
 extern statsd_pusher statsd_audiotrack;
 extern statsd_pusher statsd_codec;
+extern statsd_pusher statsd_drmmanager;
 extern statsd_pusher statsd_extractor;
+extern statsd_pusher statsd_mediadrm;
+extern statsd_pusher statsd_mediadrm_created;
+extern statsd_pusher statsd_mediadrm_errored;
+extern statsd_pusher statsd_mediadrm_session_opened;
 extern statsd_pusher statsd_mediaparser;
-
 extern statsd_pusher statsd_nuplayer;
 extern statsd_pusher statsd_recorder;
-extern statsd_pusher statsd_mediadrm;
-extern statsd_pusher statsd_drmmanager;
 
 using statsd_puller = bool (const std::shared_ptr<const mediametrics::Item>& item,
         AStatsEventList *, const std::shared_ptr<mediametrics::StatsdLog>& statsdLog);
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 3d9376e..9a9bc1d 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -107,15 +107,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiopolicy_reported:"
-            << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -131,7 +132,7 @@
             << " active_session:" << active_session
             << " active_device:" << active_device
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index a7b045e..63c61ec 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -99,16 +99,14 @@
     }
 
     int32_t error_code = -1;
-    if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
-        metrics_proto.set_error_code(error_code);
-    } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+    if (item->getInt32("android.media.audiorecord.errcode", &error_code) ||
+        item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
         metrics_proto.set_error_code(error_code);
     }
 
     std::string error_function;
-    if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
-        metrics_proto.set_error_function(error_function);
-    } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+    if (item->getString("android.media.audiorecord.errfunc", &error_function) ||
+        item->getString("android.media.audiorecord.lastError.at", &error_function)) {
         metrics_proto.set_error_function(error_function);
     }
 
@@ -149,8 +147,9 @@
     (void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized,
@@ -158,7 +157,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiorecord_reported:"
-            << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -183,7 +182,7 @@
 
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index e9b6dd6..3056605 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -188,15 +188,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiothread_reported:"
-            << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -231,7 +232,7 @@
             << " latency_mean_millis:" << latency_mean_millis
             << " latency_stddev_millis:" << latency_stddev_millis
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 67514e9..1fc7fb4 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -134,8 +134,9 @@
     (void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
     const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                bf_serialized,
@@ -143,7 +144,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_audiotrack_reported:"
-            << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -164,7 +165,7 @@
 
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index a737ba0..ea76bcd 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -23,42 +23,178 @@
 #include <pthread.h>
 #include <pwd.h>
 #include <stdint.h>
+#include <string>
 #include <string.h>
 #include <sys/stat.h>
 #include <sys/time.h>
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 #include <stats_event.h>
 
-#include "cleaner.h"
-#include "MediaMetricsService.h"
-#include "ValidateId.h"
-#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
-#include "iface_statsd.h"
+#include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
+#include <mediametricsservice/cleaner.h>
+#include <mediametricsservice/iface_statsd.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
+#include <mediametricsservice/ValidateId.h>
 
 namespace android {
 
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_RENDERED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+
+static const int BITRATE_UNKNOWN =
+        stats::media_metrics::MEDIA_CODEC_RENDERED__BITRATE__BITRATE_UNKNOWN;
+
+static const std::pair<char const *, int> CODEC_LOOKUP[] = {
+    { "avc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+    { "h264", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+    { "hevc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+    { "h265", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+    { "vp8", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP8 },
+    { "vp9", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP9 },
+    { "av1", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+    { "av01", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+    { "dolby-vision", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+};
+
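+// The lookup below relies on the resolution enum values doubling as pixel-count
+// thresholds (in thousands of pixels), listed in descending order, so that
+// getMetricsResolutionEnum() can return the first bucket the frame size exceeds.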
+static const int32_t RESOLUTION_LOOKUP[] = {
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_MAX_SIZE,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_32K,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_16K,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_4K_UHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1440X2560,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2400,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2340,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD_ALMOST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_576X1024,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_540X960,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X854,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_360X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_352X640,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_VERY_LOW,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_SMALLEST,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO,
+};
+
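+// Framerate enum values are encoded in hundredths of frames per second, matching
+// the tolerance comparison in getMetricsFramerateEnum() below.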
+static const int32_t FRAMERATE_LOOKUP[] = {
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_25,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_30,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_50,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_60,
+    stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_120,
+};
+
+static int32_t getMetricsCodecEnum(const std::string &mime, const std::string &componentName) {
+    for (const auto & codecStrAndEnum : CODEC_LOOKUP) {
+        if (strcasestr(mime.c_str(), codecStrAndEnum.first) != nullptr ||
+            strcasestr(componentName.c_str(), codecStrAndEnum.first) != nullptr) {
+            return codecStrAndEnum.second;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+}
+
+static int32_t getMetricsResolutionEnum(int32_t width, int32_t height) {
+    if (width == 0 || height == 0) {
+        return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+    }
+    int64_t pixels = int64_t(width) * height / 1000;
+    if (width < 0 || height < 0 || pixels > RESOLUTION_LOOKUP[0]) {
+        return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+    }
+    for (int32_t resolutionEnum : RESOLUTION_LOOKUP) {
+        if (pixels > resolutionEnum) {
+            return resolutionEnum;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+}
+
+static int32_t getMetricsFramerateEnum(float inFramerate) {
+    if (inFramerate == -1.0f) {
+        return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+    }
+    if (inFramerate == -2.0f) {
+        return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+    }
+    int framerate = int(inFramerate * 100); // Table is in hundredths of frames per second
+    static const int framerateTolerance = 40; // 0.4 frames per second, in hundredths to match the table
+    for (int32_t framerateEnum : FRAMERATE_LOOKUP) {
+        if (abs(framerate - framerateEnum) < framerateTolerance) {
+            return framerateEnum;
+        }
+    }
+    return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+}
+
+static int32_t getMetricsHdrFormatEnum(std::string &mime, std::string &componentName,
+                                       int32_t configColorTransfer, int32_t parsedColorTransfer,
+                                       int32_t hdr10StaticInfo, int32_t hdr10PlusInfo) {
+    if (hdr10PlusInfo) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+    }
+    if (hdr10StaticInfo) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+    }
+    // 7 = COLOR_TRANSFER_HLG in MediaCodecConstants.h
+    if (configColorTransfer == 7 || parsedColorTransfer == 7) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+    }
+    if (strcasestr(mime.c_str(), "dolby-vision") != nullptr ||
+        strcasestr(componentName.c_str(), "dvhe") != nullptr ||
+        strcasestr(componentName.c_str(), "dvav") != nullptr ||
+        strcasestr(componentName.c_str(), "dav1") != nullptr) {
+        return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+    }
+    return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+}
+
+static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
+    if (!mediametrics::stringutils::parseVector(str, vector)) {
+        ALOGE("failed to parse integer vector from '%s'", str.c_str());
+    }
+}
+
 bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
         const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
 {
     if (item == nullptr) return false;
 
     AStatsEvent* event = AStatsEvent_obtain();
-    AStatsEvent_setAtomId(event, android::util::MEDIA_CODEC_REPORTED);
+    AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
 
-    const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
-    AStatsEvent_writeInt64(event, timestamp_nanos);
+    const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
+    AStatsEvent_writeInt64(event, timestampNanos);
 
-    std::string package_name = item->getPkgName();
-    AStatsEvent_writeString(event, package_name.c_str());
+    std::string packageName = item->getPkgName();
+    AStatsEvent_writeString(event, packageName.c_str());
 
-    int64_t package_version_code = item->getPkgVersionCode();
-    AStatsEvent_writeInt64(event, package_version_code);
+    int64_t packageVersionCode = item->getPkgVersionCode();
+    AStatsEvent_writeInt64(event, packageVersionCode);
 
-    int64_t media_apex_version = 0;
-    AStatsEvent_writeInt64(event, media_apex_version);
+    int64_t mediaApexVersion = 0;
+    AStatsEvent_writeInt64(event, mediaApexVersion);
 
     // the rest into our own proto
     //
@@ -84,17 +220,25 @@
     }
     AStatsEvent_writeString(event, mode.c_str());
 
-    int32_t encoder = -1;
-    if (item->getInt32("android.media.mediacodec.encoder", &encoder)) {
-        metrics_proto.set_encoder(encoder);
+    int32_t isEncoder = -1;
+    if (item->getInt32("android.media.mediacodec.encoder", &isEncoder)) {
+        metrics_proto.set_encoder(isEncoder);
     }
-    AStatsEvent_writeInt32(event, encoder);
+    AStatsEvent_writeInt32(event, isEncoder);
 
-    int32_t secure = -1;
-    if (item->getInt32("android.media.mediacodec.secure", &secure)) {
-        metrics_proto.set_secure(secure);
+    int32_t isSecure = -1;
+    if (item->getInt32("android.media.mediacodec.secure", &isSecure)) {
+        metrics_proto.set_secure(isSecure);
     }
-    AStatsEvent_writeInt32(event, secure);
+    AStatsEvent_writeInt32(event, isSecure);
+
+    int32_t isHardware = -1;
+    item->getInt32("android.media.mediacodec.hardware", &isHardware);
+    // not logged to MediaCodecReported or MediametricsCodecReported
+
+    int32_t isTunneled = -1;
+    item->getInt32("android.media.mediacodec.tunneled", &isTunneled);
+    // not logged to MediaCodecReported or MediametricsCodecReported
 
     int32_t width = -1;
     if (item->getInt32("android.media.mediacodec.width", &width)) {
@@ -133,79 +277,78 @@
     AStatsEvent_writeInt32(event, level);
 
 
-    int32_t max_width = -1;
-    if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
-        metrics_proto.set_max_width(max_width);
+    int32_t maxWidth = -1;
+    if ( item->getInt32("android.media.mediacodec.maxwidth", &maxWidth)) {
+        metrics_proto.set_max_width(maxWidth);
     }
-    AStatsEvent_writeInt32(event, max_width);
+    AStatsEvent_writeInt32(event, maxWidth);
 
-    int32_t max_height = -1;
-    if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
-        metrics_proto.set_max_height(max_height);
+    int32_t maxHeight = -1;
+    if ( item->getInt32("android.media.mediacodec.maxheight", &maxHeight)) {
+        metrics_proto.set_max_height(maxHeight);
     }
-    AStatsEvent_writeInt32(event, max_height);
+    AStatsEvent_writeInt32(event, maxHeight);
 
-    int32_t error_code = -1;
-    if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
-        metrics_proto.set_error_code(error_code);
+    int32_t errorCode = -1;
+    if ( item->getInt32("android.media.mediacodec.errcode", &errorCode)) {
+        metrics_proto.set_error_code(errorCode);
     }
-    AStatsEvent_writeInt32(event, error_code);
+    AStatsEvent_writeInt32(event, errorCode);
 
-    std::string error_state;
-    if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
-        metrics_proto.set_error_state(error_state);
+    std::string errorState;
+    if ( item->getString("android.media.mediacodec.errstate", &errorState)) {
+        metrics_proto.set_error_state(errorState);
     }
-    AStatsEvent_writeString(event, error_state.c_str());
+    AStatsEvent_writeString(event, errorState.c_str());
 
-    int64_t latency_max = -1;
-    if (item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
-        metrics_proto.set_latency_max(latency_max);
+    int64_t latencyMax = -1;
+    if (item->getInt64("android.media.mediacodec.latency.max", &latencyMax)) {
+        metrics_proto.set_latency_max(latencyMax);
     }
-    AStatsEvent_writeInt64(event, latency_max);
+    AStatsEvent_writeInt64(event, latencyMax);
 
-    int64_t latency_min = -1;
-    if (item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
-        metrics_proto.set_latency_min(latency_min);
+    int64_t latencyMin = -1;
+    if (item->getInt64("android.media.mediacodec.latency.min", &latencyMin)) {
+        metrics_proto.set_latency_min(latencyMin);
     }
-    AStatsEvent_writeInt64(event, latency_min);
+    AStatsEvent_writeInt64(event, latencyMin);
 
-    int64_t latency_avg = -1;
-    if (item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
-        metrics_proto.set_latency_avg(latency_avg);
+    int64_t latencyAvg = -1;
+    if (item->getInt64("android.media.mediacodec.latency.avg", &latencyAvg)) {
+        metrics_proto.set_latency_avg(latencyAvg);
     }
-    AStatsEvent_writeInt64(event, latency_avg);
+    AStatsEvent_writeInt64(event, latencyAvg);
 
-    int64_t latency_count = -1;
-    if (item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
-        metrics_proto.set_latency_count(latency_count);
+    int64_t latencyCount = -1;
+    if (item->getInt64("android.media.mediacodec.latency.n", &latencyCount)) {
+        metrics_proto.set_latency_count(latencyCount);
     }
-    AStatsEvent_writeInt64(event, latency_count);
+    AStatsEvent_writeInt64(event, latencyCount);
 
-    int64_t latency_unknown = -1;
-    if (item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
-        metrics_proto.set_latency_unknown(latency_unknown);
+    int64_t latencyUnknown = -1;
+    if (item->getInt64("android.media.mediacodec.latency.unknown", &latencyUnknown)) {
+        metrics_proto.set_latency_unknown(latencyUnknown);
     }
-    AStatsEvent_writeInt64(event, latency_unknown);
+    AStatsEvent_writeInt64(event, latencyUnknown);
 
-    int32_t queue_secure_input_buffer_error = -1;
+    int32_t queueSecureInputBufferError = -1;
     if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
-            &queue_secure_input_buffer_error)) {
-        metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
+            &queueSecureInputBufferError)) {
+        metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
     }
-    AStatsEvent_writeInt32(event, queue_secure_input_buffer_error);
+    AStatsEvent_writeInt32(event, queueSecureInputBufferError);
 
-    int32_t queue_input_buffer_error = -1;
-    if (item->getInt32("android.media.mediacodec.queueInputBufferError",
-            &queue_input_buffer_error)) {
-        metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
+    int32_t queueInputBufferError = -1;
+    if (item->getInt32("android.media.mediacodec.queueInputBufferError", &queueInputBufferError)) {
+        metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
     }
-    AStatsEvent_writeInt32(event, queue_input_buffer_error);
+    AStatsEvent_writeInt32(event, queueInputBufferError);
 
-    std::string bitrate_mode;
-    if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
-        metrics_proto.set_bitrate_mode(bitrate_mode);
+    std::string bitrateMode;
+    if (item->getString("android.media.mediacodec.bitrate_mode", &bitrateMode)) {
+        metrics_proto.set_bitrate_mode(bitrateMode);
     }
-    AStatsEvent_writeString(event, bitrate_mode.c_str());
+    AStatsEvent_writeString(event, bitrateMode.c_str());
 
     int32_t bitrate = -1;
     if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
@@ -213,18 +356,18 @@
     }
     AStatsEvent_writeInt32(event, bitrate);
 
-    int64_t lifetime_millis = -1;
-    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
-        lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
-        metrics_proto.set_lifetime_millis(lifetime_millis);
+    int64_t lifetimeMillis = -1;
+    if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMillis)) {
+        lifetimeMillis = mediametrics::bucket_time_minutes(lifetimeMillis);
+        metrics_proto.set_lifetime_millis(lifetimeMillis);
     }
-    AStatsEvent_writeInt64(event, lifetime_millis);
+    AStatsEvent_writeInt64(event, lifetimeMillis);
 
-    int64_t playback_duration_sec = -1;
-    item->getInt64("android.media.mediacodec.playback-duration-sec", &playback_duration_sec);
+    int64_t playbackDurationSec = -1;
+    item->getInt64("android.media.mediacodec.playback-duration-sec", &playbackDurationSec);
     // DO NOT record playback-duration in the metrics_proto - it should only
     // exist in the flattened atom
-    AStatsEvent_writeInt64(event, playback_duration_sec);
+    AStatsEvent_writeInt64(event, playbackDurationSec);
 
     std::string sessionId;
     if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
@@ -438,67 +581,255 @@
     }
     AStatsEvent_writeInt32(event, hdr10PlusInfo);
 
-    int32_t hdrFormat= -1;
+    int32_t hdrFormat = -1;
     if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
         metrics_proto.set_hdr_format(hdrFormat);
     }
     AStatsEvent_writeInt32(event, hdrFormat);
 
+    int64_t codecId = 0;
+    if (item->getInt64("android.media.mediacodec.id", &codecId)) {
+        metrics_proto.set_codec_id(codecId);
+    }
+    AStatsEvent_writeInt64(event, codecId);
+
+    int32_t arrayMode = -1;
+    if (item->getInt32("android.media.mediacodec.array-mode", &arrayMode)) {
+        metrics_proto.set_array_mode(arrayMode);
+    }
+    AStatsEvent_writeInt32(event, arrayMode);
+
+    int32_t operationMode = -1;
+    if (item->getInt32("android.media.mediacodec.operation-mode", &operationMode)) {
+        metrics_proto.set_operation_mode(operationMode);
+    }
+    AStatsEvent_writeInt32(event, operationMode);
+
+    int32_t outputSurface = -1;
+    if (item->getInt32("android.media.mediacodec.output-surface", &outputSurface)) {
+        metrics_proto.set_output_surface(outputSurface);
+    }
+    AStatsEvent_writeInt32(event, outputSurface);
+
+    int32_t appMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.app-max-input-size", &appMaxInputSize)) {
+        metrics_proto.set_app_max_input_size(appMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, appMaxInputSize);
+
+    int32_t usedMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.used-max-input-size", &usedMaxInputSize)) {
+        metrics_proto.set_used_max_input_size(usedMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, usedMaxInputSize);
+
+    int32_t codecMaxInputSize = -1;
+    if (item->getInt32("android.media.mediacodec.codec-max-input-size", &codecMaxInputSize)) {
+        metrics_proto.set_codec_max_input_size(codecMaxInputSize);
+    }
+    AStatsEvent_writeInt32(event, codecMaxInputSize);
+
+    int32_t flushCount = -1;
+    if (item->getInt32("android.media.mediacodec.flush-count", &flushCount)) {
+        metrics_proto.set_flush_count(flushCount);
+    }
+    AStatsEvent_writeInt32(event, flushCount);
+
+    int32_t setSurfaceCount = -1;
+    if (item->getInt32("android.media.mediacodec.set-surface-count", &setSurfaceCount)) {
+        metrics_proto.set_set_surface_count(setSurfaceCount);
+    }
+    AStatsEvent_writeInt32(event, setSurfaceCount);
+
+    int32_t resolutionChangeCount = -1;
+    if (item->getInt32("android.media.mediacodec.resolution-change-count",
+            &resolutionChangeCount)) {
+        metrics_proto.set_resolution_change_count(resolutionChangeCount);
+    }
+    AStatsEvent_writeInt32(event, resolutionChangeCount);
+
+    int32_t componentColorFormat = -1;
+    if (item->getInt32("android.media.mediacodec.component-color-format", &componentColorFormat)) {
+        metrics_proto.set_component_color_format(componentColorFormat);
+    }
+    AStatsEvent_writeInt32(event, componentColorFormat);
+
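+    // The fields below feed only the MEDIA_CODEC_RENDERED atom and are intentionally
+    // not set on metrics_proto.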
+    int64_t firstRenderTimeUs = -1;
+    item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
+    int64_t framesReleased = -1;
+    item->getInt64("android.media.mediacodec.frames-released", &framesReleased);
+    int64_t framesRendered = -1;
+    item->getInt64("android.media.mediacodec.frames-rendered", &framesRendered);
+    int64_t framesDropped = -1;
+    item->getInt64("android.media.mediacodec.frames-dropped", &framesDropped);
+    int64_t framesSkipped = -1;
+    item->getInt64("android.media.mediacodec.frames-skipped", &framesSkipped);
+    double framerateContent = -1;
+    item->getDouble("android.media.mediacodec.framerate-content", &framerateContent);
+    double framerateActual = -1;
+    item->getDouble("android.media.mediacodec.framerate-actual", &framerateActual);
+    int64_t freezeScore = -1;
+    item->getInt64("android.media.mediacodec.freeze-score", &freezeScore);
+    double freezeRate = -1;
+    item->getDouble("android.media.mediacodec.freeze-rate", &freezeRate);
+    std::string freezeScoreHistogramStr;
+    item->getString("android.media.mediacodec.freeze-score-histogram", &freezeScoreHistogramStr);
+    std::string freezeScoreHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-score-histogram-buckets",
+                    &freezeScoreHistogramBucketsStr);
+    std::string freezeDurationMsHistogramStr;
+    item->getString("android.media.mediacodec.freeze-duration-ms-histogram",
+                    &freezeDurationMsHistogramStr);
+    std::string freezeDurationMsHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-duration-ms-histogram-buckets",
+                    &freezeDurationMsHistogramBucketsStr);
+    std::string freezeDistanceMsHistogramStr;
+    item->getString("android.media.mediacodec.freeze-distance-ms-histogram",
+                    &freezeDistanceMsHistogramStr);
+    std::string freezeDistanceMsHistogramBucketsStr;
+    item->getString("android.media.mediacodec.freeze-distance-ms-histogram-buckets",
+                    &freezeDistanceMsHistogramBucketsStr);
+    int64_t judderScore = -1;
+    item->getInt64("android.media.mediacodec.judder-score", &judderScore);
+    double judderRate = -1;
+    item->getDouble("android.media.mediacodec.judder-rate", &judderRate);
+    std::string judderScoreHistogramStr;
+    item->getString("android.media.mediacodec.judder-score-histogram", &judderScoreHistogramStr);
+    std::string judderScoreHistogramBucketsStr;
+    item->getString("android.media.mediacodec.judder-score-histogram-buckets",
+                    &judderScoreHistogramBucketsStr);
+
     int err = AStatsEvent_write(event);
     if (err < 0) {
       ALOGE("Failed to write codec metrics to statsd (%d)", err);
     }
     AStatsEvent_release(event);
 
+    if (framesRendered > 0) {
+        int32_t statsUid = item->getUid();
+        int64_t statsCodecId = codecId;
+        char const *statsLogSessionId = sessionId.c_str();
+        int32_t statsIsHardware = isHardware;
+        int32_t statsIsSecure = isSecure;
+        int32_t statsIsTunneled = isTunneled;
+        int32_t statsCodec = getMetricsCodecEnum(mime, codec);
+        int32_t statsResolution = getMetricsResolutionEnum(width, height);
+        int32_t statsBitrate = BITRATE_UNKNOWN;
+        int32_t statsContentFramerate = getMetricsFramerateEnum(framerateContent);
+        int32_t statsActualFramerate = getMetricsFramerateEnum(framerateActual);
+        int32_t statsHdrFormat = getMetricsHdrFormatEnum(mime, codec, configColorTransfer,
+                                                         parsedColorTransfer, hdrStaticInfo,
+                                                         hdr10PlusInfo);
+        int64_t statsFirstRenderTimeUs = firstRenderTimeUs;
+        int64_t statsPlaybackDurationSeconds = playbackDurationSec;
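+        // statsFramesTotal (released + skipped) is the denominator for the drop/skip rates below.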
+        int64_t statsFramesTotal = framesReleased + framesSkipped;
+        int64_t statsFramesReleased = framesReleased;
+        int64_t statsFramesRendered = framesRendered;
+        int64_t statsFramesDropped = framesDropped;
+        int64_t statsFramesSkipped = framesSkipped;
+        float statsFrameDropRate = float(double(framesDropped) / statsFramesTotal);
+        float statsFrameSkipRate = float(double(framesSkipped) / statsFramesTotal);
+        float statsFrameSkipDropRate = float(double(framesSkipped + framesDropped) /
+                                             statsFramesTotal);
+        int64_t statsFreezeScore = freezeScore;
+        float statsFreezeRate = freezeRate;
+        std::vector<int32_t> statsFreezeDurationMsHistogram;
+        parseVector(freezeDurationMsHistogramStr, &statsFreezeDurationMsHistogram);
+        std::vector<int32_t> statsFreezeDurationMsHistogramBuckets;
+        parseVector(freezeDurationMsHistogramBucketsStr, &statsFreezeDurationMsHistogramBuckets);
+        std::vector<int32_t> statsFreezeDistanceMsHistogram;
+        parseVector(freezeDistanceMsHistogramStr, &statsFreezeDistanceMsHistogram);
+        std::vector<int32_t> statsFreezeDistanceMsHistogramBuckets;
+        parseVector(freezeDistanceMsHistogramBucketsStr, &statsFreezeDistanceMsHistogramBuckets);
+        int64_t statsJudderScore = judderScore;
+        float statsJudderRate = judderRate;
+        std::vector<int32_t> statsJudderScoreHistogram;
+        parseVector(judderScoreHistogramStr, &statsJudderScoreHistogram);
+        std::vector<int32_t> statsJudderScoreHistogramBuckets;
+        parseVector(judderScoreHistogramBucketsStr, &statsJudderScoreHistogramBuckets);
+        int result = stats_write(
+            MEDIA_CODEC_RENDERED,
+            statsUid,
+            statsCodecId,
+            statsLogSessionId,
+            statsIsHardware,
+            statsIsSecure,
+            statsIsTunneled,
+            statsCodec,
+            statsResolution,
+            statsBitrate,
+            statsContentFramerate,
+            statsActualFramerate,
+            statsHdrFormat,
+            statsFirstRenderTimeUs,
+            statsPlaybackDurationSeconds,
+            statsFramesTotal,
+            statsFramesReleased,
+            statsFramesRendered,
+            statsFramesDropped,
+            statsFramesSkipped,
+            statsFrameDropRate,
+            statsFrameSkipRate,
+            statsFrameSkipDropRate,
+            statsFreezeScore,
+            statsFreezeRate,
+            statsFreezeDurationMsHistogram,
+            statsFreezeDurationMsHistogramBuckets,
+            statsFreezeDistanceMsHistogram,
+            statsFreezeDistanceMsHistogramBuckets,
+            statsJudderScore,
+            statsJudderRate,
+            statsJudderScoreHistogram,
+            statsJudderScoreHistogramBuckets);
+        ALOGE_IF(result < 0, "Failed to record MEDIA_CODEC_RENDERED atom (%d)", result);
+    }
+
     std::string serialized;
     if (!metrics_proto.SerializeToString(&serialized)) {
         ALOGE("Failed to serialize codec metrics");
         return false;
     }
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
-                               timestamp_nanos, package_name.c_str(), package_version_code,
-                               media_apex_version,
+    const stats::media_metrics::BytesField bf_serialized(serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
+                               timestampNanos, packageName.c_str(), packageVersionCode,
+                               mediaApexVersion,
                                bf_serialized);
 
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_codec_reported:"
-            << android::util::MEDIAMETRICS_CODEC_REPORTED
-            << " timestamp_nanos:" << timestamp_nanos
-            << " package_name:" << package_name
-            << " package_version_code:" << package_version_code
-            << " media_apex_version:" << media_apex_version
-
+            << stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
+            << " timestamp_nanos:" << timestampNanos
+            << " package_name:" << packageName
+            << " package_version_code:" << packageVersionCode
+            << " media_apex_version:" << mediaApexVersion
             << " codec:" << codec
             << " mime:" << mime
             << " mode:" << mode
-            << " encoder:" << encoder
-            << " secure:" << secure
+            << " encoder:" << isEncoder
+            << " secure:" << isSecure
             << " width:" << width
             << " height:" << height
             << " rotation:" << rotation
             << " crypto:" << crypto
             << " profile:" << profile
-
             << " level:" << level
-            << " max_width:" << max_width
-            << " max_height:" << max_height
-            << " error_code:" << error_code
-            << " error_state:" << error_state
-            << " latency_max:" << latency_max
-            << " latency_min:" << latency_min
-            << " latency_avg:" << latency_avg
-            << " latency_count:" << latency_count
-            << " latency_unknown:" << latency_unknown
-
-            << " queue_input_buffer_error:" << queue_input_buffer_error
-            << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
-            << " bitrate_mode:" << bitrate_mode
+            << " max_width:" << maxWidth
+            << " max_height:" << maxHeight
+            << " error_code:" << errorCode
+            << " error_state:" << errorState
+            << " latency_max:" << latencyMax
+            << " latency_min:" << latencyMin
+            << " latency_avg:" << latencyAvg
+            << " latency_count:" << latencyCount
+            << " latency_unknown:" << latencyUnknown
+            << " queue_input_buffer_error:" << queueInputBufferError
+            << " queue_secure_input_buffer_error:" << queueSecureInputBufferError
+            << " bitrate_mode:" << bitrateMode
             << " bitrate:" << bitrate
             << " original_bitrate:" << originalBitrate
-            << " lifetime_millis:" << lifetime_millis
-            << " playback_duration_seconds:" << playback_duration_sec
+            << " lifetime_millis:" << lifetimeMillis
+            << " playback_duration_seconds:" << playbackDurationSec
             << " log_session_id:" << sessionId
             << " channel_count:" << channelCount
             << " sample_rate:" << sampleRate
@@ -511,7 +842,6 @@
             << " operating_rate:" << operatingRate
             << " priority:" << priority
             << " shaping_enhanced:" << shapingEnhanced
-
             << " qp_i_min:" << qpIMin
             << " qp_i_max:" << qpIMax
             << " qp_p_min:" << qpPMin
@@ -525,7 +855,7 @@
             << " original_qp_b_min:" << qpBMinOri
             << " original_qp_b_max:" << qpBMaxOri
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
 
 
     return true;
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index e06a605..e5f7190 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -18,8 +18,9 @@
 #define LOG_TAG "statsd_drm"
 #include <utils/Log.h>
 #include <media/stagefright/foundation/base64.h>
+#include <binder/IPCThreadState.h>
 
-#include <stdint.h>
+#include <cstdint>
 #include <inttypes.h>
 #include <sys/types.h>
 #include <sys/stat.h>
@@ -32,10 +33,11 @@
 #include <pwd.h>
 
 #include "MediaMetricsService.h"
+#include "MediaDrmStatsdHelper.h"
 #include "StringUtils.h"
 #include "iface_statsd.h"
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include <array>
 #include <string>
@@ -69,8 +71,9 @@
     // This field is left here for backward compatibility.
     // This field is not used anymore.
     const std::string  kUnusedField("");
-    android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         vendor.c_str(),
@@ -80,7 +83,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_mediadrm_reported:"
-            << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -90,7 +93,7 @@
             << " description:" << description
             // omitting serialized
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
     return true;
 }
 
@@ -122,7 +125,8 @@
         item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
     }
 
-    const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED,
                                timestamp_nanos, package_name.c_str(), package_version_code,
                                media_apex_version,
                                plugin_id.c_str(), description.c_str(),
@@ -136,7 +140,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_drmmanager_reported:"
-            << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -151,7 +155,7 @@
         log << " method_" << i << ":" << methodCounts[i];
     }
     log << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
     return true;
 }
 
@@ -207,7 +211,7 @@
 
     // Memory for |event| is internally managed by statsd.
     AStatsEvent* event = AStatsEventList_addStatsEvent(out);
-    AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+    AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO);
     AStatsEvent_writeString(event, item->getPkgName().c_str());
     AStatsEvent_writeInt64(event, item->getPkgVersionCode());
     AStatsEvent_writeString(event, vendor.c_str());
@@ -219,7 +223,7 @@
     std::stringstream log;
     log << "pulled:" << " {"
             << " media_drm_activity_info:"
-            << android::util::MEDIA_DRM_ACTIVITY_INFO
+            << stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO
             << " package_name:" << item->getPkgName()
             << " package_version_code:" << item->getPkgVersionCode()
             << " vendor:" << vendor
@@ -227,7 +231,146 @@
             << " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
             << " vendor_metrics:" <<  mediametrics::stringutils::bytesToString(plugin_raw, 8)
             << " }";
-    statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO, log.str());
+    return true;
+}
+
+bool statsd_mediadrm_created(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t uuid_lsb = -1;
+    if (!item->getInt64("uuid_lsb", &uuid_lsb)) return false;
+    int64_t uuid_msb = -1;
+    if (!item->getInt64("uuid_msb", &uuid_msb)) return false;
+    const int32_t scheme = MediaDrmStatsdHelper::findDrmScheme(uuid_msb, uuid_lsb);
+    const int32_t uid = IPCThreadState::self()->getCallingUid();
+    int32_t frontend = 0;
+    if (!item->getInt32("frontend", &frontend)) return false;
+
+    // Optional fields, included when present
+    std::string version = "";
+    item->getString("version", &version);
+    const int result = stats_write(stats::media_metrics::MEDIA_DRM_CREATED,
+                    scheme, uuid_lsb, uuid_msb, uid, frontend, version.c_str());
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_created:"
+            << stats::media_metrics::MEDIA_DRM_CREATED
+            << " scheme:" << scheme
+            << " uuid_lsb:" << uuid_lsb
+            << " uuid_msb:" << uuid_msb
+            << " uid:" << uid
+            << " frontend:" << frontend
+            << " version:" << version
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_CREATED, log.str());
+    return true;
+}
+
+bool statsd_mediadrm_session_opened(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t uuid_lsb = -1;
+    if (!item->getInt64("uuid_lsb", &uuid_lsb)) return false;
+    int64_t uuid_msb = -1;
+    if (!item->getInt64("uuid_msb", &uuid_msb)) return false;
+    const int32_t scheme = MediaDrmStatsdHelper::findDrmScheme(uuid_msb, uuid_lsb);
+    std::string object_nonce = "";
+    if (!item->getString("object_nonce", &object_nonce)) return false;
+    const int32_t uid = IPCThreadState::self()->getCallingUid();
+    int32_t frontend = 0;
+    if (!item->getInt32("frontend", &frontend)) return false;
+    int32_t requested_security_level = 0;
+    if (!item->getInt32("requested_security_level", &requested_security_level)) return false;
+    int32_t opened_security_level = 0;
+    if (!item->getInt32("opened_security_level", &opened_security_level)) return false;
+
+    // Optional fields, included when present
+    std::string version = "";
+    item->getString("version", &version);
+    const int result = stats_write(stats::media_metrics::MEDIA_DRM_SESSION_OPENED,
+                        scheme, uuid_lsb, uuid_msb, uid, frontend, version.c_str(),
+                        object_nonce.c_str(), requested_security_level,
+                        opened_security_level);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_session_opened:"
+            << stats::media_metrics::MEDIA_DRM_SESSION_OPENED
+            << " scheme:" << scheme
+            << " uuid_lsb:" << uuid_lsb
+            << " uuid_msb:" << uuid_msb
+            << " uid:" << uid
+            << " frontend:" << frontend
+            << " version:" << version
+            << " object_nonce:" << object_nonce
+            << " requested_security_level:" << requested_security_level
+            << " opened_security_level:" << opened_security_level
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_SESSION_OPENED, log.str());
+    return true;
+}
+
+bool statsd_mediadrm_errored(const std::shared_ptr<const mediametrics::Item>& item,
+        const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
+{
+    int64_t uuid_lsb = -1;
+    if (!item->getInt64("uuid_lsb", &uuid_lsb)) return false;
+    int64_t uuid_msb = -1;
+    if (!item->getInt64("uuid_msb", &uuid_msb)) return false;
+    const int32_t scheme = MediaDrmStatsdHelper::findDrmScheme(uuid_msb, uuid_lsb);
+    const int32_t uid = IPCThreadState::self()->getCallingUid();
+    int32_t frontend = 0;
+    if (!item->getInt32("frontend", &frontend)) return false;
+    std::string object_nonce = "";
+    if (!item->getString("object_nonce", &object_nonce)) return false;
+    std::string api_str = "";
+    if (!item->getString("api", &api_str)) return false;
+    const int32_t api = MediaDrmStatsdHelper::findDrmApi(api_str);
+    int32_t error_code = 0;
+    if (!item->getInt32("error_code", &error_code)) return false;
+
+    // Optional fields, included when present
+    std::string version = "";
+    item->getString("version", &version);
+    std::string session_nonce = "";
+    item->getString("session_nonce", &session_nonce);
+    int32_t security_level = 0;
+    item->getInt32("security_level", &security_level);
+
+    int32_t cdm_err = 0;
+    item->getInt32("cdm_err", &cdm_err);
+    int32_t oem_err = 0;
+    item->getInt32("oem_err", &oem_err);
+    int32_t error_context = 0;
+    item->getInt32("error_context", &error_context);
+
+    const int result = stats_write(stats::media_metrics::MEDIA_DRM_ERRORED, scheme, uuid_lsb,
+                        uuid_msb, uid, frontend, version.c_str(), object_nonce.c_str(),
+                        session_nonce.c_str(), security_level, api, error_code, cdm_err,
+                        oem_err, error_context);
+
+    std::stringstream log;
+    log << "result:" << result << " {"
+            << " media_drm_errored:"
+            << stats::media_metrics::MEDIA_DRM_ERRORED
+            << " scheme:" << scheme
+            << " uuid_lsb:" << uuid_lsb
+            << " uuid_msb:" << uuid_msb
+            << " uid:" << uid
+            << " frontend:" << frontend
+            << " version:" << version
+            << " object_nonce:" << object_nonce
+            << " session_nonce:" << session_nonce
+            << " security_level:" << security_level
+            << " api:" << api
+            << " error_code:" << error_code
+            << " cdm_err:" << cdm_err
+            << " oem_err:" << oem_err
+            << " error_context:" << error_context
+            << " }";
+    statsdLog->log(stats::media_metrics::MEDIA_DRM_ERRORED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index a8bfeaa..9345df6 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -96,15 +96,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_extractor_reported:"
-            << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -116,7 +117,7 @@
             << " entry_point:" << entry_point_string << "(" << entry_point << ")"
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 67ca874b..458bd32 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -28,7 +28,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -83,7 +83,8 @@
     item->getString("android.media.mediaparser.logSessionId", &logSessionId);
     logSessionId = mediametrics::ValidateId::get()->validateId(logSessionId);
 
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+    const int result = stats::media_metrics::stats_write(
+                               stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED,
                                timestamp_nanos,
                                package_name.c_str(),
                                package_version_code,
@@ -103,7 +104,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_mediaparser_reported:"
-            << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -120,7 +121,7 @@
             << " video_height:" << videoHeight
             << " log_session_id:" << logSessionId
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index bdee1f2..fd545f4 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -153,8 +153,9 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
@@ -162,7 +163,7 @@
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_nuplayer_reported:"
-            << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -193,7 +194,7 @@
             // TODO NuPlayer - add log_session_id
             // << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 5f54a68..efa284b 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -29,7 +29,7 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#include <statslog.h>
+#include <stats_media_metrics.h>
 
 #include "MediaMetricsService.h"
 #include "ValidateId.h"
@@ -179,15 +179,16 @@
         return false;
     }
 
-    android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
-    int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+    const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+    const int result = stats::media_metrics::stats_write(
+        stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED,
         timestamp_nanos, package_name.c_str(), package_version_code,
         media_apex_version,
         bf_serialized);
     std::stringstream log;
     log << "result:" << result << " {"
             << " mediametrics_recorder_reported:"
-            << android::util::MEDIAMETRICS_RECORDER_REPORTED
+            << stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED
             << " timestamp_nanos:" << timestamp_nanos
             << " package_name:" << package_name
             << " package_version_code:" << package_version_code
@@ -218,7 +219,7 @@
             << " iframe_interval:" << iframe_interval
             << " log_session_id:" << log_session_id
             << " }";
-    statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
+    statsdLog->log(stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED, log.str());
     return true;
 }
 
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index bc7b47b..4a6aee4 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -17,9 +17,10 @@
 #define LOG_TAG "mediametrics_tests"
 #include <utils/Log.h>
 
-
 #include <stdio.h>
+#include <string>
 #include <unordered_set>
+#include <vector>
 
 #include <gtest/gtest.h>
 #include <media/MediaMetricsItem.h>
@@ -30,6 +31,7 @@
 #include <system/audio.h>
 
 using namespace android;
+using android::mediametrics::stringutils::parseVector;
 
 static size_t countNewlines(const char *s) {
     size_t count = 0;
@@ -57,6 +59,35 @@
   ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
 }
 
+TEST(mediametrics_tests, parseVector) {
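+    // These cases pin down the expected behavior: '{' and '}' act as plain separators,
+    // non-numeric tokens and out-of-range values fail the parse, and a failed parse
+    // leaves the caller's vector unchanged.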
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
+    }
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(true, parseVector("53", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({53}));
+    }
+    {
+        std::vector<int32_t> values;
+        EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({}));
+    }
+    {
+        std::vector<int32_t> values = {1}; // should still be this when parsing fails
+        std::vector<int32_t> expected = {1};
+        EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
+        EXPECT_EQ(values, expected);
+    }
+    {
+        std::vector<int32_t> values = {2}; // should still be this when parsing fails
+        EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
+        EXPECT_EQ(values, std::vector<int32_t>({2}));
+    }
+}
+
 TEST(mediametrics_tests, defer) {
   bool check = false;
   {
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 5d80744..a2bd5e1 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -16,6 +16,8 @@
         "aidl/android/media/MediaResourceSubType.aidl",
         "aidl/android/media/MediaResourceParcel.aidl",
         "aidl/android/media/MediaResourcePolicyParcel.aidl",
+        "aidl/android/media/ClientInfoParcel.aidl",
+        "aidl/android/media/ClientConfigParcel.aidl",
     ],
     path: "aidl",
 }
@@ -72,9 +74,11 @@
     name: "libresourcemanagerservice",
 
     srcs: [
+        "ResourceManagerMetrics.cpp",
         "ResourceManagerService.cpp",
         "ResourceObserverService.cpp",
         "ServiceLog.cpp",
+        "UidObserver.cpp",
 
         // TODO: convert to AIDL?
         "IMediaResourceMonitor.cpp",
@@ -87,10 +91,16 @@
         "libbinder_ndk",
         "libutils",
         "liblog",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+        "libprotobuf-cpp-lite",
+        "libactivitymanager_aidl",
     ],
 
     static_libs: [
         "resourceobserver_aidl_interface-V1-ndk",
+        "libplatformprotos",
     ],
 
     include_dirs: ["frameworks/av/include"],
@@ -101,4 +111,10 @@
     ],
 
     export_include_dirs: ["."],
+
+    export_shared_lib_headers: [
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+    ],
 }
diff --git a/services/mediaresourcemanager/OWNERS b/services/mediaresourcemanager/OWNERS
index 82abf8f..4fc3728 100644
--- a/services/mediaresourcemanager/OWNERS
+++ b/services/mediaresourcemanager/OWNERS
@@ -1 +1,3 @@
-dwkang@google.com
+girishshetty@google.com
+lajos@google.com
+wonsik@google.com
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
new file mode 100644
index 0000000..f8cdb80
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -0,0 +1,657 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+#include <utils/Log.h>
+#include <mediautils/ProcessInfo.h>
+
+#include <stats_media_metrics.h>
+
+#include "UidObserver.h"
+#include "ResourceManagerMetrics.h"
+
+#include <cmath>
+#include <sstream>
+
+namespace android {
+
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_STARTED;
+using stats::media_metrics::MEDIA_CODEC_STOPPED;
+// Disabling this for now.
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+using stats::media_metrics::MEDIA_CODEC_CONCURRENT_USAGE_REPORTED;
+#endif
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+using stats::media_metrics::\
+    MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+
+inline const char* getCodecType(MediaResourceSubType codecType) {
+    switch (codecType) {
+        case MediaResourceSubType::kAudioCodec:         return "Audio";
+        case MediaResourceSubType::kVideoCodec:         return "Video";
+        case MediaResourceSubType::kImageCodec:         return "Image";
+        case MediaResourceSubType::kUnspecifiedSubType:
+        default:
+                                                        return "Unspecified";
+    }
+    return "Unspecified";
+}
+
+static CodecBucket getCodecBucket(bool isHardware,
+                                  bool isEncoder,
+                                  MediaResourceSubType codecType) {
+    if (isHardware) {
+        switch (codecType) {
+            case MediaResourceSubType::kAudioCodec:
+                if (isEncoder) return HwAudioEncoder;
+                return HwAudioDecoder;
+            case MediaResourceSubType::kVideoCodec:
+                if (isEncoder) return HwVideoEncoder;
+                return HwVideoDecoder;
+            case MediaResourceSubType::kImageCodec:
+                if (isEncoder) return HwImageEncoder;
+                return HwImageDecoder;
+            case MediaResourceSubType::kUnspecifiedSubType:
+            default:
+                return CodecBucketUnspecified;
+        }
+    } else {
+        switch (codecType) {
+            case MediaResourceSubType::kAudioCodec:
+                if (isEncoder) return SwAudioEncoder;
+                return SwAudioDecoder;
+            case MediaResourceSubType::kVideoCodec:
+                if (isEncoder) return SwVideoEncoder;
+                return SwVideoDecoder;
+            case MediaResourceSubType::kImageCodec:
+                if (isEncoder) return SwImageEncoder;
+                return SwImageDecoder;
+            case MediaResourceSubType::kUnspecifiedSubType:
+            default:
+                return CodecBucketUnspecified;
+        }
+    }
+
+    return CodecBucketUnspecified;
+}
+
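+// Appends the non-zero HW/SW counts (terminated with ' ] ') to logMsg; returns true if
+// either count is positive.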
+static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
+    bool update = false;
+    logMsg.clear();
+
+    if (hwCount > 0) {
+        logMsg << " HW: " << hwCount;
+        update = true;
+    }
+    if (swCount > 0) {
+        logMsg << " SW: " << swCount;
+        update = true;
+    }
+
+    if (update) {
+        logMsg << " ] ";
+    }
+    return update;
+}
+
+ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
+    // Create a process termination watcher with a 5-second polling interval.
+    mUidObserver = sp<UidObserver>::make(processInfo,
+        [this] (int32_t pid, uid_t uid) {
+            onProcessTerminated(pid, uid);
+        });
+    mUidObserver->start();
+}
+
+ResourceManagerMetrics::~ResourceManagerMetrics() {
+    mUidObserver->stop();
+}
+
+void ResourceManagerMetrics::addPid(int pid, uid_t uid) {
+    if (uid != 0) {
+        std::scoped_lock lock(mLock);
+        mUidObserver->add(pid, uid);
+    }
+}
+
+void ResourceManagerMetrics::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    std::scoped_lock lock(mLock);
+    // Update the resource instance count.
+    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientInfo.name);
+    if (found == mConcurrentResourceCountMap.end()) {
+        mConcurrentResourceCountMap[clientInfo.name] = 1;
+    } else {
+        found->second++;
+    }
+}
+
+void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+    bool stopCalled = true;
+    ClientConfigParcel clientConfig;
+    {
+        std::scoped_lock lock(mLock);
+        ClientConfigMap::iterator found = mClientConfigMap.find(clientInfo.id);
+        if (found != mClientConfigMap.end()) {
+            // Release is called without Stop!
+            stopCalled = false;
+            clientConfig = found->second;
+            // Update the timestamp for stopping the codec.
+            clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+        }
+    }
+    if (!stopCalled) {
+        // call Stop to update the metrics.
+        notifyClientStopped(clientConfig);
+    }
+    {
+        std::scoped_lock lock(mLock);
+        // Update the resource instance count also.
+        std::map<std::string, int>::iterator found =
+            mConcurrentResourceCountMap.find(clientInfo.name);
+        if (found != mConcurrentResourceCountMap.end()) {
+            if (found->second > 0) {
+                found->second--;
+            }
+        }
+    }
+}
+
+void ResourceManagerMetrics::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+    if (entry != mClientConfigMap.end() &&
+        (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec)) {
+        int pid = clientConfig.clientInfo.pid;
+        // Update the pixel count for this process
+        updatePixelCount(pid, clientConfig.width * (long)clientConfig.height,
+                         entry->second.width * (long)entry->second.height);
+        // Update the resolution in the record.
+        entry->second.width = clientConfig.width;
+        entry->second.height = clientConfig.height;
+    }
+}
+
+void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    int pid = clientConfig.clientInfo.pid;
+    // We need to observe this process.
+    mUidObserver->add(pid, clientConfig.clientInfo.uid);
+
+    // Update the client config for this client.
+    mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;
+
+    // Update the concurrent codec count for this process.
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+                                             clientConfig.isEncoder,
+                                             clientConfig.codecType);
+    increaseConcurrentCodecs(pid, codecBucket);
+
+    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+        // Update the pixel count for this process
+        increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+    }
+
+    // System concurrent codec usage
+    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
+    // Process/Application concurrent codec usage for this type of codec
+    const ConcurrentCodecs& concurrentCodecs = mProcessConcurrentCodecsMap[pid];
+    int appConcurrentCodecs = concurrentCodecs.mCurrent[codecBucket];
+    int hwVideoCodecs = concurrentCodecs.mHWVideoCodecs;
+    int swVideoCodecs = concurrentCodecs.mSWVideoCodecs;
+    int videoCodecs = concurrentCodecs.mVideoCodecs;
+    int audioCodecs = concurrentCodecs.mAudioCodecs;
+    int imageCodecs = concurrentCodecs.mImageCodecs;
+    // Process/Application's current pixel count.
+    long pixelCount = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it != mProcessPixelsMap.end()) {
+        pixelCount = it->second.mCurrent;
+    }
+
+    int result = stats_write(
+         MEDIA_CODEC_STARTED,
+         clientConfig.clientInfo.uid,
+         clientConfig.id,
+         clientConfig.clientInfo.name.c_str(),
+         static_cast<int32_t>(clientConfig.codecType),
+         clientConfig.isEncoder,
+         clientConfig.isHardware,
+         clientConfig.width, clientConfig.height,
+         systemConcurrentCodecs,
+         appConcurrentCodecs,
+         pixelCount,
+         hwVideoCodecs,
+         swVideoCodecs,
+         videoCodecs,
+         audioCodecs,
+         imageCodecs);
+
+    ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
+          "Process[pid(%d): uid(%d)] "
+          "Codec: [%s: %ju] is %s %s %s "
+          "Timestamp: %jd "
+          "Resolution: %d x %d "
+          "ConcurrentCodec[%d]={System: %d App: %d} "
+          "AppConcurrentCodecs{Video: %d(HW[%d] SW[%d]) Audio: %d Image: %d} "
+          "result: %d",
+          __func__,
+          pid, clientConfig.clientInfo.uid,
+          clientConfig.clientInfo.name.c_str(),
+          clientConfig.id,
+          clientConfig.isHardware? "hardware" : "software",
+          getCodecType(clientConfig.codecType),
+          clientConfig.isEncoder? "encoder" : "decoder",
+          clientConfig.timeStamp,
+          clientConfig.width, clientConfig.height,
+          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
+          videoCodecs, hwVideoCodecs, swVideoCodecs, audioCodecs, imageCodecs,
+          result);
+}
+
+void ResourceManagerMetrics::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    std::scoped_lock lock(mLock);
+    int pid = clientConfig.clientInfo.pid;
+    // Update the concurrent codec count for this process.
+    CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+                                             clientConfig.isEncoder,
+                                             clientConfig.codecType);
+    decreaseConcurrentCodecs(pid, codecBucket);
+
+    if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+        clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+        // Update the pixel count for this process
+        decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+    }
+
+    // System concurrent codec usage
+    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
+    // Process/Application concurrent codec usage for this type of codec
+    int appConcurrentCodecs = 0;
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found != mProcessConcurrentCodecsMap.end()) {
+        appConcurrentCodecs = found->second.mCurrent[codecBucket];
+    }
+    // Process/Application's current pixel count.
+    long pixelCount = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it != mProcessPixelsMap.end()) {
+        pixelCount = it->second.mCurrent;
+    }
+
+    // calculate the usageTime as:
+    //  MediaCodecStopped.clientConfig.timeStamp -
+    //  MediaCodecStarted.clientConfig.timeStamp
+    int64_t usageTime = 0;
+    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+    if (entry != mClientConfigMap.end()) {
+        usageTime = clientConfig.timeStamp - entry->second.timeStamp;
+        // And we can erase this config now.
+        mClientConfigMap.erase(entry);
+    } else {
+        ALOGW("%s: Start Config is missing!", __func__);
+    }
+
+    int result = stats_write(
+         MEDIA_CODEC_STOPPED,
+         clientConfig.clientInfo.uid,
+         clientConfig.id,
+         clientConfig.clientInfo.name.c_str(),
+         static_cast<int32_t>(clientConfig.codecType),
+         clientConfig.isEncoder,
+         clientConfig.isHardware,
+         clientConfig.width, clientConfig.height,
+         systemConcurrentCodecs,
+         appConcurrentCodecs,
+         pixelCount,
+         usageTime);
+    ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
+          "Process[pid(%d): uid(%d)] "
+          "Codec: [%s: %ju] is %s %s %s "
+          "Timestamp: %jd Usage time: %jd "
+          "Resolution: %d x %d "
+          "ConcurrentCodec[%d]={System: %d App: %d} "
+          "result: %d",
+          __func__,
+          pid, clientConfig.clientInfo.uid,
+          clientConfig.clientInfo.name.c_str(),
+          clientConfig.id,
+          clientConfig.isHardware? "hardware" : "software",
+          getCodecType(clientConfig.codecType),
+          clientConfig.isEncoder? "encoder" : "decoder",
+          clientConfig.timeStamp, usageTime,
+          clientConfig.width, clientConfig.height,
+          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
+          result);
+}
+
+void ResourceManagerMetrics::onProcessTerminated(int32_t pid, uid_t uid) {
+    std::scoped_lock lock(mLock);
+    // post MediaCodecConcurrentUsageReported for this terminated pid.
+    pushConcurrentUsageReport(pid, uid);
+}
+
+void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
+    // Process/Application peak concurrent codec usage
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found == mProcessConcurrentCodecsMap.end()) {
+        ALOGI("%s: No MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom Entry for: "
+              "Application[pid(%d): uid(%d)]", __func__, pid, uid);
+        return;
+    }
+    const ConcurrentCodecsMap& codecsMap = found->second.mPeak;
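+    // mPeak holds the per-bucket high-water marks maintained in increaseConcurrentCodecs().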
+    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+
+    long peakPixels = 0;
+    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+    if (it == mProcessPixelsMap.end()) {
+        ALOGI("%s: No Video Codec Entry for Application[pid(%d): uid(%d)]",
+              __func__, pid, uid);
+    } else {
+        peakPixels = it->second.mPeak;
+    }
+    std::string peakPixelsLog("Peak Pixels: " + std::to_string(peakPixels));
+
+    std::stringstream peakCodecLog;
+    peakCodecLog << "Peak { ";
+    std::stringstream logMsg;
+    if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
+        peakCodecLog << "AudioEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
+        peakCodecLog << "AudioDec[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
+        peakCodecLog << "VideoEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
+        peakCodecLog << "VideoDec[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
+        peakCodecLog << "ImageEnc[" << logMsg.str();
+    }
+    if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
+        peakCodecLog << "ImageDec[" << logMsg.str();
+    }
+    peakCodecLog << "}";
+
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+    int result = stats_write(
+        MEDIA_CODEC_CONCURRENT_USAGE_REPORTED,
+        uid,
+        peakHwVideoDecoderCount,
+        peakHwVideoEncoderCount,
+        peakSwVideoDecoderCount,
+        peakSwVideoEncoderCount,
+        peakHwAudioDecoderCount,
+        peakHwAudioEncoderCount,
+        peakSwAudioDecoderCount,
+        peakSwAudioEncoderCount,
+        peakHwImageDecoderCount,
+        peakHwImageEncoderCount,
+        peakSwImageDecoderCount,
+        peakSwImageEncoderCount,
+        peakPixels);
+    ALOGI("%s: Pushed MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom: "
+          "Process[pid(%d): uid(%d)] %s %s result: %d",
+          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str(), result);
+#else
+    ALOGI("%s: Concurrent Codec Usage Report for the Process[pid(%d): uid(%d)] is %s %s",
+          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str());
+#endif
+}
+
+void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                        const std::vector<int>& priorities,
+                        const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                        const PidUidVector& idList, bool reclaimed) {
+    // Construct the metrics for codec reclaim as a pushed atom.
+    // 1. Information about the requester.
+    //  - UID and the priority (oom score)
+    int32_t callingPid = clientInfo.pid;
+    int32_t requesterUid = clientInfo.uid;
+    std::string clientName = clientInfo.name;
+    int requesterPriority = priorities[0];
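+    // priorities[0] is the requester's priority; priorities[1..] line up with the entries in idList.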
+
+    //  2. Information about the codec.
+    //  - Name of the codec requested
+    //  - Number of concurrent codecs running.
+    int32_t noOfConcurrentCodecs = 0;
+    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientName);
+    if (found != mConcurrentResourceCountMap.end()) {
+        noOfConcurrentCodecs = found->second;
+    }
+
+    // 3. Information about the Reclaim:
+    // - Status of reclaim request
+    // - How many codecs are reclaimed
+    //    - For each codec reclaimed, information about the process it belonged to:
+    //    - UID and the Priority (oom score)
+    int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+    if (!reclaimed) {
+      if (clients.size() == 0) {
+        // No clients to reclaim from
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+      } else {
+        // Couldn't reclaim resources from the clients
+        reclaimStatus =
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+      }
+    }
+    int32_t noOfCodecsReclaimed = clients.size();
+    int32_t targetIndex = 1;
+    for (PidUidVector::const_reference id : idList) {
+        int32_t targetUid = id.second;
+        int targetPriority = priorities[targetIndex];
+        // Post the pushed atom
+        int result = stats_write(
+            MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+            requesterUid,
+            requesterPriority,
+            clientName.c_str(),
+            noOfConcurrentCodecs,
+            reclaimStatus,
+            noOfCodecsReclaimed,
+            targetIndex,
+            targetUid,
+            targetPriority);
+        ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+              "Requester[pid(%d): uid(%d): priority(%d)] "
+              "Codec: [%s] "
+              "No of concurrent codecs: %d "
+              "Reclaim Status: %d "
+              "No of codecs reclaimed: %d "
+              "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+              __func__, callingPid, requesterUid, requesterPriority,
+              clientName.c_str(), noOfConcurrentCodecs,
+              reclaimStatus, noOfCodecsReclaimed,
+              targetIndex, id.first, targetUid, targetPriority, result);
+        targetIndex++;
+    }
+}
+
+void ResourceManagerMetrics::increaseConcurrentCodecs(int32_t pid,
+                                                      CodecBucket codecBucket) {
+    // Increase the codec usage across the system.
+    mConcurrentCodecsMap[codecBucket]++;
+
+    // Now update the codec usage for this (pid) process.
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found == mProcessConcurrentCodecsMap.end()) {
+        ConcurrentCodecs codecs;
+        codecs.mCurrent[codecBucket] = 1;
+        codecs.mPeak[codecBucket] = 1;
+        auto added = mProcessConcurrentCodecsMap.emplace(pid, codecs);
+        found = added.first;
+    } else {
+        found->second.mCurrent[codecBucket]++;
+        // Check if it's the peak count for this slot.
+        if (found->second.mPeak[codecBucket] < found->second.mCurrent[codecBucket]) {
+            found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
+        }
+    }
+
+    switch (codecBucket) {
+        case HwVideoEncoder:
+        case HwVideoDecoder:
+        case SwVideoEncoder:
+        case SwVideoDecoder:
+            if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+                found->second.mHWVideoCodecs++;
+            } else {
+                found->second.mSWVideoCodecs++;
+            }
+            found->second.mVideoCodecs++;
+            break;
+        case HwAudioEncoder:
+        case HwAudioDecoder:
+        case SwAudioEncoder:
+        case SwAudioDecoder:
+            found->second.mAudioCodecs++;
+            break;
+        case HwImageEncoder:
+        case HwImageDecoder:
+        case SwImageEncoder:
+        case SwImageDecoder:
+            found->second.mImageCodecs++;
+            break;
+        default:
+            break;
+    }
+}
+
+void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
+                                                      CodecBucket codecBucket) {
+    // Decrease the codec usage across the system.
+    if (mConcurrentCodecsMap[codecBucket] > 0) {
+        mConcurrentCodecsMap[codecBucket]--;
+    }
+
+    // Now update the codec usage for this (pid) process.
+    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+    if (found != mProcessConcurrentCodecsMap.end()) {
+        if (found->second.mCurrent[codecBucket] > 0) {
+            found->second.mCurrent[codecBucket]--;
+        }
+
+        switch (codecBucket) {
+            case HwVideoEncoder:
+            case HwVideoDecoder:
+            case SwVideoEncoder:
+            case SwVideoDecoder:
+                if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+                    found->second.mHWVideoCodecs--;
+                } else {
+                    found->second.mSWVideoCodecs--;
+                }
+                found->second.mVideoCodecs--;
+                break;
+            case HwAudioEncoder:
+            case HwAudioDecoder:
+            case SwAudioEncoder:
+            case SwAudioDecoder:
+                found->second.mAudioCodecs--;
+                break;
+            case HwImageEncoder:
+            case HwImageDecoder:
+            case SwImageEncoder:
+            case SwImageDecoder:
+                found->second.mImageCodecs--;
+                break;
+            default:
+                break;
+        }
+    }
+}
+
+void ResourceManagerMetrics::increasePixelCount(int32_t pid, long pixels) {
+    // Now update the current pixel usage for this (pid) process.
+    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+    if (found == mProcessPixelsMap.end()) {
+        PixelCount pixelCount {pixels, pixels};
+        mProcessPixelsMap.emplace(pid, pixelCount);
+    } else {
+        if (__builtin_add_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+            ALOGI("Pixel Count overflow");
+            return;
+        }
+        // Check if it's the peak count for this slot.
+        if (found->second.mPeak < found->second.mCurrent) {
+            found->second.mPeak = found->second.mCurrent;
+        }
+    }
+}
+
+void ResourceManagerMetrics::updatePixelCount(int32_t pid, long newPixels, long lastPixels) {
+    // Since the resolution has changed, decrease the count by the previous
+    // pixel count and increase it by the new one.
+    decreasePixelCount(pid, lastPixels);
+    increasePixelCount(pid, newPixels);
+}
+
+void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
+    // Now update the current pixel usage for this (pid) process.
+    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
+        if (found->second.mCurrent < pixels) {
+            found->second.mCurrent = 0;
+        } else {
+            if (__builtin_sub_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+                ALOGI("Pixel Count overflow");
+                return;
+            }
+        }
+    }
+}
+
+long ResourceManagerMetrics::getPeakConcurrentPixelCount(int pid) const {
+    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
+        return found->second.mPeak;
+    }
+
+    return 0;
+}
+
+long ResourceManagerMetrics::getCurrentConcurrentPixelCount(int pid) const {
+    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+    if (found != mProcessPixelsMap.end()) {
+        return found->second.mCurrent;
+    }
+
+    return 0;
+}
+
+} // namespace android
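For orientation, the pixel accounting above follows a single pattern: bump the current count behind an overflow guard, fold it into a peak (high-water mark), and saturate at zero on the way back down. Below is a minimal standalone sketch of that pattern; PixelTracker and main() are illustrative only and not part of this change (the sketch reuses the GCC/Clang __builtin_add_overflow builtin seen in the real code, but simply leaves the count unchanged when an overflow is detected).

    // Standalone sketch (not part of this change) of the current/peak pixel
    // accounting used by increasePixelCount()/decreasePixelCount() above.
    #include <cstdint>
    #include <cstdio>
    #include <map>

    struct PixelTracker {
        struct Count { long current = 0; long peak = 0; };
        std::map<int32_t, Count> byPid;  // per-process pixel counts

        void increase(int32_t pid, long pixels) {
            Count& c = byPid[pid];
            long next = 0;
            // Detect wrap-around with __builtin_add_overflow(); this sketch
            // leaves the count unchanged when an overflow is detected.
            if (__builtin_add_overflow(c.current, pixels, &next)) {
                return;
            }
            c.current = next;
            if (c.peak < c.current) {
                c.peak = c.current;  // track the high-water mark
            }
        }

        void decrease(int32_t pid, long pixels) {
            auto it = byPid.find(pid);
            if (it == byPid.end()) {
                return;
            }
            // Saturate at zero rather than going negative.
            it->second.current = (it->second.current < pixels)
                    ? 0 : it->second.current - pixels;
        }
    };

    int main() {
        PixelTracker tracker;
        tracker.increase(1234, 1920L * 1080);  // first codec configured at 1080p
        tracker.increase(1234, 3840L * 2160);  // second codec configured at 4K
        tracker.decrease(1234, 1920L * 1080);  // first codec stopped
        std::printf("current=%ld peak=%ld\n",
                    tracker.byPid[1234].current, tracker.byPid[1234].peak);
        return 0;
    }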
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
new file mode 100644
index 0000000..3124aa2
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -0,0 +1,193 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+#define ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
+using ::aidl::android::media::IResourceManagerClient;
+
+struct ProcessInfoInterface;
+
+class UidObserver;
+
+//
+// Enumeration for Codec bucket based on:
+//   - Encoder or Decoder
+//   - hardware implementation or not
+//   - Audio/Video/Image codec
+//
+enum CodecBucket {
+    CodecBucketUnspecified = 0,
+    HwAudioEncoder = 1,
+    HwAudioDecoder = 2,
+    HwVideoEncoder = 3,
+    HwVideoDecoder = 4,
+    HwImageEncoder = 5,
+    HwImageDecoder = 6,
+    SwAudioEncoder = 7,
+    SwAudioDecoder = 8,
+    SwVideoEncoder = 9,
+    SwVideoDecoder = 10,
+    SwImageEncoder = 11,
+    SwImageDecoder = 12,
+    CodecBucketMaxSize = 13,
+};
+
+// Map of client id to the client configuration recorded when it was last started.
+typedef std::map<int64_t, ClientConfigParcel> ClientConfigMap;
+
+// Map of pid to uid.
+typedef std::map<int32_t, uid_t> PidUidMap;
+
+// Map of concurrent codec counts by codec type bucket.
+struct ConcurrentCodecsMap {
+    int& operator[](CodecBucket index) {
+        return mCodec[index];
+    }
+
+    const int& operator[](CodecBucket index) const {
+        return mCodec[index];
+    }
+
+private:
+    int mCodec[CodecBucketMaxSize] = {0};
+};
+
+// Current and peak ConcurrentCodecsMap for a process.
+struct ConcurrentCodecs {
+    ConcurrentCodecsMap mCurrent;
+    ConcurrentCodecsMap mPeak;
+    // concurrent HW Video codecs.
+    int mHWVideoCodecs;
+    // concurrent SW Video codecs.
+    int mSWVideoCodecs;
+    // concurrent Video codecs.
+    int mVideoCodecs;
+    // concurrent Audio codecs.
+    int mAudioCodecs;
+    // concurrent Image codecs.
+    int mImageCodecs;
+};
+
+// Current and Peak pixel count for a process.
+struct PixelCount {
+    long mCurrent = 0;
+    long mPeak = 0;
+};
+
+//
+// ResourceManagerMetrics class that maintains concurrent codec counts based on:
+//
+//  1. # of concurrent active codecs (initialized, but not yet released) of a given
+//     implementation (by codec name) across the system.
+//
+//  2. # of concurrent codec usage (started, but not stopped yet), which is
+//  measured using codec type buckets (CodecBucket) for:
+//   - each process/application, and
+//   - the system as a whole.
+//  The peak count of the same is also maintained for each process/application.
+//
+//  3. # of Peak Concurrent Pixels for each process/application.
+//  This should help with understanding the (video) memory usage per
+//  application.
+//
+//
+class ResourceManagerMetrics {
+public:
+    ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
+    ~ResourceManagerMetrics();
+
+    // To be called when a client is created.
+    void notifyClientCreated(const ClientInfoParcel& clientInfo);
+
+    // To be called when a client is released.
+    void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+    // To be called when a client is started.
+    void notifyClientStarted(const ClientConfigParcel& clientConfig);
+
+    // To be called when a client is stopped.
+    void notifyClientStopped(const ClientConfigParcel& clientConfig);
+
+    // To be called when a client's configuration has changed.
+    void notifyClientConfigChanged(const ClientConfigParcel& clientConfig);
+
+    // To be called after a reclaim event.
+    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                         const std::vector<int>& priorities,
+                         const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                         const PidUidVector& idList, bool reclaimed);
+
+    // Add this pid/uid pair to monitor for process termination.
+    void addPid(int pid, uid_t uid = 0);
+
+    // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+    long getPeakConcurrentPixelCount(int pid) const;
+    // Get the current concurrent pixel count (associated with the video codecs) for the process.
+    long getCurrentConcurrentPixelCount(int pid) const;
+
+private:
+    ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
+    ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
+    ResourceManagerMetrics& operator=(const ResourceManagerMetrics&) = delete;
+    ResourceManagerMetrics& operator=(ResourceManagerMetrics&&) = delete;
+
+    // To increase/decrease the concurrent codec usage for a given CodecBucket.
+    void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+    void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+
+    // To increase/update/decrease the concurrent pixels usage for a process.
+    void increasePixelCount(int32_t pid, long pixels);
+    void updatePixelCount(int32_t pid, long newPixels, long lastPixels);
+    void decreasePixelCount(int32_t pid, long pixels);
+
+    // Issued when the process/application with the given pid/uid is terminated.
+    void onProcessTerminated(int32_t pid, uid_t uid);
+
+    // To push the concurrent codec usage of a process/application.
+    void pushConcurrentUsageReport(int32_t pid, uid_t uid);
+
+private:
+    std::mutex mLock;
+
+    // Map of client id and the configuration.
+    ClientConfigMap mClientConfigMap;
+
+    // Concurrent and Peak Pixel count for each process/application.
+    std::map<int32_t, PixelCount> mProcessPixelsMap;
+
+    // Map of resource (codec) name to the number of concurrent instances.
+    std::map<std::string, int> mConcurrentResourceCountMap;
+
+    // Map of concurrent codec counts by CodecBucket across the system.
+    ConcurrentCodecsMap mConcurrentCodecsMap;
+    // Map of concurrent and peak codec counts by CodecBucket for each process/application.
+    std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
+
+    // UidObserver to monitor application termination.
+    sp<UidObserver> mUidObserver;
+};
+
+} // namespace android
+
+#endif  // ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
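Worth noting: the CodecBucket values above are effectively the cross product of codec domain (audio/video/image), direction (encoder/decoder), and implementation (hardware/software), the same attributes carried by ClientConfigParcel later in this change. A hedged sketch of folding those attributes into a bucket follows; the SubType enum and the toCodecBucket() helper are illustrative assumptions, and the sketch assumes the CodecBucket enumerators declared above are in scope.

    // Hypothetical helper (not part of this change): maps codec attributes to
    // a CodecBucket. Assumes the CodecBucket enum from ResourceManagerMetrics.h.
    enum class SubType { Audio, Video, Image };  // illustrative stand-in

    CodecBucket toCodecBucket(SubType type, bool isEncoder, bool isHardware) {
        switch (type) {
        case SubType::Audio:
            return isHardware ? (isEncoder ? HwAudioEncoder : HwAudioDecoder)
                              : (isEncoder ? SwAudioEncoder : SwAudioDecoder);
        case SubType::Video:
            return isHardware ? (isEncoder ? HwVideoEncoder : HwVideoDecoder)
                              : (isEncoder ? SwVideoEncoder : SwVideoDecoder);
        case SubType::Image:
            return isHardware ? (isEncoder ? HwImageEncoder : HwImageDecoder)
                              : (isEncoder ? SwImageEncoder : SwImageDecoder);
        }
        return CodecBucketUnspecified;
    }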
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index b4610bc..53cc431 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -26,8 +26,9 @@
 #include <cutils/sched_policy.h>
 #include <dirent.h>
 #include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfo.h>
+#include <media/stagefright/foundation/ABase.h>
 #include <mediautils/BatteryNotifier.h>
+#include <mediautils/ProcessInfo.h>
 #include <mediautils/SchedulingPolicyService.h>
 #include <string.h>
 #include <sys/types.h>
@@ -36,6 +37,7 @@
 #include <unistd.h>
 
 #include "IMediaResourceMonitor.h"
+#include "ResourceManagerMetrics.h"
 #include "ResourceManagerService.h"
 #include "ResourceObserverService.h"
 #include "ServiceLog.h"
@@ -51,8 +53,8 @@
 
 class DeathNotifier : public RefBase {
 public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
-            int64_t clientId);
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+                  const ClientInfoParcel& clientInfo);
 
     virtual ~DeathNotifier() {}
 
@@ -62,13 +64,12 @@
 
 protected:
     std::weak_ptr<ResourceManagerService> mService;
-    int mPid;
-    int64_t mClientId;
+    const ClientInfoParcel mClientInfo;
 };
 
 DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-        int pid, int64_t clientId)
-    : mService(service), mPid(pid), mClientId(clientId) {}
+                             const ClientInfoParcel& clientInfo)
+    : mService(service), mClientInfo(clientInfo) {}
 
 //static
 void DeathNotifier::BinderDiedCallback(void* cookie) {
@@ -95,15 +96,16 @@
         return;
     }
 
-    service->overridePid(mPid, -1);
+    service->overridePid(mClientInfo.pid, -1);
     // thiz is freed in the call below, so it must be last call referring thiz
-    service->removeResource(mPid, mClientId, false /*checkValid*/);
+    service->removeResource(mClientInfo, false /*checkValid*/);
 }
 
 class OverrideProcessInfoDeathNotifier : public DeathNotifier {
 public:
     OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid) : DeathNotifier(service, pid, 0) {}
+                                     const ClientInfoParcel& clientInfo)
+            : DeathNotifier(service, clientInfo) {}
 
     virtual ~OverrideProcessInfoDeathNotifier() {}
 
@@ -118,7 +120,7 @@
         return;
     }
 
-    service->removeProcessInfoOverride(mPid);
+    service->removeProcessInfoOverride(mClientInfo.pid);
 }
 
 template <typename T>
@@ -183,6 +185,7 @@
 }
 
 static ResourceInfo& getResourceInfoForEdit(uid_t uid, int64_t clientId,
+                                            const std::string& name,
         const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
     ssize_t index = infos.indexOfKey(clientId);
 
@@ -190,6 +193,11 @@
         ResourceInfo info;
         info.uid = uid;
         info.clientId = clientId;
+        if (name.empty()) {
+            info.name = "<unknown client>";
+        } else {
+            info.name = name;
+        }
         info.client = client;
         info.cookie = 0;
         info.pendingRemoval = false;
@@ -262,7 +270,15 @@
 
     result.append("  Processes:\n");
     for (size_t i = 0; i < mapCopy.size(); ++i) {
-        snprintf(buffer, SIZE, "    Pid: %d\n", mapCopy.keyAt(i));
+        int pid = mapCopy.keyAt(i);
+        snprintf(buffer, SIZE, "    Pid: %d\n", pid);
+        result.append(buffer);
+        int priority = 0;
+        if (getPriority_l(pid, &priority)) {
+            snprintf(buffer, SIZE, "    Priority: %d\n", priority);
+        } else {
+            snprintf(buffer, SIZE, "    Priority: <unknown>\n");
+        }
         result.append(buffer);
 
         const ResourceInfos &infos = mapCopy.valueAt(i);
@@ -271,11 +287,7 @@
             snprintf(buffer, SIZE, "        Id: %lld\n", (long long)infos[j].clientId);
             result.append(buffer);
 
-            std::string clientName;
-            Status status = infos[j].client->getName(&clientName);
-            if (!status.isOk()) {
-                clientName = "<unknown client>";
-            }
+            std::string clientName = infos[j].name;
             snprintf(buffer, SIZE, "        Name: %s\n", clientName.c_str());
             result.append(buffer);
 
@@ -337,6 +349,8 @@
       mCpuBoostCount(0),
       mDeathRecipient(AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback)) {
     mSystemCB->noteResetVideo();
+    // Create ResourceManagerMetrics that handles all the metrics.
+    mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
 }
 
 //static
@@ -344,7 +358,9 @@
     std::shared_ptr<ResourceManagerService> service =
             ::ndk::SharedRefBase::make<ResourceManagerService>();
     binder_status_t status =
-            AServiceManager_addService(service->asBinder().get(), getServiceName());
+                        AServiceManager_addServiceWithFlags(
+                        service->asBinder().get(), getServiceName(),
+                        AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
     if (status != STATUS_OK) {
         return;
     }
@@ -434,11 +450,15 @@
     }
 }
 
-Status ResourceManagerService::addResource(int32_t pid, int32_t uid, int64_t clientId,
+Status ResourceManagerService::addResource(const ClientInfoParcel& clientInfo,
         const std::shared_ptr<IResourceManagerClient>& client,
         const std::vector<MediaResourceParcel>& resources) {
-    String8 log = String8::format("addResource(pid %d, clientId %lld, resources %s)",
-            pid, (long long) clientId, getString(resources).string());
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    const std::string& name = clientInfo.name;
+    String8 log = String8::format("addResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).string());
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -451,7 +471,7 @@
         uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
-    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
+    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, name, client, infos);
     ResourceList resourceAdded;
 
     for (size_t i = 0; i < resources.size(); ++i) {
@@ -484,19 +504,23 @@
     }
     if (info.cookie == 0 && client != nullptr) {
         info.cookie = addCookieAndLink_l(client,
-                new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+                new DeathNotifier(ref<ResourceManagerService>(), clientInfo));
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
     }
     notifyResourceGranted(pid, resources);
+
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(int32_t pid, int64_t clientId,
+Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo,
         const std::vector<MediaResourceParcel>& resources) {
-    String8 log = String8::format("removeResource(pid %d, clientId %lld, resources %s)",
-            pid, (long long) clientId, getString(resources).string());
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld, resources %s)",
+            pid, uid, (long long) clientId, getString(resources).string());
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -556,15 +580,17 @@
     return Status::ok();
 }
 
-Status ResourceManagerService::removeClient(int32_t pid, int64_t clientId) {
-    removeResource(pid, clientId, true /*checkValid*/);
+Status ResourceManagerService::removeClient(const ClientInfoParcel& clientInfo) {
+    removeResource(clientInfo, true /*checkValid*/);
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(int pid, int64_t clientId, bool checkValid) {
-    String8 log = String8::format(
-            "removeResource(pid %d, clientId %lld)",
-            pid, (long long) clientId);
+Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo, bool checkValid) {
+    int32_t pid = clientInfo.pid;
+    int32_t uid = clientInfo.uid;
+    int64_t clientId = clientInfo.id;
+    String8 log = String8::format("removeResource(pid %d, uid %d clientId %lld)",
+            pid, uid, (long long) clientId);
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
@@ -592,6 +618,9 @@
         onLastRemoved(it->second, info);
     }
 
+    // Since this client has been removed, update the metrics collector.
+    mResourceManagerMetrics->notifyClientReleased(clientInfo);
+
     removeCookieAndUnlink_l(info.client, info.cookie);
 
     if (mObserverService != nullptr && !info.resources.empty()) {
@@ -602,25 +631,30 @@
     return Status::ok();
 }
 
-void ResourceManagerService::getClientForResource_l(int callingPid, const MediaResourceParcel *res,
+void ResourceManagerService::getClientForResource_l(int callingPid,
+        const MediaResourceParcel *res,
+        PidUidVector* idVector,
         Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     if (res == NULL) {
         return;
     }
     std::shared_ptr<IResourceManagerClient> client;
-    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, &client)) {
+    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, idVector, &client)) {
         clients->push_back(client);
     }
 }
 
-Status ResourceManagerService::reclaimResource(int32_t callingPid,
+Status ResourceManagerService::reclaimResource(const ClientInfoParcel& clientInfo,
         const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
-    String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
-            callingPid, getString(resources).string());
+    int32_t callingPid = clientInfo.pid;
+    std::string clientName = clientInfo.name;
+    String8 log = String8::format("reclaimResource(callingPid %d, uid %d resources %s)",
+            callingPid, clientInfo.uid, getString(resources).string());
     mServiceLog->add(log);
     *_aidl_return = false;
 
     Vector<std::shared_ptr<IResourceManagerClient>> clients;
+    PidUidVector idVector;
     {
         Mutex::Autolock lock(mLock);
         if (!mProcessInfo->isPidTrusted(callingPid)) {
@@ -656,13 +690,13 @@
         if (secureCodec != NULL) {
             if (!mSupportsMultipleSecureCodecs) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                            secureCodec->subType, &clients)) {
+                            secureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
             if (!mSupportsSecureWithNonSecureCodec) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec,
-                            secureCodec->subType, &clients)) {
+                            secureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
@@ -670,13 +704,13 @@
         if (nonSecureCodec != NULL) {
             if (!mSupportsSecureWithNonSecureCodec) {
                 if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
-                        nonSecureCodec->subType, &clients)) {
+                        nonSecureCodec->subType, &idVector, &clients)) {
                     return Status::ok();
                 }
             }
         }
         if (drmSession != NULL) {
-            getClientForResource_l(callingPid, drmSession, &clients);
+            getClientForResource_l(callingPid, drmSession, &idVector, &clients);
             if (clients.size() == 0) {
                 return Status::ok();
             }
@@ -684,32 +718,54 @@
 
         if (clients.size() == 0) {
             // if no secure/non-secure codec conflict, run second pass to handle other resources.
-            getClientForResource_l(callingPid, graphicMemory, &clients);
+            getClientForResource_l(callingPid, graphicMemory, &idVector, &clients);
         }
 
         if (clients.size() == 0) {
             // if we are here, run the third pass to free one codec with the same type.
-            getClientForResource_l(callingPid, secureCodec, &clients);
-            getClientForResource_l(callingPid, nonSecureCodec, &clients);
+            getClientForResource_l(callingPid, secureCodec, &idVector, &clients);
+            getClientForResource_l(callingPid, nonSecureCodec, &idVector, &clients);
         }
 
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
                 MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &clients);
+                getClientForResource_l(callingPid, &temp, &idVector, &clients);
             }
             if (nonSecureCodec != NULL) {
                 MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
-                getClientForResource_l(callingPid, &temp, &clients);
+                getClientForResource_l(callingPid, &temp, &idVector, &clients);
             }
         }
     }
 
     *_aidl_return = reclaimUnconditionallyFrom(clients);
+
+    // Log Reclaim Pushed Atom to statsd
+    pushReclaimAtom(clientInfo, clients, idVector, *_aidl_return);
+
     return Status::ok();
 }
 
+void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                        const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                        const PidUidVector& idVector, bool reclaimed) {
+    int32_t callingPid = clientInfo.pid;
+    int requesterPriority = -1;
+    getPriority_l(callingPid, &requesterPriority);
+    std::vector<int> priorities;
+    priorities.push_back(requesterPriority);
+
+    for (PidUidVector::const_reference id : idVector) {
+        int targetPriority = -1;
+        getPriority_l(id.first, &targetPriority);
+        priorities.push_back(targetPriority);
+    }
+    mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, clients,
+                                             idVector, reclaimed);
+}
+
 bool ResourceManagerService::reclaimUnconditionallyFrom(
         const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
@@ -783,6 +839,7 @@
         mOverridePidMap.erase(originalPid);
         if (newPid != -1) {
             mOverridePidMap.emplace(originalPid, newPid);
+            mResourceManagerMetrics->addPid(newPid);
         }
     }
 
@@ -816,8 +873,12 @@
         return Status::fromServiceSpecificError(BAD_VALUE);
     }
 
+    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+                                .uid = 0,
+                                .id = 0,
+                                .name = "<unknown client>"};
     uintptr_t cookie = addCookieAndLink_l(client,
-            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+            new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), clientInfo));
 
     mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
 
@@ -869,7 +930,9 @@
     mProcessInfoOverrideMap.erase(pid);
 }
 
-Status ResourceManagerService::markClientForPendingRemoval(int32_t pid, int64_t clientId) {
+Status ResourceManagerService::markClientForPendingRemoval(const ClientInfoParcel& clientInfo) {
+    int32_t pid = clientInfo.pid;
+    int64_t clientId = clientInfo.id;
     String8 log = String8::format(
             "markClientForPendingRemoval(pid %d, clientId %lld)",
             pid, (long long) clientId);
@@ -927,7 +990,8 @@
                                                            MediaResource::SubType::kVideoCodec,
                                                            MediaResource::SubType::kImageCodec}) {
                         std::shared_ptr<IResourceManagerClient> client;
-                        if (getBiggestClientPendingRemoval_l(pid, type, subType, &client)) {
+                        uid_t uid = 0;
+                        if (getBiggestClientPendingRemoval_l(pid, type, subType, uid, &client)) {
                             clients.add(client);
                             continue;
                         }
@@ -936,8 +1000,9 @@
                 // Non-codec resources are shared by audio, video and image codecs (no subtype).
                 default:
                     std::shared_ptr<IResourceManagerClient> client;
+                    uid_t uid = 0;
                     if (getBiggestClientPendingRemoval_l(pid, type,
-                            MediaResource::SubType::kUnspecifiedSubType, &client)) {
+                            MediaResource::SubType::kUnspecifiedSubType, uid, &client)) {
                         clients.add(client);
                     }
                     break;
@@ -964,8 +1029,12 @@
 }
 
 bool ResourceManagerService::getAllClients_l(int callingPid, MediaResource::Type type,
-        MediaResource::SubType subType, Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
+        MediaResource::SubType subType,
+        PidUidVector* idVector,
+        Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     Vector<std::shared_ptr<IResourceManagerClient>> temp;
+    PidUidVector tempIdList;
+
     for (size_t i = 0; i < mMap.size(); ++i) {
         ResourceInfos &infos = mMap.editValueAt(i);
         for (size_t j = 0; j < infos.size(); ++j) {
@@ -978,6 +1047,7 @@
                     return false;
                 }
                 temp.push_back(infos[j].client);
+                tempIdList.emplace_back(mMap.keyAt(i), infos[j].uid);
             }
         }
     }
@@ -986,19 +1056,24 @@
         return true;
     }
     clients->appendVector(temp);
+    idVector->insert(std::end(*idVector), std::begin(tempIdList), std::end(tempIdList));
     return true;
 }
 
 bool ResourceManagerService::getLowestPriorityBiggestClient_l(int callingPid,
-        MediaResource::Type type, MediaResource::SubType subType,
+        MediaResource::Type type,
+        MediaResource::SubType subType,
+        PidUidVector* idVector,
         std::shared_ptr<IResourceManagerClient> *client) {
     int lowestPriorityPid;
     int lowestPriority;
     int callingPriority;
+    uid_t uid = 0;
 
     // Before looking into other processes, check if we have clients marked for
     // pending removal in the same process.
-    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, client)) {
+    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, uid, client)) {
+        idVector->emplace_back(callingPid, uid);
         return true;
     }
     if (!getPriority_l(callingPid, &callingPriority)) {
@@ -1015,9 +1090,11 @@
         return false;
     }
 
-    if (!getBiggestClient_l(lowestPriorityPid, type, subType, client)) {
+    if (!getBiggestClient_l(lowestPriorityPid, type, subType, uid, client)) {
         return false;
     }
+
+    idVector->emplace_back(lowestPriorityPid, uid);
     return true;
 }
 
@@ -1069,12 +1146,14 @@
 }
 
 bool ResourceManagerService::getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client) {
-    return getBiggestClient_l(pid, type, subType, client, true /* pendingRemovalOnly */);
+        MediaResource::SubType subType, uid_t& uid,
+        std::shared_ptr<IResourceManagerClient> *client) {
+    return getBiggestClient_l(pid, type, subType, uid, client, true /* pendingRemovalOnly */);
 }
 
 bool ResourceManagerService::getBiggestClient_l(int pid, MediaResource::Type type,
-        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client,
+        MediaResource::SubType subType, uid_t& uid,
+        std::shared_ptr<IResourceManagerClient> *client,
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -1097,6 +1176,7 @@
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
                     clientTemp = infos[i].client;
+                    uid = infos[i].uid;
                 }
             }
         }
@@ -1113,4 +1193,32 @@
     return true;
 }
 
+Status ResourceManagerService::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+    mResourceManagerMetrics->notifyClientCreated(clientInfo);
+    return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientStarted(clientConfig);
+    return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientStopped(clientConfig);
+    return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+    mResourceManagerMetrics->notifyClientConfigChanged(clientConfig);
+    return Status::ok();
+}
+
+long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
+    return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
+}
+
+long ResourceManagerService::getCurrentConcurrentPixelCount(int pid) const {
+    return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
+}
+
 } // namespace android
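With this refactor, callers identify a client through a ClientInfoParcel rather than loose pid/clientId arguments. A hedged caller-side sketch follows; the field names come from ClientInfoParcel.aidl added later in this change, while the generated header paths, the service handle, and the codec name are assumptions for illustration.

    // Hypothetical caller-side sketch (not part of this change): removing a
    // client via the ClientInfoParcel-based interface introduced here.
    #include <cstdint>
    #include <memory>

    #include <aidl/android/media/ClientInfoParcel.h>        // assumed generated header
    #include <aidl/android/media/IResourceManagerService.h>  // assumed generated header

    using ::aidl::android::media::ClientInfoParcel;
    using ::aidl::android::media::IResourceManagerService;

    void removeCodecClient(const std::shared_ptr<IResourceManagerService>& service,
                           int32_t pid, int32_t uid, int64_t clientId) {
        ClientInfoParcel clientInfo{.pid = pid,
                                    .uid = uid,
                                    .id = clientId,
                                    .name = "c2.android.avc.decoder"};  // example name
        service->removeClient(clientInfo);
    }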
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index c636a0f..1519e0e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -19,7 +19,9 @@
 #define ANDROID_MEDIA_RESOURCEMANAGERSERVICE_H
 
 #include <map>
+#include <set>
 #include <mutex>
+#include <string>
 
 #include <aidl/android/media/BnResourceManagerService.h>
 #include <arpa/inet.h>
@@ -37,26 +39,33 @@
 class ResourceObserverService;
 class ServiceLog;
 struct ProcessInfoInterface;
+class ResourceManagerMetrics;
 
 using Status = ::ndk::ScopedAStatus;
 using ::aidl::android::media::IResourceManagerClient;
 using ::aidl::android::media::BnResourceManagerService;
 using ::aidl::android::media::MediaResourceParcel;
 using ::aidl::android::media::MediaResourcePolicyParcel;
+using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
 
 typedef std::map<std::tuple<
         MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
         MediaResourceParcel> ResourceList;
 
 struct ResourceInfo {
-    int64_t clientId;
     uid_t uid;
+    int64_t clientId;
+    std::string name;
     std::shared_ptr<IResourceManagerClient> client;
     uintptr_t cookie{0};
     ResourceList resources;
     bool pendingRemoval{false};
 };
 
+// vector of <PID, UID>
+typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
+
 // TODO: convert these to std::map
 typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
 typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
@@ -85,31 +94,40 @@
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
 
-    Status addResource(int32_t pid, int32_t uid, int64_t clientId,
-            const std::shared_ptr<IResourceManagerClient>& client,
-            const std::vector<MediaResourceParcel>& resources) override;
+    Status addResource(const ClientInfoParcel& clientInfo,
+                       const std::shared_ptr<IResourceManagerClient>& client,
+                       const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeResource(int32_t pid, int64_t clientId,
-            const std::vector<MediaResourceParcel>& resources) override;
+    Status removeResource(const ClientInfoParcel& clientInfo,
+                          const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeClient(int32_t pid, int64_t clientId) override;
+    Status removeClient(const ClientInfoParcel& clientInfo) override;
 
     // Tries to reclaim resource from processes with lower priority than the calling process
     // according to the requested resources.
     // Returns true if any resource has been reclaimed, otherwise returns false.
-    Status reclaimResource(int32_t callingPid, const std::vector<MediaResourceParcel>& resources,
-            bool* _aidl_return) override;
+    Status reclaimResource(const ClientInfoParcel& clientInfo,
+                           const std::vector<MediaResourceParcel>& resources,
+                           bool* _aidl_return) override;
 
-    Status overridePid(int originalPid, int newPid) override;
+    Status overridePid(int32_t originalPid, int32_t newPid) override;
 
-    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client, int pid,
-            int procState, int oomScore) override;
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client,
+                               int32_t pid, int32_t procState, int32_t oomScore) override;
 
-    Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
+    Status markClientForPendingRemoval(const ClientInfoParcel& clientInfo) override;
 
     Status reclaimResourcesFromClientsPendingRemoval(int32_t pid) override;
 
-    Status removeResource(int pid, int64_t clientId, bool checkValid);
+    Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
+
+    Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+    Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
+    Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
 
 private:
     friend class ResourceManagerServiceTest;
@@ -124,13 +142,15 @@
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
     bool getAllClients_l(int callingPid, MediaResource::Type type, MediaResource::SubType subType,
+            PidUidVector* idList,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
     bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
-            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, PidUidVector* idList,
+            std::shared_ptr<IResourceManagerClient> *client);
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
@@ -141,17 +161,19 @@
     // Returns false with no change to client if there are no clients holding resources of this
     // type.
     bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
-            std::shared_ptr<IResourceManagerClient> *client,
+            uid_t& uid, std::shared_ptr<IResourceManagerClient> *client,
             bool pendingRemovalOnly = false);
     // Same method as above, but with pendingRemovalOnly as true.
     bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
-            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, uid_t& uid,
+            std::shared_ptr<IResourceManagerClient> *client);
 
     bool isCallingPriorityHigher_l(int callingPid, int pid);
 
     // A helper function basically calls getLowestPriorityBiggestClient_l and add
     // the result client to the given Vector.
     void getClientForResource_l(int callingPid, const MediaResourceParcel *res,
+            PidUidVector* idList,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     void onFirstAdded(const MediaResourceParcel& res, const ResourceInfo& clientInfo);
@@ -171,6 +193,15 @@
     void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
                                  uintptr_t cookie);
 
+    void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+                         const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+                         const PidUidVector& idList, bool reclaimed);
+
+    // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+    long getPeakConcurrentPixelCount(int pid) const;
+    // Get the current concurrent pixel count (associated with the video codecs) for the process.
+    long getCurrentConcurrentPixelCount(int pid) const;
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
     sp<SystemCallbackInterface> mSystemCB;
@@ -191,6 +222,7 @@
     static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
             GUARDED_BY(sCookieLock);
     std::shared_ptr<ResourceObserverService> mObserverService;
+    std::unique_ptr<ResourceManagerMetrics> mResourceManagerMetrics;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 4e97406..ebe3903 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -100,8 +100,10 @@
 std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
     std::shared_ptr<ResourceObserverService> observerService =
             ::ndk::SharedRefBase::make<ResourceObserverService>();
-    binder_status_t status = AServiceManager_addService(observerService->asBinder().get(),
-            ResourceObserverService::getServiceName());
+    binder_status_t status = AServiceManager_addServiceWithFlags(
+      observerService->asBinder().get(), ResourceObserverService::getServiceName(),
+      AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
+
     if (status != STATUS_OK) {
         return nullptr;
     }
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
new file mode 100644
index 0000000..4bcd325
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -0,0 +1,182 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "UidObserver"
+
+#include <android/binder_process.h>
+#include <mediautils/ProcessInfo.h>
+#include "UidObserver.h"
+
+namespace {
+const char* kActivityServiceName = "activity";
+}; // namespace anonymous
+
+namespace android {
+
+UidObserver::UidObserver(const sp<ProcessInfoInterface>& processInfo,
+                         OnProcessTerminated onProcessTerminated) :
+     mRegistered(false),
+     mOnProcessTerminated(std::move(onProcessTerminated)),
+     mProcessInfo(processInfo) {
+}
+
+UidObserver::~UidObserver() {
+    stop();
+}
+
+void UidObserver::start() {
+    // Use checkService() to see if the activity service is available.
+    // If it isn't available yet, register for a notification instead of
+    // blocking until the service is ready.
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+    if (!binder) {
+        sm->registerForNotifications(String16(kActivityServiceName), this);
+    } else {
+        registerWithActivityManager();
+    }
+}
+
+void UidObserver::stop() {
+    std::scoped_lock lock{mLock};
+
+    if (mRegistered) {
+        // Unregister from ActivityManager.
+        mAm.unregisterUidObserver(this);
+        mAm.unlinkToDeath(this);
+        mRegistered = false;
+    }
+}
+
+void UidObserver::add(int pid, uid_t uid) {
+    bool needToRegister = false;
+    {
+        std::scoped_lock lock(mLock);
+        std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+        if (found != mUids.end()) {
+            found->second.insert(pid);
+        } else {
+            std::set<int32_t> pids{pid};
+            mUids.emplace(uid, std::move(pids));
+        }
+        needToRegister = !mRegistered;
+    }
+    if (needToRegister) {
+        start();
+    }
+}
+
+void UidObserver::registerWithActivityManager() {
+    std::scoped_lock lock{mLock};
+
+    if (mRegistered) {
+        return;
+    }
+    status_t res = mAm.linkToDeath(this);
+    // Register for UID gone.
+    mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE,
+                            ActivityManager::PROCESS_STATE_UNKNOWN,
+                            String16("mediaserver"));
+    if (res == OK) {
+        mRegistered = true;
+        ALOGV("UidObserver: Registered with ActivityManager");
+    }
+}
+
+void UidObserver::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+    if (name != String16(kActivityServiceName)) {
+        return;
+    }
+
+    registerWithActivityManager();
+}
+
+void UidObserver::getTerminatedProcesses(const std::vector<int32_t>& pids,
+                                         std::vector<int32_t>& terminatedPids) {
+    std::vector<bool> existent;
+    terminatedPids.clear();
+    if (mProcessInfo->checkProcessExistent(pids, &existent)) {
+        for (size_t index = 0; index < existent.size(); index++) {
+            if (!existent[index]) {
+                // This process has been terminated already.
+                terminatedPids.push_back(pids[index]);
+            }
+        }
+    }
+}
+
+// This callback will be issued for every UID that is gone/terminated.
+// Since one UID could have multiple PIDs, this callback can be issued
+// multiple times for that same UID.
+// So we need to check which of the PIDs (that share the same UID)
+// are actually gone.
+void UidObserver::onUidGone(uid_t uid, bool /*disabled*/) {
+    std::vector<int32_t> terminatedPids;
+    {
+        std::scoped_lock lock{mLock};
+        std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+        if (found != mUids.end()) {
+            if (found->second.size() == 1) {
+                terminatedPids.push_back(*(found->second.begin()));
+                // Only one PID. So we can remove this UID entry.
+                mUids.erase(found);
+            } else {
+                // There are multiple PIDs with the same UID.
+                // Get the list of all terminated PIDs (with the same UID)
+                std::vector<int32_t> pids;
+                std::copy(found->second.begin(), found->second.end(), std::back_inserter(pids));
+                getTerminatedProcesses(pids, terminatedPids);
+                for (int32_t pid : terminatedPids) {
+                    // Remove all the terminated PIDs
+                    found->second.erase(pid);
+                }
+                // If all PIDs under this UID have terminated, remove this UID entry.
+                if (found->second.size() == 0) {
+                    mUids.erase(uid);
+                }
+            }
+        }
+    }
+
+    for (int32_t pid : terminatedPids) {
+        mOnProcessTerminated(pid, uid);
+    }
+}
+
+void UidObserver::onUidActive(uid_t /*uid*/) {
+}
+
+void UidObserver::onUidIdle(uid_t /*uid*/, bool /*disabled*/) {
+}
+
+void UidObserver::onUidStateChanged(uid_t /*uid*/,
+                                    int32_t /*procState*/,
+                                    int64_t /*procStateSeq*/,
+                                    int32_t /*capability*/) {
+}
+
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/, int32_t /*adj*/) {
+}
+
+void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+    std::scoped_lock lock{mLock};
+    ALOGE("UidObserver: ActivityManager has died");
+    mRegistered = false;
+}
+
+}  // namespace android
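The onUidGone() handler above hinges on a uid-to-pids index: a single UID-gone event has to be resolved to the individual processes that actually exited, because several pids can share one UID. A standalone sketch of that bookkeeping follows; UidIndex and the isAlive callback are illustrative stand-ins (the real code queries ProcessInfoInterface::checkProcessExistent() and also short-circuits the single-pid case, which this sketch omits).

    // Standalone sketch (not part of this change) of the uid -> pids
    // bookkeeping that onUidGone() relies on.
    #include <cstdint>
    #include <functional>
    #include <map>
    #include <set>
    #include <sys/types.h>
    #include <vector>

    struct UidIndex {
        std::map<uid_t, std::set<int32_t>> uids;  // one UID may own several pids

        void add(int32_t pid, uid_t uid) { uids[uid].insert(pid); }

        // Resolve a UID-gone event: return the pids of this uid that are no
        // longer alive, drop them from the index, and erase the uid entry
        // once it becomes empty.
        std::vector<int32_t> onUidGone(uid_t uid,
                                       const std::function<bool(int32_t)>& isAlive) {
            std::vector<int32_t> terminated;
            auto found = uids.find(uid);
            if (found == uids.end()) {
                return terminated;
            }
            for (auto it = found->second.begin(); it != found->second.end();) {
                if (!isAlive(*it)) {
                    terminated.push_back(*it);
                    it = found->second.erase(it);
                } else {
                    ++it;
                }
            }
            if (found->second.empty()) {
                uids.erase(found);
            }
            return terminated;
        }
    };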
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
new file mode 100644
index 0000000..b5cc3b9
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -0,0 +1,116 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_UIDOBSERVER_H_
+#define ANDROID_MEDIA_UIDOBSERVER_H_
+
+#include <map>
+#include <set>
+#include <mutex>
+#include <functional>
+#include <binder/ActivityManager.h>
+#include <binder/IUidObserver.h>
+#include <binder/BinderService.h>
+
+namespace android {
+
+using OnProcessTerminated = std::function<void(int32_t pid, uid_t)>;
+
+struct ProcessInfoInterface;
+
+//
+// UidObserver class
+//
+// This class implements a callback mechanism to notify about the termination
+// of the processes/applications that are registered with this class.
+//
+// It uses ActivityManager to get notified when a UID no longer exists.
+// Since one UID could have multiple PIDs, it uses ActivityManager
+// (through ProcessInfoInterface) to query the process/application
+// state for those pids.
+//
+class UidObserver :
+        public BnUidObserver,
+        public virtual IBinder::DeathRecipient,
+        public virtual IServiceManager::LocalRegistrationCallback {
+public:
+    explicit UidObserver(const sp<ProcessInfoInterface>& processInfo,
+                         OnProcessTerminated onProcessTerminated);
+    virtual ~UidObserver();
+
+    // Start registration (with ActivityManager).
+    void start();
+    // Stop registration (with ActivityManager).
+    void stop();
+
+    // Add this pid/uid to the set of UIDs to be observed.
+    void add(int pid, uid_t uid);
+
+private:
+    UidObserver() = delete;
+    UidObserver(const UidObserver&) = delete;
+    UidObserver(UidObserver&&) = delete;
+    UidObserver& operator=(const UidObserver&) = delete;
+    UidObserver& operator=(UidObserver&&) = delete;
+
+    // IUidObserver implementation.
+    void onUidGone(uid_t uid, bool disabled) override;
+    void onUidActive(uid_t uid) override;
+    void onUidIdle(uid_t uid, bool disabled) override;
+    void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+            int32_t capability) override;
+    void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
+
+    // IServiceManager::LocalRegistrationCallback implementation.
+    void onServiceRegistration(const String16& name,
+                    const sp<IBinder>& binder) override;
+
+    // IBinder::DeathRecipient implementation.
+    void binderDied(const wp<IBinder> &who) override;
+
+    // Registers with ActivityManager for the UID-gone event
+    // to track the termination of applications.
+    void registerWithActivityManager();
+
+    /*
+     * For a list of input pids, checks whether the corresponding
+     * processes have already terminated.
+     *
+     * @param[in] pids List of pids to check whether they are terminated.
+     * @param[out] terminatedPids List of pids of terminated processes.
+     *
+     * Upon return, terminatedPids contains all the terminated pids,
+     * a subset of the input pids (in the same order).
+     * If none of the input pids have terminated, terminatedPids will be empty.
+     */
+    void getTerminatedProcesses(const std::vector<int32_t>& pids,
+                                std::vector<int32_t>& terminatedPids);
+
+    bool mRegistered = false;
+    std::mutex mLock;
+    ActivityManager mAm;
+    // Map of UID to all the PIDs associated with it,
+    // as one UID could have multiple PIDs.
+    std::map<uid_t, std::set<int32_t>> mUids;
+    OnProcessTerminated mOnProcessTerminated;
+    sp<ProcessInfoInterface> mProcessInfo;
+};
+
+}  // namespace android
+
+#endif  //ANDROID_MEDIA_UIDOBSERVER_H_
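UidObserver stays decoupled from metrics by taking an OnProcessTerminated std::function at construction: the owner binds the callback to itself and forwards each resolved pid/uid. A standalone sketch of that callback pattern follows; MetricsSink and the literal pid/uid values are illustrative only (the real owner here is ResourceManagerMetrics).

    // Standalone sketch (not part of this change) of the OnProcessTerminated
    // callback pattern used by UidObserver.
    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <sys/types.h>

    using OnProcessTerminated = std::function<void(int32_t pid, uid_t uid)>;

    struct MetricsSink {  // hypothetical stand-in for the observer's owner
        void onProcessTerminated(int32_t pid, uid_t uid) {
            std::printf("process terminated: pid=%d uid=%u\n", pid, uid);
        }
    };

    int main() {
        MetricsSink sink;
        // The owner binds the callback to itself, so UID-gone events can be
        // forwarded without the observer knowing anything about metrics.
        OnProcessTerminated onTerminated = [&sink](int32_t pid, uid_t uid) {
            sink.onProcessTerminated(pid, uid);
        };
        onTerminated(4321, 10077);  // simulate a resolved UID-gone event
        return 0;
    }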
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
new file mode 100644
index 0000000..3c9c8c7
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.ClientInfoParcel;
+import android.media.MediaResourceSubType;
+
+/**
+ * Description of a client (codec) configuration.
+ *
+ * {@hide}
+ */
+parcelable ClientConfigParcel {
+    /**
+     * Client info.
+     */
+    ClientInfoParcel clientInfo;
+
+    /**
+     * Type of codec (Audio/Video/Image).
+     */
+    MediaResourceSubType codecType;
+
+    /**
+     * true if this is an encoder, false if this is a decoder.
+     */
+    boolean isEncoder;
+
+    /**
+     * true if this is hardware codec, false otherwise.
+     */
+    boolean isHardware;
+
+    /*
+     * Video resolution of the codec when it was configured, as width and height (in pixels).
+     */
+    int width;
+    int height;
+
+    /*
+     * Timestamp (in microseconds) when this configuration was created.
+     */
+    long timeStamp;
+    /*
+     * ID associated with the codec.
+     * This will be used by the metrics to:
+     * - associate the MediaCodecStarted atom with the MediaCodecStopped atom;
+     * - correlate with the MediaCodecReported atom for codec configuration parameters.
+     */
+    long id;
+}
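A rough sketch of how the C++ (NDK backend) side might populate this parcelable before handing it to notifyClientStarted(); illustrative only. The MediaResourceSubType::kVideoCodec literal and the systemTime() helper are assumptions taken from the surrounding code, not guaranteed by this file.

// Illustrative sketch only -- not part of this patch.
#include <aidl/android/media/ClientConfigParcel.h>
#include <utils/Timers.h>  // systemTime()

using ::aidl::android::media::ClientConfigParcel;
using ::aidl::android::media::ClientInfoParcel;
using ::aidl::android::media::MediaResourceSubType;

ClientConfigParcel makeClientConfig(const ClientInfoParcel& clientInfo, int64_t codecId) {
    ClientConfigParcel config;
    config.clientInfo = clientInfo;                         // owning client (pid/uid/id/name)
    config.codecType  = MediaResourceSubType::kVideoCodec;  // assumed enum literal
    config.isEncoder  = false;                              // decoder
    config.isHardware = true;                               // hardware codec
    config.width      = 1920;
    config.height     = 1080;
    config.timeStamp  = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;  // microseconds
    config.id         = codecId;  // ties the Started/Stopped/Reported atoms together
    return config;
}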
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
new file mode 100644
index 0000000..eb4bc42
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientInfoParcel.aidl
@@ -0,0 +1,44 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Information identifying a client (codec).
+ *
+ * {@hide}
+ */
+parcelable ClientInfoParcel {
+    /**
+     * The PID of the client process.
+     */
+    int pid = -1;
+
+    /**
+     * The UID of the client process.
+     */
+    int uid = -1;
+
+    /**
+     * The ID of the client.
+     */
+    long id = 0;
+
+    /**
+     * Name of the resource associated with the client.
+     */
+    @utf8InCpp String name;
+}
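The tests and fuzzer updated later in this patch build this parcelable with C++ designated initializers; a small sketch of that pattern follows (the values are placeholders, and the codec name is hypothetical).

// Illustrative sketch only -- placeholder values.
#include <unistd.h>  // getpid(), getuid()

#include <aidl/android/media/ClientInfoParcel.h>

using ::aidl::android::media::ClientInfoParcel;

ClientInfoParcel makeClientInfo(int64_t clientId) {
    return ClientInfoParcel{.pid  = static_cast<int32_t>(getpid()),
                            .uid  = static_cast<int32_t>(getuid()),
                            .id   = clientId,                   // unique within the pid
                            .name = "c2.android.avc.decoder"};  // hypothetical codec name
}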
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 7a0a50f..5071fa3 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -19,6 +19,8 @@
 import android.media.IResourceManagerClient;
 import android.media.MediaResourceParcel;
 import android.media.MediaResourcePolicyParcel;
+import android.media.ClientInfoParcel;
+import android.media.ClientConfigParcel;
 
 /**
  * ResourceManagerService interface that keeps track of media resource
@@ -44,46 +46,40 @@
     /**
      * Add a client to a process with a list of resources.
      *
-     * @param pid pid of the client.
-     * @param uid uid of the client.
-     * @param clientId an identifier that uniquely identifies the client within the pid.
+     * @param clientInfo info of the calling client.
      * @param client interface for the ResourceManagerService to call the client.
      * @param resources an array of resources to be added.
      */
     void addResource(
-            int pid,
-            int uid,
-            long clientId,
+            in ClientInfoParcel clientInfo,
             IResourceManagerClient client,
             in MediaResourceParcel[] resources);
 
     /**
      * Remove the listed resources from a client.
      *
-     * @param pid pid from which the list of resources will be removed.
-     * @param clientId clientId within the pid from which the list of resources will be removed.
+     * @param clientInfo info of the calling client.
      * @param resources an array of resources to be removed from the client.
      */
-    void removeResource(int pid, long clientId, in MediaResourceParcel[] resources);
+    void removeResource(in ClientInfoParcel clientInfo, in MediaResourceParcel[] resources);
 
     /**
      * Remove all resources from a client.
      *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
+     * @param clientInfo info of the calling client.
      */
-    void removeClient(int pid, long clientId);
+    void removeClient(in ClientInfoParcel clientInfo);
 
     /**
      * Tries to reclaim resource from processes with lower priority than the
      * calling process according to the requested resources.
      *
-     * @param callingPid pid of the calling process.
+     * @param clientInfo info of the calling client.
      * @param resources an array of resources to be reclaimed.
      *
      * @return true if the reclaim was successful and false otherwise.
      */
-    boolean reclaimResource(int callingPid, in MediaResourceParcel[] resources);
+    boolean reclaimResource(in ClientInfoParcel clientInfo, in MediaResourceParcel[] resources);
 
     /**
      * Override the pid of original calling process with the pid of the process
@@ -120,10 +116,9 @@
     /**
      * Mark a client for pending removal
      *
-     * @param pid pid from which the client's resources will be removed.
-     * @param clientId clientId within the pid that will be removed.
+     * @param clientInfo info of the calling client.
      */
-    void markClientForPendingRemoval(int pid, long clientId);
+    void markClientForPendingRemoval(in ClientInfoParcel clientInfo);
 
     /**
      * Reclaim resources from clients pending removal, if any.
@@ -131,4 +126,46 @@
      * @param pid pid from which resources will be reclaimed.
      */
     void reclaimResourcesFromClientsPendingRemoval(int pid);
+
+    /**
+     * Notify that the client has been created.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientInfo Information of the client.
+     */
+    void notifyClientCreated(in ClientInfoParcel clientInfo);
+
+    /**
+     * Notify that the client has been started.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientStarted(in ClientConfigParcel clientConfig);
+
+    /**
+     * Notify that the client has been stopped.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientStopped(in ClientConfigParcel clientConfig);
+
+    /**
+     * Notify that the client's configuration has changed.
+     *
+     * This call is made to collect the (concurrent) metrics about the
+     * resources associated with the Codec (and also DRM sessions).
+     * This is called after notifyClientStarted (and before notifyClientStopped)
+     * to report changes to some of the configuration associated with the client.
+     *
+     * @param clientConfig Configuration information of the client.
+     */
+    void notifyClientConfigChanged(in ClientConfigParcel clientConfig);
 }
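Taken together, the reworked interface keys every call off a ClientInfoParcel and adds a created/started/stopped/config-changed notification lifecycle for metrics. Below is a hedged sketch of how a codec-side caller might drive it; error handling is omitted and the service handle, parcels, and resource list are assumed to come from elsewhere.

// Illustrative sketch only -- not part of this patch; Status results are ignored.
#include <memory>
#include <vector>

#include <aidl/android/media/IResourceManagerService.h>

using ::aidl::android::media::ClientConfigParcel;
using ::aidl::android::media::ClientInfoParcel;
using ::aidl::android::media::IResourceManagerClient;
using ::aidl::android::media::IResourceManagerService;
using ::aidl::android::media::MediaResourceParcel;

void runCodecLifecycle(const std::shared_ptr<IResourceManagerService>& service,
                       const std::shared_ptr<IResourceManagerClient>& client,
                       const ClientInfoParcel& clientInfo,
                       const ClientConfigParcel& clientConfig,
                       const std::vector<MediaResourceParcel>& resources) {
    service->notifyClientCreated(clientInfo);             // codec object created
    service->addResource(clientInfo, client, resources);  // declare held resources
    service->notifyClientStarted(clientConfig);           // codec started
    // On a mid-stream change (e.g. resolution), resend the updated config:
    service->notifyClientConfigChanged(clientConfig);
    service->notifyClientStopped(clientConfig);           // codec stopped
    service->removeClient(clientInfo);                    // release everything it held
}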
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 81c85e5..d98974f 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -40,7 +40,12 @@
         "libbinder",
         "libbinder_ndk",
         "libmedia",
+        "libmediautils",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index e4aaea0..6fa9831 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -22,8 +22,8 @@
 #include <aidl/android/media/BnResourceManagerClient.h>
 #include <media/MediaResource.h>
 #include <media/MediaResourcePolicy.h>
-#include <media/stagefright/ProcessInfoInterface.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <mediautils/ProcessInfoInterface.h>
 #include "ResourceManagerService.h"
 #include "fuzzer/FuzzedDataProvider.h"
 
@@ -135,11 +135,15 @@
 };
 
 struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const shared_ptr<ResourceManagerService>& service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
+    TestClient(int pid, int uid, const shared_ptr<ResourceManagerService>& service)
+        : mReclaimed(false), mPid(pid), mUid(uid), mService(service) {}
 
     Status reclaimResource(bool* aidlReturn) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                    .uid = static_cast<int32_t>(mUid),
+                                    .id = getId(ref<TestClient>()),
+                                    .name = ""};
+        mService->removeClient(clientInfo);
         mReclaimed = true;
         *aidlReturn = true;
         return Status::ok();
@@ -155,6 +159,7 @@
    private:
     bool mReclaimed;
     int mPid;
+    int mUid;
     shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
@@ -176,9 +181,12 @@
     static void* addResource(void* arg) {
         resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
         if (tArgs) {
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(tArgs->pid),
+                                        .uid = static_cast<int32_t>(tArgs->uid),
+                                        .id = tArgs->testClientId,
+                                        .name = ""};
             (tArgs->service)
-                ->addResource(tArgs->pid, tArgs->uid, tArgs->testClientId, tArgs->testClient,
-                              tArgs->mediaResource);
+                ->addResource(clientInfo, tArgs->testClient, tArgs->mediaResource);
         }
         return nullptr;
     }
@@ -187,10 +195,14 @@
         resourceThreadArgs* tArgs = (resourceThreadArgs*)arg;
         if (tArgs) {
             bool result;
-            (tArgs->service)->markClientForPendingRemoval(tArgs->pid, tArgs->testClientId);
-            (tArgs->service)->removeResource(tArgs->pid, tArgs->testClientId, tArgs->mediaResource);
-            (tArgs->service)->reclaimResource(tArgs->pid, tArgs->mediaResource, &result);
-            (tArgs->service)->removeClient(tArgs->pid, tArgs->testClientId);
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(tArgs->pid),
+                                        .uid = static_cast<int32_t>(tArgs->uid),
+                                        .id = tArgs->testClientId,
+                                        .name = ""};
+            (tArgs->service)->markClientForPendingRemoval(clientInfo);
+            (tArgs->service)->removeResource(clientInfo, tArgs->mediaResource);
+            (tArgs->service)->reclaimResource(clientInfo, tArgs->mediaResource, &result);
+            (tArgs->service)->removeClient(clientInfo);
             (tArgs->service)->overridePid(tArgs->pid, tArgs->pid - 1);
         }
         return nullptr;
@@ -240,7 +252,8 @@
         uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
         threadArgs[k].service = mService;
         shared_ptr<IResourceManagerClient> testClient =
-                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, threadArgs[k].uid,
+                                                       mService);
         threadArgs[k].testClient = testClient;
         threadArgs[k].testClientId = getId(testClient);
         mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
@@ -258,7 +271,7 @@
     // No resource was added with pid = 0
     int32_t pidZero = 0;
     shared_ptr<IResourceManagerClient> testClient =
-        ::ndk::SharedRefBase::make<TestClient>(pidZero, mService);
+        ::ndk::SharedRefBase::make<TestClient>(pidZero, 0, mService);
     int32_t mediaResourceType =
         mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
     int32_t mediaResourceSubType =
@@ -269,9 +282,13 @@
                                      static_cast<MedResSubType>(mediaResourceSubType),
                                      mediaResourceValue));
     bool result;
-    mService->reclaimResource(pidZero, mediaRes, &result);
-    mService->removeResource(pidZero, getId(testClient), mediaRes);
-    mService->removeClient(pidZero, getId(testClient));
+    ClientInfoParcel pidZeroClient{.pid = static_cast<int32_t>(pidZero),
+                                   .uid = static_cast<int32_t>(0),
+                                   .id = getId(testClient),
+                                   .name = ""};
+    mService->reclaimResource(pidZeroClient, mediaRes, &result);
+    mService->removeResource(pidZeroClient, mediaRes);
+    mService->removeClient(pidZeroClient);
 }
 
 void ResourceManagerServiceFuzzer::setServiceLog() {
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 618626f..f903c62 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -18,7 +18,12 @@
         "libbinder_ndk",
         "liblog",
         "libmedia",
+        "libmediautils",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -63,7 +68,12 @@
         "libbinder_ndk",
         "liblog",
         "libmedia",
+        "libmediautils",
         "libutils",
+        "libstats_media_metrics",
+        "libstatspull",
+        "libstatssocket",
+        "libactivitymanager_aidl",
     ],
     include_dirs: [
         "frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 5bf44ce..474ff0f 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -15,13 +15,14 @@
  */
 
 #include <gtest/gtest.h>
+#include <android/binder_process.h>
 
 #include "ResourceManagerService.h"
 #include <aidl/android/media/BnResourceManagerClient.h>
 #include <media/MediaResource.h>
 #include <media/MediaResourcePolicy.h>
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/ProcessInfoInterface.h>
+#include <mediautils/ProcessInfoInterface.h>
 
 namespace android {
 
@@ -122,11 +123,15 @@
 
 
 struct TestClient : public BnResourceManagerClient {
-    TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mPid(pid), mService(service) {}
+    TestClient(int pid, int uid, const std::shared_ptr<ResourceManagerService> &service)
+        : mPid(pid), mUid(uid), mService(service) {}
 
     Status reclaimResource(bool* _aidl_return) override {
-        mService->removeClient(mPid, getId(ref<TestClient>()));
+        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+                                    .uid = static_cast<int32_t>(mUid),
+                                    .id = getId(ref<TestClient>()),
+                                    .name = "none"};
+        mService->removeClient(clientInfo);
         mWasReclaimResourceCalled = true;
         *_aidl_return = true;
         return Status::ok();
@@ -148,6 +153,7 @@
 private:
     bool mWasReclaimResourceCalled = false;
     int mPid;
+    int mUid;
     std::shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
 };
@@ -192,17 +198,24 @@
         return static_cast<TestClient*>(testClient.get());
     }
 
-    ResourceManagerServiceTestBase()
-        : mSystemCB(new TestSystemCallback()),
-          mService(::ndk::SharedRefBase::make<ResourceManagerService>(
-                  new TestProcessInfo, mSystemCB)),
-          mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, mService)),
-          mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)),
-          mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
+    ResourceManagerServiceTestBase() {
+        ALOGI("ResourceManagerServiceTestBase created");
     }
 
-    std::shared_ptr<IResourceManagerClient> createTestClient(int pid) {
-        return ::ndk::SharedRefBase::make<TestClient>(pid, mService);
+    void SetUp() override {
+        // Need thread pool to receive callbacks, otherwise oneway callbacks are
+        // silently ignored.
+        ABinderProcess_startThreadPool();
+        mSystemCB = new TestSystemCallback();
+        mService = ::ndk::SharedRefBase::make<ResourceManagerService>(
+            new TestProcessInfo, mSystemCB);
+        mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
+        mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+        mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+    }
+
+    std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
+        return ::ndk::SharedRefBase::make<TestClient>(pid, uid, mService);
     }
 
     sp<TestSystemCallback> mSystemCB;
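The SetUp() comment above applies beyond these fixtures: any standalone binary that expects to receive binder callbacks (oneway ones in particular) needs a binder thread pool. A sketch of the same guard in a custom gtest main follows, assuming the test targets in this patch do not already provide an equivalent entry point.

// Illustrative sketch only -- the test targets in this patch may already
// provide an equivalent main().
#include <android/binder_process.h>
#include <gtest/gtest.h>

int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    // Without a thread pool, incoming oneway callbacks are silently dropped.
    ABinderProcess_startThreadPool();
    return RUN_ALL_TESTS();
}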
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 8739c3b..4e575f0 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -98,24 +98,36 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         resources1.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         // kTestPid2 mTestClient2
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources2.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 300));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
 
         // kTestPid2 mTestClient3
         std::vector<MediaResourceParcel> resources3;
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
+        mService->addResource(client3Info, mTestClient3, resources3);
         resources3.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources3.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
+        mService->addResource(client3Info, mTestClient3, resources3);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -138,7 +150,11 @@
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, -100));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, -100));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) the client should have been added;
@@ -155,11 +171,11 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, INT64_MAX));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, INT64_MAX));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, 10));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 10));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // Both values should saturate to INT64_MAX
@@ -170,7 +186,7 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, -10));
         resources1.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, -10));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) DrmSession resource should allow negative value addition, and value should drop accordingly
@@ -182,7 +198,7 @@
         resources1.clear();
         resources1.push_back(MediaResource(MediaResource::Type::kDrmSession, INT64_MIN));
         expected.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, INT64_MIN));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         // Expected result:
         // 1) DrmSession resource value should drop to 0, but the entry shouldn't be removed.
@@ -228,11 +244,15 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(1u, map.size());
@@ -243,7 +263,7 @@
 
         // test adding existing types to combine values
         resources1.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 100));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> expected;
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 2));
@@ -253,7 +273,7 @@
         // test adding new types (including types that differs only in subType)
         resources11.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources11.push_back(MediaResource(MediaResource::Type::kSecureCodec, MediaResource::SubType::kVideoCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         expected.clear();
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 2));
@@ -267,11 +287,15 @@
         // kTestPid1 mTestClient1
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
 
         std::vector<MediaResourceParcel> resources11;
         resources11.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(client1Info, mTestClient1, resources11);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(1u, map.size());
@@ -282,7 +306,7 @@
 
         // test partial removal
         resources11[0].value = 100;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         std::vector<MediaResourceParcel> expected;
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -291,13 +315,13 @@
 
         // test removal request with negative value, should be ignored
         resources11[0].value = -10000;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
 
         // test complete removal with overshoot value
         resources11[0].value = 1000;
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+        mService->removeResource(client1Info, resources11);
 
         expected.clear();
         expected.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -317,19 +341,35 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low to reclaim resource
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                        .uid = static_cast<int32_t>(kTestUid1),
+                                        .id = 0,
+                                        .name = "none"};
+            CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
 
             // override Low Priority Pid with High Priority Pid
             mService->overridePid(kLowPriorityPid, kHighPriorityPid);
-            CHECK_STATUS_TRUE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(clientInfo, resources, &result));
 
             // restore Low Priority Pid
             mService->overridePid(kLowPriorityPid, -1);
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
         }
     }
 
     void testMarkClientForPendingRemoval() {
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
@@ -338,24 +378,24 @@
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
             // Remove low priority clients
-            mService->removeClient(kTestPid1, getId(mTestClient1));
+            mService->removeClient(client1Info);
 
             // no lower priority client
-            CHECK_STATUS_FALSE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal from the same process got reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            mService->removeClient(client3Info);
         }
 
         {
@@ -365,30 +405,30 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal from the same process got reclaimed
             // first, even though there are lower priority process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // lower priority client got reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(client2Info, resources, &result));
             EXPECT_EQ(true, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            mService->removeClient(client3Info);
         }
 
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
+            mService->markClientForPendingRemoval(client2Info);
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
@@ -402,7 +442,7 @@
             EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
-            mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
+            mService->markClientForPendingRemoval(client3Info);
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
@@ -411,14 +451,18 @@
             EXPECT_EQ(true, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 1 which still left
-            mService->removeClient(kTestPid1, getId(mTestClient1));
+            mService->removeClient(client1Info);
         }
     }
 
     void testRemoveClient() {
         addResource();
 
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->removeClient(client2Info);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -437,11 +481,12 @@
         MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
 
         Vector<std::shared_ptr<IResourceManagerClient> > clients;
-        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &clients));
+        PidUidVector idList;
+        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &idList, &clients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
-        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &idList, &clients));
+        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &idList, &clients));
 
         EXPECT_EQ(2u, clients.size());
         // (OK to require ordering in clients[], as the pid map is sorted)
@@ -454,6 +499,19 @@
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
 
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+
         // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
         {
             addResource();
@@ -461,23 +519,23 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
@@ -487,17 +545,17 @@
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure and non-secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
 
@@ -508,29 +566,29 @@
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all non-secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another largest graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -540,28 +598,28 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -573,20 +631,20 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // secure codec from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another secure codec from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more secure codec, non-secure codec will be reclaimed.
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
@@ -598,29 +656,42 @@
         resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
 
+        ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+                                           .uid = static_cast<int32_t>(kTestUid2),
+                                           .id = 0,
+                                           .name = "none"};
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+
         // ### secure codec can't coexist with non-secure codec ###
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = false;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
-            CHECK_STATUS_FALSE(mService->reclaimResource(kMidPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
 
             // reclaim all secure codecs
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
 
@@ -630,28 +701,28 @@
             mService->mSupportsSecureWithNonSecureCodec = true;
 
             // priority too low
-            CHECK_STATUS_FALSE(mService->reclaimResource(kLowPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
-            CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
         }
 
         // ### secure codec can coexist with non-secure codec ###
@@ -662,20 +733,24 @@
             std::vector<MediaResourceParcel> resources;
             resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
 
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             // one non secure codec from lowest process got reclaimed
             EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more non-secure codec, secure codec from lowest priority process will be reclaimed
-            CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
+            CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, resources, &result));
             EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
             EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
-            mService->removeClient(kTestPid2, getId(mTestClient3));
+            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kTestPid2),
+                                        .uid = static_cast<int32_t>(kTestUid2),
+                                        .id = getId(mTestClient3),
+                                        .name = "none"};
+            mService->removeClient(clientInfo);
         }
     }
 
@@ -683,15 +758,16 @@
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
         MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
         std::shared_ptr<IResourceManagerClient> client;
+        PidUidVector idList;
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &client));
+                &idList, &client));
 
         addResource();
 
         EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, subType,
-                &client));
+                &idList, &client));
         EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
-                &client));
+                &idList, &client));
 
         // kTestPid1 is the lowest priority process with MediaResource::Type::kGraphicMemory.
         // mTestClient1 has the largest MediaResource::Type::kGraphicMemory within kTestPid1.
@@ -737,33 +813,41 @@
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid1}), mSystemCB->lastEvent());
 
         // each client should only cause 1 VIDEO_ON
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
 
         // new client request should cause VIDEO_ON
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kBattery, MediaResource::SubType::kVideoCodec, 2));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
         EXPECT_EQ(3u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid2}), mSystemCB->lastEvent());
 
         // partially remove mTestClient1's request, shouldn't be any VIDEO_OFF
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        mService->removeResource(client1Info, resources1);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove mTestClient1's request, should be VIDEO_OFF for kTestUid1
         // (use resource2 to test removing more instances than previously requested)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        mService->removeResource(client1Info, resources2);
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid1}), mSystemCB->lastEvent());
 
         // remove mTestClient2, should be VIDEO_OFF for kTestUid2
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        mService->removeClient(client2Info);
         EXPECT_EQ(5u, mSystemCB->eventCount());
         EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid2}), mSystemCB->lastEvent());
     }
@@ -776,32 +860,40 @@
         // new client request should cause CPUSET_ENABLE
         std::vector<MediaResourceParcel> resources1;
         resources1.push_back(MediaResource(MediaResource::Type::kCpuBoost, 1));
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
 
         // each client should only cause 1 CPUSET_ENABLE
-        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(client1Info, mTestClient1, resources1);
         EXPECT_EQ(2u, mSystemCB->eventCount());
 
         // new client request should cause CPUSET_ENABLE
         std::vector<MediaResourceParcel> resources2;
         resources2.push_back(MediaResource(MediaResource::Type::kCpuBoost, 2));
-        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        mService->addResource(client2Info, mTestClient2, resources2);
         EXPECT_EQ(3u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
 
         // removing mTestClient2 should not cause CPUSET_DISABLE; mTestClient1 is still active
-        mService->removeClient(kTestPid2, getId(mTestClient2));
+        mService->removeClient(client2Info);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove 1 cpuboost from mTestClient1, should not be CPUSET_DISABLE (still 1 left)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        mService->removeResource(client1Info, resources1);
         EXPECT_EQ(3u, mSystemCB->eventCount());
 
         // remove 2 cpuboost from mTestClient1, should be CPUSET_DISABLE
         // (use resource2 to test removing more than previously requested)
-        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        mService->removeResource(client1Info, resources2);
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
@@ -814,22 +906,32 @@
         std::vector<MediaResourceParcel> audioImageResources;
         audioImageResources.push_back(createNonSecureAudioCodecResource());
         audioImageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(audioImageTestClient),
-                audioImageTestClient, audioImageResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(audioImageTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, audioImageTestClient, audioImageResources);
 
         // Fail to reclaim a video codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureVideoCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
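+        // ClientInfoParcel identifying the requesting high-priority process (id 0 is a placeholder,
+        // since the requester itself is not a registered client in this test).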
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add a video codec resource
         std::vector<MediaResourceParcel> videoResources;
         videoResources.push_back(createNonSecureVideoCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoTestClient), videoTestClient,
-                videoResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, videoTestClient, videoResources);
 
         // Verify that the newly-created video codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the audio and image resources are untouched
         EXPECT_FALSE(toTestClient(audioImageTestClient)->checkIfReclaimedAndReset());
@@ -845,22 +947,32 @@
         std::vector<MediaResourceParcel> videoImageResources;
         videoImageResources.push_back(createNonSecureVideoCodecResource());
         videoImageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoImageTestClient),
-                videoImageTestClient, videoImageResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoImageTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, videoImageTestClient, videoImageResources);
 
         // Fail to reclaim an audio codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureAudioCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add an audio codec resource
         std::vector<MediaResourceParcel> audioResources;
         audioResources.push_back(createNonSecureAudioCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid2, getId(audioTestClient), audioTestClient,
-                audioResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(audioTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, audioTestClient, audioResources);
 
         // Verify that the newly-created audio codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video and image resources are untouched
         EXPECT_FALSE(toTestClient(videoImageTestClient)->checkIfReclaimedAndReset());
@@ -876,22 +988,32 @@
         std::vector<MediaResourceParcel> videoAudioResources;
         videoAudioResources.push_back(createNonSecureVideoCodecResource());
         videoAudioResources.push_back(createNonSecureAudioCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoAudioTestClient),
-                videoAudioTestClient, videoAudioResources);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(videoAudioTestClient),
+                                     .name = "none"};
+        mService->addResource(client1Info, videoAudioTestClient, videoAudioResources);
 
         // Fail to reclaim an image codec resource
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureImageCodecResource());
-        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Now add an image codec resource
         std::vector<MediaResourceParcel> imageResources;
         imageResources.push_back(createNonSecureImageCodecResource());
-        mService->addResource(kLowPriorityPid, kTestUid2, getId(imageTestClient), imageTestClient,
-                imageResources);
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(imageTestClient),
+                                     .name = "none"};
+        mService->addResource(client2Info, imageTestClient, imageResources);
 
         // Verify that the newly-created image codec resource can be reclaimed
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video and audio resources are untouched
         EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
@@ -901,20 +1023,27 @@
 
     void testReclaimResources_whenPartialResourceMatch_reclaims() {
         const int onlyUid = kTestUid1;
-        const auto onlyClient = createTestClient(kLowPriorityPid);
+        const auto onlyClient = createTestClient(kLowPriorityPid, onlyUid);
 
         std::vector<MediaResourceParcel> ownedResources;
         ownedResources.push_back(createNonSecureVideoCodecResource());
         ownedResources.push_back(createGraphicMemoryResource(100));
-        mService->addResource(kLowPriorityPid, onlyUid, getId(onlyClient), onlyClient,
-                ownedResources);
+        ClientInfoParcel onlyClientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
+                                       .uid = static_cast<int32_t>(onlyUid),
+                                       .id = getId(onlyClient),
+                                       .name = "none"};
+        mService->addResource(onlyClientInfo, onlyClient, ownedResources);
 
         // Reclaim an image codec instead of the video codec that is owned, but also reclaim
         // graphics memory, which will trigger the reclaim.
         std::vector<MediaResourceParcel> reclaimResources;
         reclaimResources.push_back(createNonSecureImageCodecResource());
         reclaimResources.push_back(createGraphicMemoryResource(100));
-        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+        ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
+                                            .uid = static_cast<int32_t>(kTestUid2),
+                                            .id = 0,
+                                            .name = "none"};
+        CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
 
         // Verify that the video codec resources (including the needed graphic memory) are reclaimed
         EXPECT_TRUE(toTestClient(onlyClient)->checkIfReclaimedAndReset());
@@ -926,200 +1055,278 @@
         const int onlyUid = kTestUid1;
 
         // secure video codec
-        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureVideoActiveClient = createTestClient(onlyPid);
+        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureVideoActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientA{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientB{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientC{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureVideoCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureVideoMarkedClient),
-                    smallSecureVideoMarkedClient, resources);
+            mService->addResource(clientA, smallSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureVideoCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureVideoMarkedClient),
-                    largeSecureVideoMarkedClient, resources);
+            mService->addResource(clientB, largeSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureVideoCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
-                    largestSecureVideoActiveClient, resources);
+            mService->addResource(clientC, largestSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureVideoMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(clientA);
+        mService->markClientForPendingRemoval(clientB);
         // don't mark the largest client
 
         // non-secure video codec
-        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientD{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientE{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureVideoMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientF{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureVideoCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureVideoMarkedClient),
-                    smallNonSecureVideoMarkedClient, resources);
+            mService->addResource(clientD, smallNonSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureVideoCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureVideoMarkedClient),
-                    largeNonSecureVideoMarkedClient, resources);
+            mService->addResource(clientE, largeNonSecureVideoMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureVideoCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureVideoActiveClient),
-                    largestNonSecureVideoActiveClient, resources);
+            mService->addResource(clientF, largestNonSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureVideoMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(clientD);
+        mService->markClientForPendingRemoval(clientE);
         // don't mark the largest client
 
         // secure audio codec
-        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureAudioActiveClient = createTestClient(onlyPid);
+        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureAudioActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientG{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientH{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientI{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureVideoActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureAudioCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureAudioMarkedClient),
-                    smallSecureAudioMarkedClient, resources);
+            mService->addResource(clientG, smallSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureAudioCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureAudioMarkedClient),
-                    largeSecureAudioMarkedClient, resources);
+            mService->addResource(clientH, largeSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureAudioCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
-                    largestSecureVideoActiveClient, resources);
+            mService->addResource(clientI, largestSecureVideoActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureAudioMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(clientG);
+        mService->markClientForPendingRemoval(clientH);
         // don't mark the largest client
 
         // non-secure audio codec
-        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientJ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientK{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureAudioMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientL{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureAudioActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureAudioCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureAudioMarkedClient),
-                    smallNonSecureAudioMarkedClient, resources);
+            mService->addResource(clientJ, smallNonSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureAudioCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureAudioMarkedClient),
-                    largeNonSecureAudioMarkedClient, resources);
+            mService->addResource(clientK, largeNonSecureAudioMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureAudioCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureAudioActiveClient),
-                    largestNonSecureAudioActiveClient, resources);
+            mService->addResource(clientL, largestNonSecureAudioActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureAudioMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(clientJ);
+        mService->markClientForPendingRemoval(clientK);
         // don't mark the largest client
 
         // secure image codec
-        const auto smallSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largeSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largestSecureImageActiveClient = createTestClient(onlyPid);
+        const auto smallSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestSecureImageActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientM{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientN{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientO{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestSecureImageActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createSecureImageCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallSecureImageMarkedClient),
-                    smallSecureImageMarkedClient, resources);
+            mService->addResource(clientM, smallSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureImageCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeSecureImageMarkedClient),
-                    largeSecureImageMarkedClient, resources);
+            mService->addResource(clientN, largeSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createSecureImageCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestSecureImageActiveClient),
-                    largestSecureImageActiveClient, resources);
+            mService->addResource(clientO, largestSecureImageActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureImageMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(clientM);
+        mService->markClientForPendingRemoval(clientN);
         // don't mark the largest client
 
         // non-secure image codec
-        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid);
-        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid);
+        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientP{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallNonSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientQ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeNonSecureImageMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientR{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestNonSecureImageActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createNonSecureImageCodecResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureImageMarkedClient),
-                    smallNonSecureImageMarkedClient, resources);
+            mService->addResource(clientP, smallNonSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureImageCodecResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureImageMarkedClient),
-                    largeNonSecureImageMarkedClient, resources);
+            mService->addResource(clientQ, largeNonSecureImageMarkedClient, resources);
             resources.clear();
             resources.push_back(createNonSecureImageCodecResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureImageActiveClient),
-                    largestNonSecureImageActiveClient, resources);
+            mService->addResource(clientR, largestNonSecureImageActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureImageMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(clientP);
+        mService->markClientForPendingRemoval(clientQ);
         // don't mark the largest client
 
         // graphic memory
-        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid);
-        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid);
-        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid);
+        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientS{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallGraphicMemoryMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientT{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeGraphicMemoryMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientU{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestGraphicMemoryActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createGraphicMemoryResource(100));
-            mService->addResource(onlyPid, onlyUid, getId(smallGraphicMemoryMarkedClient),
-                    smallGraphicMemoryMarkedClient, resources);
+            mService->addResource(clientS, smallGraphicMemoryMarkedClient, resources);
             resources.clear();
             resources.push_back(createGraphicMemoryResource(200));
-            mService->addResource(onlyPid, onlyUid, getId(largeGraphicMemoryMarkedClient),
-                    largeGraphicMemoryMarkedClient, resources);
+            mService->addResource(clientT, largeGraphicMemoryMarkedClient, resources);
             resources.clear();
             resources.push_back(createGraphicMemoryResource(300));
-            mService->addResource(onlyPid, onlyUid, getId(largestGraphicMemoryActiveClient),
-                    largestGraphicMemoryActiveClient, resources);
+            mService->addResource(clientU, largestGraphicMemoryActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallGraphicMemoryMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeGraphicMemoryMarkedClient));
+        mService->markClientForPendingRemoval(clientS);
+        mService->markClientForPendingRemoval(clientT);
         // don't mark the largest client
 
         // DRM session
-        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid);
-        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid);
-        const auto largestDrmSessionActiveClient = createTestClient(onlyPid);
+        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid, onlyUid);
+        const auto largestDrmSessionActiveClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientV{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(smallDrmSessionMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientW{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largeDrmSessionMarkedClient),
+                                 .name = "none"};
+        ClientInfoParcel clientX{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(largestDrmSessionActiveClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createDrmSessionResource(1));
-            mService->addResource(onlyPid, onlyUid, getId(smallDrmSessionMarkedClient),
-                    smallDrmSessionMarkedClient, resources);
+            mService->addResource(clientV, smallDrmSessionMarkedClient, resources);
             resources.clear();
             resources.push_back(createDrmSessionResource(2));
-            mService->addResource(onlyPid, onlyUid, getId(largeDrmSessionMarkedClient),
-                    largeDrmSessionMarkedClient, resources);
+            mService->addResource(clientW, largeDrmSessionMarkedClient, resources);
             resources.clear();
             resources.push_back(createDrmSessionResource(3));
-            mService->addResource(onlyPid, onlyUid, getId(largestDrmSessionActiveClient),
-                    largestDrmSessionActiveClient, resources);
+            mService->addResource(clientX, largestDrmSessionActiveClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(smallDrmSessionMarkedClient));
-        mService->markClientForPendingRemoval(onlyPid, getId(largeDrmSessionMarkedClient));
+        mService->markClientForPendingRemoval(clientV);
+        mService->markClientForPendingRemoval(clientW);
         // don't mark the largest client
 
         // battery
-        const auto batteryMarkedClient = createTestClient(onlyPid);
+        const auto batteryMarkedClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientY{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(batteryMarkedClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createBatteryResource());
-            mService->addResource(onlyPid, onlyUid, getId(batteryMarkedClient),
-                    batteryMarkedClient, resources);
+            mService->addResource(clientY, batteryMarkedClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(batteryMarkedClient));
+        mService->markClientForPendingRemoval(clientY);
 
         // CPU boost
-        const auto cpuBoostMarkedClient = createTestClient(onlyPid);
+        const auto cpuBoostMarkedClient = createTestClient(onlyPid, onlyUid);
+        ClientInfoParcel clientZ{.pid = static_cast<int32_t>(onlyPid),
+                                 .uid = static_cast<int32_t>(onlyUid),
+                                 .id = getId(cpuBoostMarkedClient),
+                                 .name = "none"};
         {
             std::vector<MediaResourceParcel> resources;
             resources.push_back(createCpuBoostResource());
-            mService->addResource(onlyPid, onlyUid, getId(cpuBoostMarkedClient),
-                    cpuBoostMarkedClient, resources);
+            mService->addResource(clientZ, cpuBoostMarkedClient, resources);
         }
-        mService->markClientForPendingRemoval(onlyPid, getId(cpuBoostMarkedClient));
+        mService->markClientForPendingRemoval(clientZ);
 
         // now we expect that we only reclaim resources from the biggest marked client
         EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(onlyPid).isOk());
@@ -1160,6 +1367,143 @@
         // CPU boost is not expected to be reclaimed when marked as pending removal
         EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
     }
+
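+    // Helper that fills in a ClientConfigParcel for a video codec client; used by testConcurrentCodecs() below.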
+    inline void initClientConfigParcel(bool encoder, bool hw,
+                                       int32_t width, int32_t height,
+                                       int64_t id,
+                                       const ClientInfoParcel& clientInfo,
+                                       ClientConfigParcel& clientConfig) {
+        clientConfig.codecType = MediaResource::SubType::kVideoCodec;
+        clientConfig.isEncoder = encoder;
+        clientConfig.isHardware = hw;
+        clientConfig.width = width;
+        clientConfig.height = height;
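+        // systemTime() returns nanoseconds; record the timestamp in microseconds.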
+        clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+        clientConfig.id = id;
+        clientConfig.clientInfo = clientInfo;
+    }
+
+    void testConcurrentCodecs() {
+        std::shared_ptr<IResourceManagerClient> testClient4 =
+            createTestClient(kTestPid1, kTestUid1);
+        ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(mTestClient1),
+                                     .name = "none"};
+        ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient2),
+                                     .name = "none"};
+        ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                     .uid = static_cast<int32_t>(kTestUid2),
+                                     .id = getId(mTestClient3),
+                                     .name = "none"};
+        ClientInfoParcel client4Info{.pid = static_cast<int32_t>(kTestPid1),
+                                     .uid = static_cast<int32_t>(kTestUid1),
+                                     .id = getId(testClient4),
+                                     .name = "none"};
+        ClientConfigParcel client1Config;
+        ClientConfigParcel client2Config;
+        ClientConfigParcel client3Config;
+        ClientConfigParcel client4Config;
+
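+        // Describe four codec sessions (the numeric ids below are arbitrary test values).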
+        // HW Video Encoder @ 1080P.
+        initClientConfigParcel(true, true, 1920, 1080, 11111111,
+                               client1Info, client1Config);
+        // HW Video Decoder @ 4K.
+        initClientConfigParcel(false, true, 2160, 3840, 22222222,
+                               client2Info, client2Config);
+        // SW Video Encoder @ 1080P.
+        initClientConfigParcel(true, false, 1920, 1080, 33333333,
+                               client3Info, client3Config);
+        // SW Video Decoder @ 4K.
+        initClientConfigParcel(false, false, 2160, 3840, 44444444,
+                               client4Info, client4Config);
+
+        // Start client1 at 1080P.
+        mService->notifyClientStarted(client1Config);
+        long peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        long currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+        // Stop client1.
+        mService->notifyClientStopped(client1Config);
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == 0);
+
+        // Start client1 at 1080P.
+        mService->notifyClientStarted(client1Config);
+        // Start client2 at 4K.
+        mService->notifyClientStarted(client2Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2)
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        long peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        long currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+        EXPECT_TRUE(peakPixelCountP2 == client2Config.width * client2Config.height);
+        EXPECT_TRUE(currentPixelCountP2 == client2Config.width * client2Config.height);
+
+        // Start client3 at 1080P.
+        mService->notifyClientStarted(client3Config);
+        // Start client4 at 4K.
+        mService->notifyClientStarted(client4Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2)
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(currentPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(peakPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+        EXPECT_TRUE(currentPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+
+        // Stop client4
+        mService->notifyClientStopped(client4Config);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+        // Stop client1
+        mService->notifyClientStopped(client1Config);
+
+        // Stop client2
+        mService->notifyClientStopped(client2Config);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(currentPixelCountP2 == client3Config.width * client3Config.height);
+        // Stop client3
+        mService->notifyClientStopped(client3Config);
+
+        // Verify the peak and current concurrent pixel counts for both processes
+        // (kTestPid1, kTestPid2)
+        peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+        currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+        peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+        currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+        EXPECT_TRUE(peakPixelCountP1 ==
+            (client1Config.width * client1Config.height +
+             client4Config.width * client4Config.height));
+        EXPECT_TRUE(currentPixelCountP1 == 0);
+        EXPECT_TRUE(peakPixelCountP2 ==
+            (client2Config.width * client2Config.height +
+             client3Config.width * client3Config.height));
+        EXPECT_TRUE(currentPixelCountP2 == 0);
+    }
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
@@ -1244,4 +1588,8 @@
     testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
 }
 
+TEST_F(ResourceManagerServiceTest, concurrentCodecs) {
+    testConcurrentCodecs();
+}
+
 } // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index 003569d..85769d5 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -166,11 +166,14 @@
 
 class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
 public:
-    ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
-        mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
-        mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
-        mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
-        mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+    ResourceObserverServiceTest() : ResourceManagerServiceTestBase() {}
+
+    void SetUp() override {
+        ResourceManagerServiceTestBase::SetUp();
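+        // Initialize the base fixture first, then create the observer service and observers
+        // and attach the observer service to mService.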
+        mObserverService = ::ndk::SharedRefBase::make<ResourceObserverService>();
+        mTestObserver1 = ::ndk::SharedRefBase::make<TestObserver>("observer1");
+        mTestObserver2 = ::ndk::SharedRefBase::make<TestObserver>("observer2");
+        mTestObserver3 = ::ndk::SharedRefBase::make<TestObserver>("observer3");
         mService->setObserverService(mObserverService);
     }
 
@@ -251,17 +254,31 @@
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
                    {MediaObservableType::kVideoNonSecureCodec, 1}};
 
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec.
     resources = {createSecureVideoCodecResource()};
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
 
     // Add non-secure video codec.
     resources = {createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    mService->addResource(client2Info, mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
@@ -269,7 +286,7 @@
     // Add secure & non-secure video codecs.
     resources = {createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
@@ -277,7 +294,7 @@
     // Add additional audio codecs, should be ignored.
     resources.push_back(createSecureAudioCodecResource());
     resources.push_back(createNonSecureAudioCodecResource());
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables3));
@@ -303,7 +320,11 @@
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
                    {MediaObservableType::kVideoNonSecureCodec, 3}};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
@@ -318,47 +339,61 @@
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
                    {MediaObservableType::kVideoNonSecureCodec, 1}};
 
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec to client1.
     resources = {createSecureVideoCodecResource()};
-    mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
+    mService->addResource(client1Info, mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     // Remove secure video codec. observer 1&3 should receive updates.
-    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    mService->removeResource(client1Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid1, kTestPid1, observables1));
     // Remove secure video codec again, should have no event.
-    mService->removeResource(kTestPid1, getId(mTestClient1), resources);
+    mService->removeResource(client1Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove client1, should have no event.
-    mService->removeClient(kTestPid1, getId(mTestClient1));
+    mService->removeClient(client1Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add non-secure video codec to client2.
     resources = {createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
+    mService->addResource(client2Info, mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     // Remove client2, observer 2&3 should receive updates.
-    mService->removeClient(kTestPid2, getId(mTestClient2));
+    mService->removeClient(client2Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     // Remove non-secure codec after client2 removed, should have no event.
-    mService->removeResource(kTestPid2, getId(mTestClient2), resources);
+    mService->removeResource(client2Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove client2 again, should have no event.
-    mService->removeClient(kTestPid2, getId(mTestClient2));
+    mService->removeClient(client2Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
@@ -368,13 +403,13 @@
                  createNonSecureVideoCodecResource(),
                  createSecureAudioCodecResource(),
                  createNonSecureAudioCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, should have no event.
     resources = {createSecureAudioCodecResource()};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
@@ -382,12 +417,12 @@
     // removal should be reported.
     resources = {createNonSecureAudioCodecResource(),
                  createSecureVideoCodecResource()};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     // Remove client3 entirely. Non-secure video codec removal should be reported.
-    mService->removeClient(kTestPid2, getId(mTestClient3));
+    mService->removeClient(client3Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
@@ -410,7 +445,12 @@
                  createNonSecureVideoCodecResource(4),
                  createSecureAudioCodecResource(),
                  createNonSecureAudioCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
@@ -424,7 +464,7 @@
                  createSecureVideoCodecResource(),
                  createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource(2)};
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 1},
@@ -433,7 +473,7 @@
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables3));
     // Remove client3 entirely. 2 non-secure video codecs removal should be reported.
-    mService->removeClient(kTestPid2, getId(mTestClient3));
+    mService->removeClient(client3Info);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
@@ -465,13 +505,27 @@
     // Add secure & non-secure video codecs.
     resources = {createSecureVideoCodecResource(),
                  createNonSecureVideoCodecResource()};
-    mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
+    ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+                                 .uid = static_cast<int32_t>(kTestUid1),
+                                 .id = getId(mTestClient1),
+                                 .name = "none"};
+
+    ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient2),
+                                 .name = "none"};
+
+    ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+                                 .uid = static_cast<int32_t>(kTestUid2),
+                                 .id = getId(mTestClient3),
+                                 .name = "none"};
+    mService->addResource(client3Info, mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
 
     // Remove secure & non-secure video codecs.
-    mService->removeResource(kTestPid2, getId(mTestClient3), resources);
+    mService->removeResource(client3Info, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
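The hunks above migrate the ResourceManagerService observer tests from passing (pid, uid, client id) tuples to a single ClientInfoParcel. A minimal sketch of the resulting call pattern, using only names visible in the diff (the field layout and the "none" placeholder name are taken from the test code above):

    // Build the client descriptor once, then reuse it for every service call.
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kTestPid1),
                                .uid = static_cast<int32_t>(kTestUid1),
                                .id = getId(mTestClient1),
                                .name = "none"};
    mService->addResource(clientInfo, mTestClient1, resources);  // grants the resources
    mService->removeResource(clientInfo, resources);             // releases a subset
    mService->removeClient(clientInfo);                          // drops the client entirely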
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 054a896..c91ead0 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -73,13 +73,13 @@
         return AAUDIO_ERROR_NULL;
     }
 
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     if (mNotificationClients.count(pid) == 0) {
-        sp<IBinder> binder = IInterface::asBinder(client);
-        sp<NotificationClient> notificationClient = new NotificationClient(pid, binder);
+        const sp<IBinder> binder = IInterface::asBinder(client);
+        const sp<NotificationClient> notificationClient = new NotificationClient(pid, binder);
         mNotificationClients[pid] = notificationClient;
 
-        status_t status = binder->linkToDeath(notificationClient);
+        const status_t status = binder->linkToDeath(notificationClient);
         ALOGW_IF(status != NO_ERROR, "registerClient() linkToDeath = %d\n", status);
         return AAudioConvert_androidToAAudioResult(status);
     } else {
@@ -90,12 +90,12 @@
 
 void AAudioClientTracker::unregisterClient(pid_t pid) {
     ALOGV("unregisterClient(), calling pid = %d, getpid() = %d\n", pid, getpid());
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     mNotificationClients.erase(pid);
 }
 
 int32_t AAudioClientTracker::getStreamCount(pid_t pid) {
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     auto it = mNotificationClients.find(pid);
     if (it != mNotificationClients.end()) {
         return it->second->getStreamCount();
@@ -105,18 +105,19 @@
 }
 
 aaudio_result_t
-AAudioClientTracker::registerClientStream(pid_t pid, sp<AAudioServiceStreamBase> serviceStream) {
+AAudioClientTracker::registerClientStream(
+        pid_t pid, const sp<AAudioServiceStreamBase>& serviceStream) {
     ALOGV("registerClientStream(%d,)\n", pid);
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     return getNotificationClient_l(pid)->registerClientStream(serviceStream);
 }
 
 // Find the tracker for this process and remove it.
 aaudio_result_t
 AAudioClientTracker::unregisterClientStream(pid_t pid,
-                                            sp<AAudioServiceStreamBase> serviceStream) {
+                                            const sp<AAudioServiceStreamBase>& serviceStream) {
     ALOGV("unregisterClientStream(%d,)\n", pid);
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     auto it = mNotificationClients.find(pid);
     if (it != mNotificationClients.end()) {
         ALOGV("unregisterClientStream(%d,) found NotificationClient\n", pid);
@@ -129,12 +130,12 @@
 
 void AAudioClientTracker::setExclusiveEnabled(pid_t pid, bool enabled) {
     ALOGD("%s(%d, %d)\n", __func__, pid, enabled);
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     getNotificationClient_l(pid)->setExclusiveEnabled(enabled);
 }
 
 bool AAudioClientTracker::isExclusiveEnabled(pid_t pid) {
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     return getNotificationClient_l(pid)->isExclusiveEnabled();
 }
 
@@ -158,24 +159,21 @@
         : mProcessId(pid), mBinder(binder) {
 }
 
-AAudioClientTracker::NotificationClient::~NotificationClient() {
-}
-
 int32_t AAudioClientTracker::NotificationClient::getStreamCount() {
-    std::lock_guard<std::mutex> lock(mLock);
+    const std::lock_guard<std::mutex> lock(mLock);
     return mStreams.size();
 }
 
 aaudio_result_t AAudioClientTracker::NotificationClient::registerClientStream(
-        sp<AAudioServiceStreamBase> serviceStream) {
-    std::lock_guard<std::mutex> lock(mLock);
+        const sp<AAudioServiceStreamBase>& serviceStream) {
+    const std::lock_guard<std::mutex> lock(mLock);
     mStreams.insert(serviceStream);
     return AAUDIO_OK;
 }
 
 aaudio_result_t AAudioClientTracker::NotificationClient::unregisterClientStream(
-        sp<AAudioServiceStreamBase> serviceStream) {
-    std::lock_guard<std::mutex> lock(mLock);
+        const sp<AAudioServiceStreamBase>& serviceStream) {
+    const std::lock_guard<std::mutex> lock(mLock);
     mStreams.erase(serviceStream);
     return AAUDIO_OK;
 }
@@ -189,20 +187,21 @@
         std::set<sp<AAudioServiceStreamBase>>  streamsToClose;
 
         {
-            std::lock_guard<std::mutex> lock(mLock);
+            const std::lock_guard<std::mutex> lock(mLock);
             for (const auto& serviceStream : mStreams) {
                 streamsToClose.insert(serviceStream);
             }
         }
 
         for (const auto& serviceStream : streamsToClose) {
-            aaudio_handle_t handle = serviceStream->getHandle();
+            const aaudio_handle_t handle = serviceStream->getHandle();
             ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
-            aaudioService->asAAudioServiceInterface().closeStream(handle);
+            AAudioHandleInfo handleInfo(DEFAULT_AAUDIO_SERVICE_ID, handle);
+            aaudioService->asAAudioServiceInterface().closeStream(handleInfo);
         }
         // mStreams should be empty now
     }
-    sp<NotificationClient> keep(this);
+    const sp<NotificationClient> keep(this);
     AAudioClientTracker::getInstance().unregisterClient(mProcessId);
 }
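The binderDied() hunk above follows a copy-then-act pattern: the set of streams is snapshotted while holding mLock, and the closeStream() calls run after the lock is released, since closing a stream ends up back in unregisterClientStream(), which takes the same lock. A minimal sketch of that pattern, reusing the names from the hunk:

    std::set<sp<AAudioServiceStreamBase>> streamsToClose;
    {
        const std::lock_guard<std::mutex> lock(mLock);
        streamsToClose = mStreams;  // snapshot while holding the lock
    }
    for (const auto& serviceStream : streamsToClose) {
        const aaudio_handle_t handle = serviceStream->getHandle();
        // Closing can call back into this tracker, so it must run unlocked.
        aaudioService->asAAudioServiceInterface().closeStream(
                AAudioHandleInfo(DEFAULT_AAUDIO_SERVICE_ID, handle));
    }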
 
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 2b38621..cd3b75a 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -54,10 +54,10 @@
     int32_t getStreamCount(pid_t pid);
 
     aaudio_result_t registerClientStream(pid_t pid,
-                                         android::sp<AAudioServiceStreamBase> serviceStream);
+                                         const android::sp<AAudioServiceStreamBase>& serviceStream);
 
-    aaudio_result_t unregisterClientStream(pid_t pid,
-                                           android::sp<AAudioServiceStreamBase> serviceStream);
+    aaudio_result_t unregisterClientStream(
+            pid_t pid, const android::sp<AAudioServiceStreamBase>& serviceStream);
 
     /**
      * Specify whether a process is allowed to create an EXCLUSIVE MMAP stream.
@@ -84,15 +84,17 @@
     class NotificationClient : public IBinder::DeathRecipient {
     public:
         NotificationClient(pid_t pid, const android::sp<IBinder>& binder);
-        virtual ~NotificationClient();
+        ~NotificationClient() override = default;
 
         int32_t getStreamCount();
 
         std::string dump() const;
 
-        aaudio_result_t registerClientStream(android::sp<AAudioServiceStreamBase> serviceStream);
+        aaudio_result_t registerClientStream(
+                const android::sp<AAudioServiceStreamBase>& serviceStream);
 
-        aaudio_result_t unregisterClientStream(android::sp<AAudioServiceStreamBase> serviceStream);
+        aaudio_result_t unregisterClientStream(
+                const android::sp<AAudioServiceStreamBase>& serviceStream);
 
         void setExclusiveEnabled(bool enabled) {
             mExclusiveEnabled = enabled;
@@ -103,7 +105,7 @@
         }
 
         // IBinder::DeathRecipient
-        virtual     void    binderDied(const android::wp<IBinder>& who);
+        void binderDied(const android::wp<IBinder>& who) override;
 
     private:
         mutable std::mutex                              mLock;
diff --git a/services/oboeservice/AAudioCommandQueue.cpp b/services/oboeservice/AAudioCommandQueue.cpp
index 9bd18b3..be80b12 100644
--- a/services/oboeservice/AAudioCommandQueue.cpp
+++ b/services/oboeservice/AAudioCommandQueue.cpp
@@ -25,7 +25,7 @@
 
 namespace aaudio {
 
-aaudio_result_t AAudioCommandQueue::sendCommand(std::shared_ptr<AAudioCommand> command) {
+aaudio_result_t AAudioCommandQueue::sendCommand(const std::shared_ptr<AAudioCommand>& command) {
     {
         std::scoped_lock<std::mutex> _l(mLock);
         if (!mRunning) {
diff --git a/services/oboeservice/AAudioCommandQueue.h b/services/oboeservice/AAudioCommandQueue.h
index 64442a3..ad62d8a 100644
--- a/services/oboeservice/AAudioCommandQueue.h
+++ b/services/oboeservice/AAudioCommandQueue.h
@@ -26,7 +26,7 @@
 
 namespace aaudio {
 
-typedef int32_t aaudio_command_opcode;
+using aaudio_command_opcode = int32_t;
 
 class AAudioCommandParam {
 public:
@@ -39,7 +39,7 @@
     explicit AAudioCommand(
             aaudio_command_opcode opCode, std::shared_ptr<AAudioCommandParam> param = nullptr,
             bool waitForReply = false, int64_t timeoutNanos = 0)
-            : operationCode(opCode), parameter(param), isWaitingForReply(waitForReply),
+            : operationCode(opCode), parameter(std::move(param)), isWaitingForReply(waitForReply),
               timeoutNanoseconds(timeoutNanos) { }
     virtual ~AAudioCommand() = default;
 
@@ -66,7 +66,7 @@
      * @return the result of sending the command or the result of executing the command if command
      *         need to wait for a reply. If timeout happens, AAUDIO_ERROR_TIMEOUT will be returned.
      */
-    aaudio_result_t sendCommand(std::shared_ptr<AAudioCommand> command);
+    aaudio_result_t sendCommand(const std::shared_ptr<AAudioCommand>& command);
 
     /**
      * Wait for next available command OR until the timeout is expired.
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 20e4cc5..b5ee2f2 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -162,7 +162,7 @@
         const aaudio::AAudioStreamRequest &request,
         sp<AAudioServiceEndpoint> &endpointToSteal) {
 
-    std::lock_guard<std::mutex> lock(mExclusiveLock);
+    const std::lock_guard<std::mutex> lock(mExclusiveLock);
 
     const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
 
@@ -183,19 +183,20 @@
             // and START calls. This will help preserve app compatibility.
             // An app can avoid having this happen by closing their streams when
             // the app is paused.
-            pid_t pid = VALUE_OR_FATAL(
+            const pid_t pid = VALUE_OR_FATAL(
                 aidl2legacy_int32_t_pid_t(request.getAttributionSource().pid));
             AAudioClientTracker::getInstance().setExclusiveEnabled(pid, false);
             endpointToSteal = endpoint; // return it to caller
         }
         return nullptr;
     } else {
-        sp<AAudioServiceEndpointMMAP> endpointMMap = new AAudioServiceEndpointMMAP(aaudioService);
+        const sp<AAudioServiceEndpointMMAP> endpointMMap =
+                new AAudioServiceEndpointMMAP(aaudioService);
         ALOGV("%s(), no match so try to open MMAP %p for dev %d",
               __func__, endpointMMap.get(), configuration.getDeviceId());
         endpoint = endpointMMap;
 
-        aaudio_result_t result = endpoint->open(request);
+        const aaudio_result_t result = endpoint->open(request);
         if (result != AAUDIO_OK) {
             endpoint.clear();
         } else {
@@ -217,10 +218,10 @@
         AAudioService &aaudioService,
         const aaudio::AAudioStreamRequest &request) {
 
-    std::lock_guard<std::mutex> lock(mSharedLock);
+    const std::lock_guard<std::mutex> lock(mSharedLock);
 
     const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
-    aaudio_direction_t direction = configuration.getDirection();
+    const aaudio_direction_t direction = configuration.getDirection();
 
     // Try to find an existing endpoint.
     sp<AAudioServiceEndpointShared> endpoint = findSharedEndpoint_l(configuration);
@@ -228,7 +229,7 @@
     // If we can't find an existing one then open a new one.
     if (endpoint.get() == nullptr) {
         // we must call openStream with audioserver identity
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        const int64_t token = IPCThreadState::self()->clearCallingIdentity();
         switch (direction) {
             case AAUDIO_DIRECTION_INPUT:
                 endpoint = new AAudioServiceEndpointCapture(aaudioService);
@@ -241,7 +242,7 @@
         }
 
         if (endpoint.get() != nullptr) {
-            aaudio_result_t result = endpoint->open(request);
+            const aaudio_result_t result = endpoint->open(request);
             if (result != AAUDIO_OK) {
                 endpoint.clear();
             } else {
@@ -261,7 +262,7 @@
     return endpoint;
 }
 
-void AAudioEndpointManager::closeEndpoint(sp<AAudioServiceEndpoint>serviceEndpoint) {
+void AAudioEndpointManager::closeEndpoint(const sp<AAudioServiceEndpoint>& serviceEndpoint) {
     if (serviceEndpoint->getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
         return closeExclusiveEndpoint(serviceEndpoint);
     } else {
@@ -269,14 +270,15 @@
     }
 }
 
-void AAudioEndpointManager::closeExclusiveEndpoint(sp<AAudioServiceEndpoint> serviceEndpoint) {
+void AAudioEndpointManager::closeExclusiveEndpoint(
+        const sp<AAudioServiceEndpoint>& serviceEndpoint) {
     if (serviceEndpoint.get() == nullptr) {
         return;
     }
 
     // Decrement the reference count under this lock.
-    std::lock_guard<std::mutex> lock(mExclusiveLock);
-    int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
+    const std::lock_guard<std::mutex> lock(mExclusiveLock);
+    const int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
     serviceEndpoint->setOpenCount(newRefCount);
 
     // If no longer in use then actually close it.
@@ -292,14 +294,14 @@
     }
 }
 
-void AAudioEndpointManager::closeSharedEndpoint(sp<AAudioServiceEndpoint> serviceEndpoint) {
+void AAudioEndpointManager::closeSharedEndpoint(const sp<AAudioServiceEndpoint>& serviceEndpoint) {
     if (serviceEndpoint.get() == nullptr) {
         return;
     }
 
     // Decrement the reference count under this lock.
-    std::lock_guard<std::mutex> lock(mSharedLock);
-    int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
+    const std::lock_guard<std::mutex> lock(mSharedLock);
+    const int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
     serviceEndpoint->setOpenCount(newRefCount);
 
     // If no longer in use then actually close it.
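Both close paths above share the same reference-counting shape: decrement the endpoint's open count under the matching lock, and only tear the endpoint down once the last user is gone. A condensed sketch of the shared variant (the zero-count branch is paraphrased from the comment; its exact body lies outside the hunk, and the exclusive variant differs only in using mExclusiveLock):

    // Decrement under the lock that guards this endpoint flavor.
    const std::lock_guard<std::mutex> lock(mSharedLock);
    const int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
    serviceEndpoint->setOpenCount(newRefCount);
    if (newRefCount <= 0) {          // assumed condition: last user is gone
        serviceEndpoint->close();    // actually release the endpoint (body not shown in the hunk)
    }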
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index b07bcef..1d38d26 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -61,7 +61,7 @@
     android::sp<AAudioServiceEndpoint> openEndpoint(android::AAudioService &audioService,
                                         const aaudio::AAudioStreamRequest &request);
 
-    void closeEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
+    void closeEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
 
 private:
     android::sp<AAudioServiceEndpoint> openExclusiveEndpoint(android::AAudioService &aaudioService,
@@ -79,8 +79,8 @@
             const AAudioStreamConfiguration& configuration)
             REQUIRES(mSharedLock);
 
-    void closeExclusiveEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
-    void closeSharedEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
+    void closeExclusiveEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
+    void closeSharedEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
 
     // Use separate locks because opening a Shared endpoint requires opening an Exclusive one.
     // That could cause a recursive lock.
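The comment above is why the manager keeps mExclusiveLock and mSharedLock distinct: per that comment, opening a shared endpoint ends up opening an exclusive one underneath. A schematic sketch of the nesting; the function names here are illustrative, not the manager's real methods:

    // Illustrative only: shows why a single non-recursive mutex would self-deadlock.
    sp<AAudioServiceEndpoint> openSharedSketch() {
        const std::lock_guard<std::mutex> sharedLock(mSharedLock);       // outer lock
        // A cache miss makes the shared endpoint open its device, which
        // eventually reaches the exclusive path below.
        return openExclusiveSketch();
    }
    sp<AAudioServiceEndpoint> openExclusiveSketch() {
        const std::lock_guard<std::mutex> exclusiveLock(mExclusiveLock); // different lock, no deadlock
        return nullptr;  // placeholder body
    }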
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index ad4b830..6890985 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -45,7 +45,8 @@
     memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
 }
 
-int32_t AAudioMixer::mix(int streamIndex, std::shared_ptr<FifoBuffer> fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(
+        int streamIndex, const std::shared_ptr<FifoBuffer>& fifo, bool allowUnderflow) {
     WrappingBuffer wrappingBuffer;
     float *destination = mOutputBuffer.get();
 
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index 1a120f2..d773178 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -24,7 +24,7 @@
 
 class AAudioMixer {
 public:
-    AAudioMixer() {}
+    AAudioMixer() = default;
 
     void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
 
@@ -37,7 +37,9 @@
      * @param allowUnderflow if true then allow mixer to advance read index past the write index
      * @return frames read from this stream
      */
-    int32_t mix(int streamIndex, std::shared_ptr<android::FifoBuffer> fifo, bool allowUnderflow);
+    int32_t mix(int streamIndex,
+                const std::shared_ptr<android::FifoBuffer>& fifo,
+                bool allowUnderflow);
 
     float *getOutputBuffer();
 
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 2679b2e..e9c0884 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -62,7 +62,7 @@
     AAudioClientTracker::getInstance().setAAudioService(this);
 }
 
-status_t AAudioService::dump(int fd, const Vector<String16>& args) {
+status_t AAudioService::dump(int fd, const Vector<String16>& /*args*/) {
     std::string result;
 
     if (!dumpAllowed()) {
@@ -83,7 +83,7 @@
 }
 
 Status AAudioService::registerClient(const sp<IAAudioClient> &client) {
-    pid_t pid = IPCThreadState::self()->getCallingPid();
+    const pid_t pid = IPCThreadState::self()->getCallingPid();
     AAudioClientTracker::getInstance().registerClient(pid, client);
     return Status::ok();
 }
@@ -106,24 +106,24 @@
     // 4) Thread A can then get the lock and also open a shared stream.
     // Without the lock. Thread A might sneak in and reallocate an exclusive stream
     // before B can open the shared stream.
-    std::unique_lock<std::recursive_mutex> lock(mOpenLock);
+    const std::unique_lock<std::recursive_mutex> lock(mOpenLock);
 
     aaudio_result_t result = AAUDIO_OK;
     sp<AAudioServiceStreamBase> serviceStream;
     const AAudioStreamConfiguration &configurationInput = request.getConstantConfiguration();
-    bool sharingModeMatchRequired = request.isSharingModeMatchRequired();
-    aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
+    const bool sharingModeMatchRequired = request.isSharingModeMatchRequired();
+    const aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
 
     // Enforce limit on client processes.
     AttributionSourceState attributionSource = request.getAttributionSource();
-    pid_t pid = IPCThreadState::self()->getCallingPid();
+    const pid_t pid = IPCThreadState::self()->getCallingPid();
     attributionSource.pid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
         legacy2aidl_pid_t_int32_t(pid));
     attributionSource.uid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
         legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
     attributionSource.token = sp<BBinder>::make();
     if (attributionSource.pid != mAudioClient.attributionSource.pid) {
-        int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
+        const int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
         if (count >= MAX_STREAMS_PER_PROCESS) {
             ALOGE("openStream(): exceeded max streams per process %d >= %d",
                   count,  MAX_STREAMS_PER_PROCESS);
@@ -168,7 +168,7 @@
         serviceStream.clear();
         AIDL_RETURN(result);
     } else {
-        aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
+        const aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
         serviceStream->setHandle(handle);
         AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
         paramsOut.copyFrom(*serviceStream);
@@ -185,7 +185,7 @@
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
     // Check permission and ownership first.
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -197,13 +197,13 @@
                                            int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
     }
     AudioEndpointParcelable endpointParcelable;
-    aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
+    const aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
     if (result == AAUDIO_OK) {
         *endpoint = std::move(endpointParcelable).parcelable();
     }
@@ -213,7 +213,7 @@
 Status AAudioService::startStream(int32_t streamHandle, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -224,7 +224,7 @@
 Status AAudioService::pauseStream(int32_t streamHandle, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -235,7 +235,7 @@
 Status AAudioService::stopStream(int32_t streamHandle, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -246,7 +246,7 @@
 Status AAudioService::flushStream(int32_t streamHandle, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -254,16 +254,16 @@
     AIDL_RETURN(serviceStream->flush());
 }
 
-Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
-                                          int32_t *_aidl_return) {
+Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId,
+                                          int64_t /*periodNanoseconds*/, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
     }
-    int32_t priority = isCallerInService()
+    const int32_t priority = isCallerInService()
         ? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
     AIDL_RETURN(serviceStream->registerAudioThread(clientThreadId, priority));
 }
@@ -272,7 +272,7 @@
                                             int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -283,13 +283,13 @@
 Status AAudioService::exitStandby(int32_t streamHandle, Endpoint* endpoint, int32_t *_aidl_return) {
     static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
 
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
         AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
     }
     AudioEndpointParcelable endpointParcelable;
-    aaudio_result_t result = serviceStream->exitStandby(&endpointParcelable);
+    const aaudio_result_t result = serviceStream->exitStandby(&endpointParcelable);
     if (result == AAUDIO_OK) {
         *endpoint = std::move(endpointParcelable).parcelable();
     }
@@ -297,16 +297,18 @@
 }
 
 bool AAudioService::isCallerInService() {
-    pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
-    uid_t clientUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
+    const pid_t clientPid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
+    const uid_t clientUid = VALUE_OR_FATAL(
+            aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
     return clientPid == IPCThreadState::self()->getCallingPid() &&
         clientUid == IPCThreadState::self()->getCallingUid();
 }
 
-aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
+aaudio_result_t AAudioService::closeStream(const sp<AAudioServiceStreamBase>& serviceStream) {
     // This is protected by a lock in AAudioClientTracker.
     // It is safe to unregister the same stream twice.
-    pid_t pid = serviceStream->getOwnerProcessId();
+    const pid_t pid = serviceStream->getOwnerProcessId();
     AAudioClientTracker::getInstance().unregisterClientStream(pid, serviceStream);
     // This is protected by a lock in mStreamTracker.
     // It is safe to remove the same stream twice.
@@ -325,10 +327,10 @@
         const uid_t ownerUserId = serviceStream->getOwnerUserId();
         const uid_t clientUid = VALUE_OR_FATAL(
             aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
-        bool callerOwnsIt = callingUserId == ownerUserId;
-        bool serverCalling = callingUserId == clientUid;
-        bool serverOwnsIt = ownerUserId == clientUid;
-        bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
+        const bool callerOwnsIt = callingUserId == ownerUserId;
+        const bool serverCalling = callingUserId == clientUid;
+        const bool serverOwnsIt = ownerUserId == clientUid;
+        const bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
         if (!allowed) {
             ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
                   callingUserId, streamHandle, ownerUserId);
@@ -342,7 +344,7 @@
                                            const android::AudioClient& client,
                                            const audio_attributes_t *attr,
                                            audio_port_handle_t *clientHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
@@ -352,7 +354,7 @@
 
 aaudio_result_t AAudioService::stopClient(aaudio_handle_t streamHandle,
                                           audio_port_handle_t portHandle) {
-    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
     if (serviceStream.get() == nullptr) {
         ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
@@ -364,14 +366,14 @@
 // So we do not have to check permissions.
 aaudio_result_t AAudioService::disconnectStreamByPortHandle(audio_port_handle_t portHandle) {
     ALOGD("%s(%d) called", __func__, portHandle);
-    sp<AAudioServiceStreamBase> serviceStream =
+    const sp<AAudioServiceStreamBase> serviceStream =
             mStreamTracker.findStreamByPortHandle(portHandle);
     if (serviceStream.get() == nullptr) {
         ALOGE("%s(), could not find stream with portHandle = %d", __func__, portHandle);
         return AAUDIO_ERROR_INVALID_HANDLE;
     }
     // This is protected by a lock and will just return if already stopped.
-    aaudio_result_t result = serviceStream->stop();
+    const aaudio_result_t result = serviceStream->stop();
     serviceStream->disconnect();
     return result;
 }
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index 0a111fb..ada3d53 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -36,6 +36,7 @@
 namespace android {
 
 #define AAUDIO_SERVICE_NAME  "media.aaudio"
+#define DEFAULT_AAUDIO_SERVICE_ID 0
 
 class AAudioService :
     public BinderService<AAudioService>,
@@ -45,7 +46,7 @@
 
 public:
     AAudioService();
-    virtual ~AAudioService() = default;
+    ~AAudioService() override = default;
 
     aaudio::AAudioServiceInterface& asAAudioServiceInterface() {
         return mAdapter;
@@ -53,7 +54,7 @@
 
     static const char* getServiceName() { return AAUDIO_SERVICE_NAME; }
 
-    virtual status_t        dump(int fd, const Vector<String16>& args) override;
+    status_t dump(int fd, const Vector<String16>& args) override;
 
     binder::Status registerClient(const ::android::sp<::aaudio::IAAudioClient>& client) override;
 
@@ -103,25 +104,27 @@
      * This is only called from within the Service.
      * It bypasses the permission checks in closeStream(handle).
      */
-    aaudio_result_t closeStream(sp<aaudio::AAudioServiceStreamBase> serviceStream);
+    aaudio_result_t closeStream(const sp<aaudio::AAudioServiceStreamBase>& serviceStream);
 
 private:
     class Adapter : public aaudio::AAudioBinderAdapter {
     public:
+        // Always use the default service id on the server side, since the
+        // aaudio service restarts whenever it crashes.
         explicit Adapter(AAudioService *service)
-                : aaudio::AAudioBinderAdapter(service),
+                : aaudio::AAudioBinderAdapter(service, DEFAULT_AAUDIO_SERVICE_ID),
                   mService(service) {}
 
-        aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+        aaudio_result_t startClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
                                     const android::AudioClient &client,
                                     const audio_attributes_t *attr,
                                     audio_port_handle_t *clientHandle) override {
-            return mService->startClient(streamHandle, client, attr, clientHandle);
+            return mService->startClient(streamHandleInfo.getHandle(), client, attr, clientHandle);
         }
 
-        aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+        aaudio_result_t stopClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
                                    audio_port_handle_t clientHandle) override {
-            return mService->stopClient(streamHandle, clientHandle);
+            return mService->stopClient(streamHandleInfo.getHandle(), clientHandle);
         }
 
     private:
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b55b601..e7d14a0 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -69,6 +69,10 @@
     result << "    Reference Count:      " << mOpenCount << "\n";
     result << "    Session Id:           " << getSessionId() << "\n";
     result << "    Privacy Sensitive:    " << isPrivacySensitive() << "\n";
+    result << "    Hardware Channel Count:" << getHardwareSamplesPerFrame() << "\n";
+    result << "    Hardware Format:      " << getHardwareFormat() << " ("
+                                           << audio_format_to_string(getHardwareFormat()) << ")\n";
+    result << "    Hardware Sample Rate: " << getHardwareSampleRate() << "\n";
     result << "    Connected:            " << mConnected.load() << "\n";
     result << "    Registered Streams:" << "\n";
     result << AAudioServiceStreamShared::dumpHeader() << "\n";
@@ -84,7 +88,7 @@
 
 // @return true if stream found
 bool AAudioServiceEndpoint::isStreamRegistered(audio_port_handle_t portHandle) {
-    std::lock_guard<std::mutex> lock(mLockStreams);
+    const std::lock_guard<std::mutex> lock(mLockStreams);
     for (const auto& stream : mRegisteredStreams) {
         if (stream->getPortHandle() == portHandle) {
             return true;
@@ -97,7 +101,7 @@
         AAudioServiceEndpoint::disconnectRegisteredStreams() {
     std::vector<android::sp<AAudioServiceStreamBase>> streamsDisconnected;
     {
-        std::lock_guard<std::mutex> lock(mLockStreams);
+        const std::lock_guard<std::mutex> lock(mLockStreams);
         mRegisteredStreams.swap(streamsDisconnected);
     }
     mConnected.store(false);
@@ -118,7 +122,7 @@
 
 void AAudioServiceEndpoint::releaseRegisteredStreams() {
     // List of streams to be closed after we disconnect everything.
-    std::vector<android::sp<AAudioServiceStreamBase>> streamsToClose
+    const std::vector<android::sp<AAudioServiceStreamBase>> streamsToClose
             = disconnectRegisteredStreams();
 
     // Close outside the lock to avoid recursive locks.
@@ -129,14 +133,14 @@
     }
 }
 
-aaudio_result_t AAudioServiceEndpoint::registerStream(sp<AAudioServiceStreamBase>stream) {
-    std::lock_guard<std::mutex> lock(mLockStreams);
+aaudio_result_t AAudioServiceEndpoint::registerStream(const sp<AAudioServiceStreamBase>& stream) {
+    const std::lock_guard<std::mutex> lock(mLockStreams);
     mRegisteredStreams.push_back(stream);
     return AAUDIO_OK;
 }
 
-aaudio_result_t AAudioServiceEndpoint::unregisterStream(sp<AAudioServiceStreamBase>stream) {
-    std::lock_guard<std::mutex> lock(mLockStreams);
+aaudio_result_t AAudioServiceEndpoint::unregisterStream(const sp<AAudioServiceStreamBase>& stream) {
+    const std::lock_guard<std::mutex> lock(mLockStreams);
     mRegisteredStreams.erase(std::remove(
             mRegisteredStreams.begin(), mRegisteredStreams.end(), stream),
                              mRegisteredStreams.end());
@@ -192,11 +196,11 @@
             : AUDIO_SOURCE_DEFAULT;
     audio_flags_mask_t flags;
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
-        flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
-                | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+        flags = AAudio_computeAudioFlagsMask(
                         params->getAllowedCapturePolicy(),
                         params->getSpatializationBehavior(),
-                        params->isContentSpatialized()));
+                        params->isContentSpatialized(),
+                        AUDIO_OUTPUT_FLAG_FAST);
     } else {
         flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
                 | AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 92004c5..dff571b 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -43,7 +43,7 @@
         , public AAudioStreamParameters {
 public:
 
-    virtual ~AAudioServiceEndpoint();
+    ~AAudioServiceEndpoint() override;
 
     virtual std::string dump() const;
 
@@ -55,9 +55,9 @@
      */
     virtual void close() = 0;
 
-    aaudio_result_t registerStream(android::sp<AAudioServiceStreamBase> stream);
+    aaudio_result_t registerStream(const android::sp<AAudioServiceStreamBase>& stream);
 
-    aaudio_result_t unregisterStream(android::sp<AAudioServiceStreamBase> stream);
+    aaudio_result_t unregisterStream(const android::sp<AAudioServiceStreamBase>& stream);
 
     virtual aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                         audio_port_handle_t *clientHandle) = 0;
@@ -65,14 +65,14 @@
     virtual aaudio_result_t stopStream(android::sp<AAudioServiceStreamBase> stream,
                                        audio_port_handle_t clientHandle) = 0;
 
-    virtual aaudio_result_t startClient(const android::AudioClient& client,
-                                        const audio_attributes_t *attr,
-                                        audio_port_handle_t *clientHandle) {
+    virtual aaudio_result_t startClient(const android::AudioClient& /*client*/,
+                                        const audio_attributes_t* /*attr*/,
+                                        audio_port_handle_t* /*clientHandle*/) {
         ALOGD("AAudioServiceEndpoint::startClient(...) AAUDIO_ERROR_UNAVAILABLE");
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    virtual aaudio_result_t stopClient(audio_port_handle_t clientHandle) {
+    virtual aaudio_result_t stopClient(audio_port_handle_t /*clientHandle*/) {
         ALOGD("AAudioServiceEndpoint::stopClient(...) AAUDIO_ERROR_UNAVAILABLE");
         return AAUDIO_ERROR_UNAVAILABLE;
     }
@@ -82,7 +82,7 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    virtual aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) {
+    virtual aaudio_result_t exitStandby(AudioEndpointParcelable* /*parcelable*/) {
         ALOGD("AAudioServiceEndpoint::exitStandby() AAUDIO_ERROR_UNAVAILABLE");
         return AAUDIO_ERROR_UNAVAILABLE;
     }
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 95bd4bb..ba8070b 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -90,5 +90,5 @@
     }
 
     ALOGD("callbackLoop() exiting");
-    return NULL; // TODO review
+    return nullptr; // TODO review
 }
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 2ca43cf..b77100d 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -30,7 +30,7 @@
 class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
 public:
     explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
-    virtual ~AAudioServiceEndpointCapture() = default;
+    ~AAudioServiceEndpointCapture() override = default;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 3f18b95..7f228c7 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -22,6 +22,7 @@
 #include <assert.h>
 #include <map>
 #include <mutex>
+#include <set>
 #include <sstream>
 #include <thread>
 #include <utils/Singleton.h>
@@ -36,7 +37,7 @@
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointMMAP.h"
 
-#define AAUDIO_BUFFER_CAPACITY_MIN    4 * 512
+#define AAUDIO_BUFFER_CAPACITY_MIN    (4 * 512)
 #define AAUDIO_SAMPLE_RATE_DEFAULT    48000
 
 // This is an estimate of the time difference between the HW and the MMAP time.
@@ -57,7 +58,7 @@
     result << "  MMAP: framesTransferred = " << mFramesTransferred.get();
     result << ", HW nanos = " << mHardwareTimeOffsetNanos;
     result << ", port handle = " << mPortHandle;
-    result << ", audio data FD = " << mAudioDataFileDescriptor;
+    result << ", audio data FD = " << mAudioDataWrapper->getDataFileDescriptor();
     result << "\n";
 
     result << "    HW Offset Micros:     " <<
@@ -68,9 +69,30 @@
     return result.str();
 }
 
+namespace {
+
+const static std::map<audio_format_t, audio_format_t> NEXT_FORMAT_TO_TRY = {
+        {AUDIO_FORMAT_PCM_FLOAT,         AUDIO_FORMAT_PCM_32_BIT},
+        {AUDIO_FORMAT_PCM_32_BIT,        AUDIO_FORMAT_PCM_24_BIT_PACKED},
+        {AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
+};
+
+audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
+    if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
+        return returnedFromAPM;
+    }
+    const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
+    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+}
+
+} // namespace
+
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
     aaudio_result_t result = AAUDIO_OK;
+    mAudioDataWrapper = std::make_unique<SharedMemoryWrapper>();
     copyFrom(request.getConstantConfiguration());
+    mRequestedDeviceId = getDeviceId();
+
     mMmapClient.attributionSource = request.getAttributionSource();
     // TODO b/182392769: use attribution source util
     mMmapClient.attributionSource.uid = VALUE_OR_FATAL(
@@ -79,43 +101,45 @@
         legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     audio_format_t audioFormat = getFormat();
+    std::set<audio_format_t> formatsTried;
+    while (true) {
+        if (formatsTried.find(audioFormat) != formatsTried.end()) {
+            // APM returned a format that has already been tried.
+            ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+            break;
+        }
+        formatsTried.insert(audioFormat);
 
-    result = openWithFormat(audioFormat);
-    if (result == AAUDIO_OK) return result;
+        audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
+        result = openWithFormat(audioFormat, &nextFormatToTry);
+        if (result != AAUDIO_ERROR_UNAVAILABLE) {
+            // Stop retrying on success, or on any error other than
+            // AAUDIO_ERROR_UNAVAILABLE.
+            ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+            break;
+        }
 
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        ALOGD("%s() FLOAT failed, perhaps due to format. Try again with 32_BIT", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_32_BIT;
-        result = openWithFormat(audioFormat);
-    }
-    if (result == AAUDIO_OK) return result;
-
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_32_BIT) {
-        ALOGD("%s() 32_BIT failed, perhaps due to format. Try again with 24_BIT_PACKED", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
-        result = openWithFormat(audioFormat);
-    }
-    if (result == AAUDIO_OK) return result;
-
-    // TODO The HAL and AudioFlinger should be recommending a format if the open fails.
-    //      But that recommendation is not propagating back from the HAL.
-    //      So for now just try something very likely to work.
-    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_24_BIT_PACKED) {
-        ALOGD("%s() 24_BIT failed, perhaps due to format. Try again with 16_BIT", __func__);
-        audioFormat = AUDIO_FORMAT_PCM_16_BIT;
-        result = openWithFormat(audioFormat);
+        nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
+        ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
+              __func__, audioFormat, nextFormatToTry);
+        audioFormat = nextFormatToTry;
+        if (audioFormat == AUDIO_FORMAT_DEFAULT) {
+            // Nothing else to try
+            break;
+        }
     }
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(audio_format_t audioFormat) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
+        audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
     aaudio_result_t result = AAUDIO_OK;
     audio_config_base_t config;
     audio_port_handle_t deviceId;
 
     const audio_attributes_t attributes = getAudioAttributesFrom(this);
 
-    mRequestedDeviceId = deviceId = getDeviceId();
+    deviceId = mRequestedDeviceId;
 
     // Fill in config
     config.format = audioFormat;
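The rewritten open() loop in the hunk above replaces the hand-unrolled FLOAT, 32_BIT, 24_BIT_PACKED, 16_BIT retries with a data-driven policy: prefer the format suggested back by audio policy, otherwise fall back through NEXT_FORMAT_TO_TRY, and never retry a format twice. A compact sketch of that policy, using the names from the hunk:

    audio_format_t format = getFormat();
    std::set<audio_format_t> formatsTried;
    aaudio_result_t result = AAUDIO_OK;
    while (true) {
        if (!formatsTried.insert(format).second) break;       // already attempted this format
        audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
        result = openWithFormat(format, &nextFormatToTry);
        if (result != AAUDIO_ERROR_UNAVAILABLE) break;        // success, or a hard error
        format = getNextFormatToTry(format, nextFormatToTry); // APM suggestion wins over the table
        if (format == AUDIO_FORMAT_DEFAULT) break;            // nothing left to try
    }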
@@ -142,30 +166,38 @@
         return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
     }
 
-    MmapStreamInterface::stream_direction_t streamDirection =
+    const MmapStreamInterface::stream_direction_t streamDirection =
             (direction == AAUDIO_DIRECTION_OUTPUT)
             ? MmapStreamInterface::DIRECTION_OUTPUT
             : MmapStreamInterface::DIRECTION_INPUT;
 
-    aaudio_session_id_t requestedSessionId = getSessionId();
+    const aaudio_session_id_t requestedSessionId = getSessionId();
     audio_session_t sessionId = AAudioConvert_aaudioToAndroidSessionId(requestedSessionId);
 
     // Open HAL stream. Set mMmapStream
-    status_t status = MmapStreamInterface::openMmapStream(streamDirection,
-                                                          &attributes,
-                                                          &config,
-                                                          mMmapClient,
-                                                          &deviceId,
-                                                          &sessionId,
-                                                          this, // callback
-                                                          mMmapStream,
-                                                          &mPortHandle);
+    ALOGD("%s trying to open MMAP stream with format=%#x, "
+          "sample_rate=%u, channel_mask=%#x, device=%d",
+          __func__, config.format, config.sample_rate,
+          config.channel_mask, deviceId);
+    const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
+                                                                &attributes,
+                                                                &config,
+                                                                mMmapClient,
+                                                                &deviceId,
+                                                                &sessionId,
+                                                                this, // callback
+                                                                mMmapStream,
+                                                                &mPortHandle);
     ALOGD("%s() mMapClient.attributionSource = %s => portHandle = %d\n",
           __func__, mMmapClient.attributionSource.toString().c_str(), mPortHandle);
     if (status != OK) {
         // This can happen if the resource is busy or the config does
         // not match the hardware.
-        ALOGD("%s() - openMmapStream() returned status %d",  __func__, status);
+        ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
+              "channel_mask=%#x",
+              __func__, status, config.format, config.sample_rate, config.channel_mask);
+        *nextFormatToTry = config.format != audioFormat ? config.format
+                                                        : *nextFormatToTry;
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
@@ -178,7 +210,7 @@
         ALOGW("%s() - openMmapStream() failed to set sessionId", __func__);
     }
 
-    aaudio_session_id_t actualSessionId =
+    const aaudio_session_id_t actualSessionId =
             (requestedSessionId == AAUDIO_SESSION_ID_NONE)
             ? AAUDIO_SESSION_ID_NONE
             : (aaudio_session_id_t) sessionId;
@@ -188,7 +220,7 @@
           __func__, audioFormat, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
-    result = createMmapBuffer(&mAudioDataFileDescriptor);
+    result = createMmapBuffer();
     if (result != AAUDIO_OK) {
         goto error;
     }
@@ -200,6 +232,9 @@
 
     setFormat(config.format);
     setSampleRate(config.sample_rate);
+    setHardwareSampleRate(getSampleRate());
+    setHardwareFormat(getFormat());
+    setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
 
     // If the position is not updated while the timestamp is updated for more than a certain amount,
     // the timestamp reported from the HAL may not be accurate. Here, a timestamp grace period is
@@ -209,6 +244,8 @@
     mTimestampGracePeriodMs = ((int64_t) kTimestampGraceBurstCount * mFramesPerBurst
             * AAUDIO_MILLIS_PER_SECOND) / getSampleRate();
 
+    mDataReportOffsetNanos = ((int64_t)mTimestampGracePeriodMs) * AAUDIO_NANOS_PER_MILLISECOND;
+
     ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
           __func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
           deviceId, getBufferCapacity());
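The grace-period lines in the hunk above derive the timeout from burst size and sample rate. A worked example under assumed values (kTimestampGraceBurstCount is defined elsewhere in this file; the value 3 below is only for illustration):

    // Assume kTimestampGraceBurstCount = 3, mFramesPerBurst = 96, sample rate = 48000 Hz.
    // mTimestampGracePeriodMs = 3 * 96 * AAUDIO_MILLIS_PER_SECOND / 48000
    //                         = 288000 / 48000 = 6 ms
    // mDataReportOffsetNanos  = 6 ms * AAUDIO_NANOS_PER_MILLISECOND = 6'000'000 ns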
@@ -221,6 +258,9 @@
 
 error:
     close();
+    // restore original requests
+    setDeviceId(mRequestedDeviceId);
+    setSessionId(requestedSessionId);
     return result;
 }
 
@@ -228,9 +268,6 @@
     if (mMmapStream != nullptr) {
         // Needs to be explicitly cleared or CTS will fail but it is not clear why.
         mMmapStream.clear();
-        // Apparently the above close is asynchronous. An attempt to open a new device
-        // right after a close can fail. Also some callbacks may still be in flight!
-        // FIXME Make closing synchronous.
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }
@@ -244,7 +281,7 @@
     if (stream != nullptr) {
         attr = getAudioAttributesFrom(stream.get());
     }
-    aaudio_result_t result = startClient(
+    const aaudio_result_t result = startClient(
             mMmapClient, stream == nullptr ? nullptr : &attr, &tempHandle);
     // When AudioFlinger is passed a valid port handle then it should not change it.
     LOG_ALWAYS_FATAL_IF(tempHandle != mPortHandle,
@@ -254,8 +291,8 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> stream,
-                                                  audio_port_handle_t clientHandle __unused) {
+aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> /*stream*/,
+                                                      audio_port_handle_t /*clientHandle*/) {
     mFramesTransferred.reset32();
 
     // Round 64-bit counter up to a multiple of the buffer capacity.
@@ -272,39 +309,31 @@
 aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
                                                        const audio_attributes_t *attr,
                                                        audio_port_handle_t *clientHandle) {
-    if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
-    status_t status = mMmapStream->start(client, attr, clientHandle);
-    return AAudioConvert_androidToAAudioResult(status);
+    return mMmapStream == nullptr
+            ? AAUDIO_ERROR_NULL
+            : AAudioConvert_androidToAAudioResult(mMmapStream->start(client, attr, clientHandle));
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
-    if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
-    aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
-    return result;
+    return mMmapStream == nullptr
+            ? AAUDIO_ERROR_NULL
+            : AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::standby() {
-    if (mMmapStream == nullptr) {
-        return AAUDIO_ERROR_NULL;
-    }
-    aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->standby());
-    return result;
+    return mMmapStream == nullptr
+            ? AAUDIO_ERROR_NULL
+            : AAudioConvert_androidToAAudioResult(mMmapStream->standby());
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
     if (mMmapStream == nullptr) {
         return AAUDIO_ERROR_NULL;
     }
-    mAudioDataFileDescriptor.reset();
-    aaudio_result_t result = createMmapBuffer(&mAudioDataFileDescriptor);
+    mAudioDataWrapper->reset();
+    const aaudio_result_t result = createMmapBuffer();
     if (result == AAUDIO_OK) {
-        int32_t bytesPerFrame = calculateBytesPerFrame();
-        int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
-        int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
-        parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
-        parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
-        parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
-        parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+        getDownDataDescription(parcelable);
     }
     return result;
 }
@@ -316,10 +345,10 @@
     if (mMmapStream == nullptr) {
         return AAUDIO_ERROR_NULL;
     }
-    status_t status = mMmapStream->getMmapPosition(&position);
+    const status_t status = mMmapStream->getMmapPosition(&position);
     ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
           __func__, status, position.position_frames, (long long) position.time_nanoseconds);
-    aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
+    const aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
     if (result == AAUDIO_ERROR_UNAVAILABLE) {
         ALOGW("%s(): getMmapPosition() has no position data available", __func__);
     } else if (result != AAUDIO_OK) {
@@ -333,8 +362,8 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::getTimestamp(int64_t *positionFrames,
-                                                    int64_t *timeNanos) {
+aaudio_result_t AAudioServiceEndpointMMAP::getTimestamp(int64_t* /*positionFrames*/,
+                                                        int64_t* /*timeNanos*/) {
     return 0; // TODO
 }
 
@@ -347,7 +376,7 @@
     } else {
         // Must be a SHARED stream?
         ALOGD("%s(%d) disconnect a specific stream", __func__, portHandle);
-        aaudio_result_t result = mAAudioService.disconnectStreamByPortHandle(portHandle);
+        const aaudio_result_t result = mAAudioService.disconnectStreamByPortHandle(portHandle);
         ALOGD("%s(%d) disconnectStreamByPortHandle returned %d", __func__, portHandle, result);
     }
 };
@@ -355,31 +384,27 @@
 // This is called by AudioFlinger when it wants to destroy a stream.
 void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
     ALOGD("%s(portHandle = %d) called", __func__, portHandle);
-    android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+    const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
     std::thread asyncTask([holdEndpoint, portHandle]() {
         holdEndpoint->handleTearDownAsync(portHandle);
     });
     asyncTask.detach();
 }
 
-void AAudioServiceEndpointMMAP::onVolumeChanged(audio_channel_mask_t channels,
-                                              android::Vector<float> values) {
-    // TODO Do we really need a different volume for each channel?
-    // We get called with an array filled with a single value!
-    float volume = values[0];
-    ALOGD("%s() volume[0] = %f", __func__, volume);
-    std::lock_guard<std::mutex> lock(mLockStreams);
+void AAudioServiceEndpointMMAP::onVolumeChanged(float volume) {
+    ALOGD("%s() volume = %f", __func__, volume);
+    const std::lock_guard<std::mutex> lock(mLockStreams);
     for(const auto& stream : mRegisteredStreams) {
         stream->onVolumeChanged(volume);
     }
 };
 
 void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t portHandle) {
-    const int32_t deviceId = static_cast<int32_t>(portHandle);
+    const auto deviceId = static_cast<int32_t>(portHandle);
     ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
     if (getDeviceId() != deviceId) {
         if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
-            android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+            const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
             std::thread asyncTask([holdEndpoint, deviceId]() {
                 ALOGD("onRoutingChanged() asyncTask launched");
                 holdEndpoint->disconnectRegisteredStreams();
@@ -398,14 +423,19 @@
 aaudio_result_t AAudioServiceEndpointMMAP::getDownDataDescription(
         AudioEndpointParcelable* parcelable)
 {
+    if (mAudioDataWrapper->setupFifoBuffer(calculateBytesPerFrame(), getBufferCapacity())
+        != AAUDIO_OK) {
+        ALOGE("Failed to setup audio data wrapper, will not be able to "
+              "set data for sound dose computation");
+        // This will not affect the audio processing capability
+    }
     // Gather information on the data queue based on HAL info.
-    int32_t bytesPerFrame = calculateBytesPerFrame();
-    int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
-    int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
-    parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
-    parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
-    parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
-    parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+    mAudioDataWrapper->fillParcelable(parcelable, parcelable->mDownDataQueueParcelable,
+                                      calculateBytesPerFrame(), mFramesPerBurst,
+                                      getBufferCapacity(),
+                                      getDirection() == AAUDIO_DIRECTION_OUTPUT
+                                              ? SharedMemoryWrapper::WRITE
+                                              : SharedMemoryWrapper::NONE);
     return AAUDIO_OK;
 }
 
@@ -417,7 +447,7 @@
     }
     uint64_t tempPositionFrames;
     int64_t tempTimeNanos;
-    status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
+    const status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
     if (status != OK) {
         // getExternalPosition reports error. The HAL may not support the API. Cache the result
         // so that the call will not go to the HAL next time.
@@ -489,16 +519,15 @@
     return mHalExternalPositionStatus;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer(
-        android::base::unique_fd* fileDescriptor)
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
 {
     memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
     int32_t minSizeFrames = getBufferCapacity();
     if (minSizeFrames <= 0) { // zero will get rejected
         minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
     }
-    status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
-    bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
+    const status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
+    const bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
     if (status != OK) {
         ALOGE("%s() - createMmapBuffer() failed with status %d %s",
               __func__, status, strerror(-status));
@@ -515,7 +544,7 @@
     setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
     if (!isBufferShareable) {
         // Exclusive mode can only be used by the service because the FD cannot be shared.
-        int32_t audioServiceUid =
+        const int32_t audioServiceUid =
             VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
         if ((mMmapClient.attributionSource.uid != audioServiceUid) &&
             getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
@@ -526,8 +555,9 @@
 
     // AAudio creates a copy of this FD and retains ownership of the copy.
     // Assume that AudioFlinger will close the original shared_memory_fd.
-    fileDescriptor->reset(dup(mMmapBufferinfo.shared_memory_fd));
-    if (fileDescriptor->get() == -1) {
+
+    mAudioDataWrapper->getDataFileDescriptor().reset(dup(mMmapBufferinfo.shared_memory_fd));
+    if (mAudioDataWrapper->getDataFileDescriptor().get() == -1) {
         ALOGE("%s() - could not dup shared_memory_fd", __func__);
         return AAUDIO_ERROR_INTERNAL;
     }
@@ -542,3 +572,31 @@
 
     return AAUDIO_OK;
 }
+
+int64_t AAudioServiceEndpointMMAP::nextDataReportTime() {
+    return getDirection() == AAUDIO_DIRECTION_OUTPUT
+            ? AudioClock::getNanoseconds() + mDataReportOffsetNanos
+            : std::numeric_limits<int64_t>::max();
+}
+
+void AAudioServiceEndpointMMAP::reportData() {
+    if (mMmapStream == nullptr) {
+        // This must not happen
+        ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
+        return;
+    }
+    auto fifo = mAudioDataWrapper->getFifoBuffer();
+    if (fifo == nullptr) {
+        ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
+        return;
+    }
+
+    WrappingBuffer wrappingBuffer;
+    fifo_frames_t framesAvailable = fifo->getFullDataAvailable(&wrappingBuffer);
+    for (size_t i = 0; i < WrappingBuffer::SIZE; ++i) {
+        if (wrappingBuffer.numFrames[i] > 0) {
+            mMmapStream->reportData(wrappingBuffer.data[i], wrappingBuffer.numFrames[i]);
+        }
+    }
+    fifo->advanceReadIndex(framesAvailable);
+}
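The reportData() path above drains whatever is currently readable from the shared FIFO and forwards it to the HAL stream for sound-dose accounting. Because a ring buffer's readable data can wrap around the end of the backing memory, a full read is returned as up to two contiguous regions. A minimal, self-contained sketch of that two-region pattern (the struct and function names are illustrative stand-ins, not the real android::FifoBuffer API):

    #include <cstddef>
    #include <cstdint>

    // Stand-in for WrappingBuffer: at most two contiguous regions of readable frames.
    struct Region { const uint8_t* data; int32_t numFrames; };
    struct TwoRegions { static constexpr size_t SIZE = 2; Region parts[SIZE]; };

    // Report every readable frame, region by region, and return how many frames were
    // consumed so the caller can advance the FIFO read index by that amount.
    template <typename Reporter>
    int32_t drainReadableFrames(const TwoRegions& readable, Reporter&& report) {
        int32_t consumed = 0;
        for (size_t i = 0; i < TwoRegions::SIZE; ++i) {
            if (readable.parts[i].numFrames > 0) {
                report(readable.parts[i].data, readable.parts[i].numFrames);
                consumed += readable.parts[i].numFrames;
            }
        }
        return consumed;
    }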
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3e7f2c7..38cf0ba 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -30,6 +30,7 @@
 #include "AAudioServiceStreamMMAP.h"
 #include "AAudioMixer.h"
 #include "AAudioService.h"
+#include "SharedMemoryWrapper.h"
 
 namespace aaudio {
 
@@ -44,7 +45,7 @@
 public:
     explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
 
-    virtual ~AAudioServiceEndpointMMAP() = default;
+    ~AAudioServiceEndpointMMAP() override = default;
 
     std::string dump() const override;
 
@@ -77,8 +78,7 @@
     // -------------- Callback functions for MmapStreamCallback ---------------------
     void onTearDown(audio_port_handle_t portHandle) override;
 
-    void onVolumeChanged(audio_channel_mask_t channels,
-                         android::Vector<float> values) override;
+    void onVolumeChanged(float volume) override;
 
     void onRoutingChanged(audio_port_handle_t portHandle) override;
     // ------------------------------------------------------------------------------
@@ -91,11 +91,15 @@
 
     aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
 
+    int64_t nextDataReportTime();
+
+    void reportData();
+
 private:
 
-    aaudio_result_t openWithFormat(audio_format_t audioFormat);
+    aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
 
-    aaudio_result_t createMmapBuffer(android::base::unique_fd* fileDescriptor);
+    aaudio_result_t createMmapBuffer();
 
     MonotonicCounter                          mFramesTransferred;
 
@@ -108,7 +112,7 @@
 
     android::AAudioService                    &mAAudioService;
 
-    android::base::unique_fd                  mAudioDataFileDescriptor;
+    std::unique_ptr<SharedMemoryWrapper>      mAudioDataWrapper;
 
     int64_t                                   mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
@@ -118,6 +122,7 @@
     int32_t                                   mTimestampGracePeriodMs;
     int32_t                                   mFrozenPositionCount = 0;
     int32_t                                   mFrozenTimestampCount = 0;
+    int64_t                                   mDataReportOffsetNanos = 0;
 
 };
 
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 2a5939f..637405d 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -158,5 +158,5 @@
 
     ALOGD("%s() exiting, enabled = %d, state = %d, result = %d <<<<<<<<<<<<< MIXER",
           __func__, mCallbackEnabled.load(), getStreamInternal()->getState(), result);
-    return NULL; // TODO review
+    return nullptr; // TODO review
 }
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index dd421fe..1dd0c3a 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -82,6 +82,9 @@
     setDeviceId(mStreamInternal->getDeviceId());
     setSessionId(mStreamInternal->getSessionId());
     setFormat(AUDIO_FORMAT_PCM_FLOAT); // force for mixer
+    setHardwareSampleRate(mStreamInternal->getHardwareSampleRate());
+    setHardwareFormat(mStreamInternal->getHardwareFormat());
+    setHardwareSamplesPerFrame(mStreamInternal->getHardwareSamplesPerFrame());
     mFramesPerBurst = mStreamInternal->getFramesPerBurst();
 
     return result;
@@ -100,8 +103,7 @@
     // Prevent the stream from being deleted while being used.
     // This is just for extra safety. It is probably not needed because
     // this callback should be joined before the stream is closed.
-    AAudioServiceEndpointShared *endpointPtr =
-        static_cast<AAudioServiceEndpointShared *>(arg);
+    auto endpointPtr = static_cast<AAudioServiceEndpointShared *>(arg);
     android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
     // Balance the incStrong() in startSharingThread_l().
     endpoint->decStrong(nullptr);
@@ -137,7 +139,7 @@
 
 aaudio_result_t aaudio::AAudioServiceEndpointShared::stopSharingThread() {
     mCallbackEnabled.store(false);
-    return getStreamInternal()->joinThread(NULL);
+    return getStreamInternal()->joinThread(nullptr);
 }
 
 aaudio_result_t AAudioServiceEndpointShared::startStream(
@@ -177,8 +179,8 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointShared::stopStream(sp<AAudioServiceStreamBase> sharedStream,
-                                                        audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceEndpointShared::stopStream(
+        sp<AAudioServiceStreamBase> /*sharedStream*/, audio_port_handle_t clientHandle) {
     // Ignore result.
     (void) getStreamInternal()->stopClient(clientHandle);
 
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index 3e760c4..0efb227 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -39,7 +39,7 @@
 public:
     explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
 
-    virtual ~AAudioServiceEndpointShared() = default;
+    ~AAudioServiceEndpointShared() override = default;
 
     std::string dump() const override;
 
@@ -79,6 +79,6 @@
     std::atomic<int>         mRunningStreamCount{0};
 };
 
-}
+} // namespace aaudio
 
 #endif //AAUDIO_SERVICE_ENDPOINT_SHARED_H
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 9f48f80..65854c8 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -83,8 +83,8 @@
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
-    return std::string(
-            "    T   Handle   UId   Port Run State Format Burst Chan Mask     Capacity");
+    return {"    T   Handle   UId   Port Run State Format Burst Chan Mask     Capacity"
+            " HwFormat HwChan HwRate"};
 }
 
 std::string AAudioServiceStreamBase::dump() const {
@@ -101,6 +101,9 @@
     result << std::setw(5) << getSamplesPerFrame();
     result << std::setw(8) << std::hex << getChannelMask() << std::dec;
     result << std::setw(9) << getBufferCapacity();
+    result << std::setw(9) << getHardwareFormat();
+    result << std::setw(7) << getHardwareSamplesPerFrame();
+    result << std::setw(7) << getHardwareSampleRate();
 
     return result.str();
 }
@@ -278,7 +281,7 @@
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
-    sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
+    sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED, static_cast<int64_t>(mClientHandle));
     setState(AAUDIO_STREAM_STATE_STARTED);
 
     return result;
@@ -401,7 +404,8 @@
     // Hold onto the ref counted stream until the end.
     android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
-    int64_t nextTime;
+    int64_t nextTimestampReportTime;
+    int64_t nextDataReportTime;
     int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
     // Balance the incStrong from when the thread was launched.
     holdStream->decStrong(nullptr);
@@ -414,8 +418,18 @@
     while (mThreadEnabled.load()) {
         loopCount++;
         int64_t timeoutNanos = -1;
-        if (isRunning() || (isIdle_l() && !isStandby_l())) {
-            timeoutNanos = (isRunning() ? nextTime : standbyTime) - AudioClock::getNanoseconds();
+        if (isDisconnected_l()) {
+            if (!isStandby_l()) {
+                // If the stream is disconnected but not in standby mode, wait until standby time.
+                timeoutNanos = standbyTime - AudioClock::getNanoseconds();
+                timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+            } // else {
+                // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
+                // -1 to wait forever until next command as the stream can only be closed.
+            // }
+        } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
+            timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
+                                        : standbyTime) - AudioClock::getNanoseconds();
             timeoutNanos = std::max<int64_t>(0, timeoutNanos);
         }
 
@@ -425,17 +439,31 @@
             break;
         }
 
-        if (isRunning() && AudioClock::getNanoseconds() >= nextTime) {
-            // It is time to update timestamp.
-            if (sendCurrentTimestamp_l() != AAUDIO_OK) {
-                ALOGE("Failed to send current timestamp, stop updating timestamp");
-                disconnect_l();
-            } else {
-                nextTime = timestampScheduler.nextAbsoluteTime();
+        if (isRunning() && !isDisconnected_l()) {
+            auto currentTimestamp = AudioClock::getNanoseconds();
+            if (currentTimestamp >= nextDataReportTime) {
+                reportData_l();
+                nextDataReportTime = nextDataReportTime_l();
+            }
+            if (currentTimestamp >= nextTimestampReportTime) {
+                // It is time to update timestamp.
+                if (sendCurrentTimestamp_l() != AAUDIO_OK) {
+                    ALOGE("Failed to send current timestamp, stop updating timestamp");
+                    disconnect_l();
+                }
+                nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
             }
         }
-        if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
-            standby_l();
+        if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
+            aaudio_result_t result = standby_l();
+            if (result != AAUDIO_OK) {
+                // If standby failed because the function is not implemented, there is no
+                // need to retry. Otherwise, retry standby later.
+                ALOGW("Failed to enter standby, error=%d", result);
+                standbyTime = result == AAUDIO_ERROR_UNIMPLEMENTED
+                        ? std::numeric_limits<int64_t>::max()
+                        : AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+            }
         }
 
         if (command != nullptr) {
@@ -445,7 +473,8 @@
                     command->result = start_l();
                     timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
                     timestampScheduler.start(AudioClock::getNanoseconds());
-                    nextTime = timestampScheduler.nextAbsoluteTime();
+                    nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
+                    nextDataReportTime = nextDataReportTime_l();
                     break;
                 case PAUSE:
                     command->result = pause_l();
@@ -465,8 +494,7 @@
                     disconnect_l();
                     break;
                 case REGISTER_AUDIO_THREAD: {
-                    RegisterAudioThreadParam *param =
-                            (RegisterAudioThreadParam *) command->parameter.get();
+                    auto param = (RegisterAudioThreadParam *) command->parameter.get();
                     command->result =
                             param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
                                              : registerAudioThread_l(param->mOwnerPid,
@@ -475,21 +503,20 @@
                 }
                     break;
                 case UNREGISTER_AUDIO_THREAD: {
-                    UnregisterAudioThreadParam *param =
-                            (UnregisterAudioThreadParam *) command->parameter.get();
+                    auto param = (UnregisterAudioThreadParam *) command->parameter.get();
                     command->result =
                             param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
                                              : unregisterAudioThread_l(param->mClientThreadId);
                 }
                     break;
                 case GET_DESCRIPTION: {
-                    GetDescriptionParam *param = (GetDescriptionParam *) command->parameter.get();
+                    auto param = (GetDescriptionParam *) command->parameter.get();
                     command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
                                                         : getDescription_l(param->mParcelable);
                 }
                     break;
                 case EXIT_STANDBY: {
-                    ExitStandbyParam *param = (ExitStandbyParam *) command->parameter.get();
+                    auto param = (ExitStandbyParam *) command->parameter.get();
                     command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
                                                        : exitStandby_l(param->mParcelable);
                     standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
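The command-loop changes above choose the next wake-up deadline from three sources: the standby deadline, the next timestamp report and the next data report. Condensed into one free function for clarity (a sketch only; the state predicates and clock value stand in for the locked helpers in AAudioServiceStreamBase):

    #include <algorithm>
    #include <cstdint>

    // Returns the wait timeout in nanoseconds, or -1 to wait indefinitely for the next
    // command (disconnected + standby: the stream can only be closed from here).
    int64_t chooseTimeoutNanos(bool disconnected, bool standby, bool running, bool idle,
                               int64_t nowNanos, int64_t standbyTime,
                               int64_t nextTimestampReportTime, int64_t nextDataReportTime) {
        int64_t timeoutNanos = -1;
        if (disconnected) {
            if (!standby) {
                // Disconnected but not yet in standby: wait until the standby deadline.
                timeoutNanos = std::max<int64_t>(0, standbyTime - nowNanos);
            }
        } else if (running || (idle && !standby)) {
            const int64_t deadline = running
                    ? std::min(nextTimestampReportTime, nextDataReportTime)
                    : standbyTime;
            timeoutNanos = std::max<int64_t>(0, deadline - nowNanos);
        }
        return timeoutNanos;
    }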
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index b2ba725..bc7ccde 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -62,7 +62,7 @@
 public:
     explicit AAudioServiceStreamBase(android::AAudioService &aAudioService);
 
-    virtual ~AAudioServiceStreamBase();
+    ~AAudioServiceStreamBase() override;
 
     enum {
         ILLEGAL_THREAD_ID = 0
@@ -254,7 +254,7 @@
         RegisterAudioThreadParam(pid_t ownerPid, pid_t clientThreadId, int priority)
                 : AAudioCommandParam(), mOwnerPid(ownerPid),
                   mClientThreadId(clientThreadId), mPriority(priority) { }
-        ~RegisterAudioThreadParam() = default;
+        ~RegisterAudioThreadParam() override = default;
 
         pid_t mOwnerPid;
         pid_t mClientThreadId;
@@ -265,9 +265,9 @@
 
     class UnregisterAudioThreadParam : public AAudioCommandParam {
     public:
-        UnregisterAudioThreadParam(pid_t clientThreadId)
+        explicit UnregisterAudioThreadParam(pid_t clientThreadId)
                 : AAudioCommandParam(), mClientThreadId(clientThreadId) { }
-        ~UnregisterAudioThreadParam() = default;
+        ~UnregisterAudioThreadParam() override = default;
 
         pid_t mClientThreadId;
     };
@@ -275,9 +275,9 @@
 
     class GetDescriptionParam : public AAudioCommandParam {
     public:
-        GetDescriptionParam(AudioEndpointParcelable* parcelable)
+        explicit GetDescriptionParam(AudioEndpointParcelable* parcelable)
                 : AAudioCommandParam(), mParcelable(parcelable) { }
-        ~GetDescriptionParam() = default;
+        ~GetDescriptionParam() override = default;
 
         AudioEndpointParcelable* mParcelable;
     };
@@ -320,13 +320,13 @@
     }
 
     virtual aaudio_result_t standby_l() REQUIRES(mLock) {
-        return AAUDIO_ERROR_UNAVAILABLE;
+        return AAUDIO_ERROR_UNIMPLEMENTED;
     }
     class ExitStandbyParam : public AAudioCommandParam {
     public:
-        ExitStandbyParam(AudioEndpointParcelable* parcelable)
+        explicit ExitStandbyParam(AudioEndpointParcelable* parcelable)
                 : AAudioCommandParam(), mParcelable(parcelable) { }
-        ~ExitStandbyParam() = default;
+        ~ExitStandbyParam() override = default;
 
         AudioEndpointParcelable* mParcelable;
     };
@@ -346,6 +346,11 @@
                 || mState == AAUDIO_STREAM_STATE_STOPPED;
     }
 
+    virtual int64_t nextDataReportTime_l() REQUIRES(mLock) {
+        return std::numeric_limits<int64_t>::max();
+    }
+    virtual void reportData_l() REQUIRES(mLock) { return; }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index ec9b2e2..89f6e33 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -238,3 +238,25 @@
             static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
     return serviceEndpointMMAP->getDownDataDescription(parcelable);
 }
+
+int64_t AAudioServiceStreamMMAP::nextDataReportTime_l() {
+    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+    if (endpoint == nullptr) {
+        ALOGE("%s() has no endpoint", __func__);
+        return std::numeric_limits<int64_t>::max();
+    }
+    sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+            static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+    return serviceEndpointMMAP->nextDataReportTime();
+}
+
+void AAudioServiceStreamMMAP::reportData_l() {
+    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+    if (endpoint == nullptr) {
+        ALOGE("%s() has no endpoint", __func__);
+        return;
+    }
+    sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+            static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+    return serviceEndpointMMAP->reportData();
+}
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index cd8c91e..db3c8d0 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -47,7 +47,7 @@
 public:
     AAudioServiceStreamMMAP(android::AAudioService &aAudioService,
                             bool inService);
-    virtual ~AAudioServiceStreamMMAP() = default;
+    ~AAudioServiceStreamMMAP() override = default;
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
@@ -84,6 +84,10 @@
     aaudio_result_t getHardwareTimestamp_l(
             int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
 
+    int64_t nextDataReportTime_l() REQUIRES(mLock) override;
+
+    void reportData_l() REQUIRES(mLock) override;
+
     /**
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 78f9787..0b2513a 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -44,7 +44,7 @@
 
 public:
     explicit AAudioServiceStreamShared(android::AAudioService &aAudioService);
-    virtual ~AAudioServiceStreamShared() = default;
+    ~AAudioServiceStreamShared() override = default;
 
     static std::string dumpHeader();
 
diff --git a/services/oboeservice/AAudioStreamTracker.cpp b/services/oboeservice/AAudioStreamTracker.cpp
index 9bbbc73..c86a7a2 100644
--- a/services/oboeservice/AAudioStreamTracker.cpp
+++ b/services/oboeservice/AAudioStreamTracker.cpp
@@ -78,7 +78,8 @@
     return handle;
 }
 
-aaudio_handle_t AAudioStreamTracker::addStreamForHandle(sp<AAudioServiceStreamBase> serviceStream) {
+aaudio_handle_t AAudioStreamTracker::addStreamForHandle(
+        const sp<AAudioServiceStreamBase>& serviceStream) {
     std::lock_guard<std::mutex> lock(mHandleLock);
     aaudio_handle_t handle = mPreviousHandle;
     // Assign a unique handle.
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index 43870fc..99f4b6c 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -64,7 +64,7 @@
      * @param serviceStream
      * @return handle for identifying the stream
      */
-    aaudio_handle_t addStreamForHandle(android::sp<AAudioServiceStreamBase> serviceStream);
+    aaudio_handle_t addStreamForHandle(const android::sp<AAudioServiceStreamBase>& serviceStream);
 
     /**
      * @return string that can be added to dumpsys
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index b2774e0..91ad715 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -29,7 +29,7 @@
  */
 class Runnable {
 public:
-    Runnable() {};
+    Runnable() = default;
     virtual ~Runnable() = default;
 
     virtual void run() = 0;
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 80e4296..c5080a4 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -21,10 +21,72 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+tidy_errors = [
+    // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+    // For many categories, the checks are too many to specify individually.
+    // Feel free to disable as needed - as warnings are generally ignored,
+    // we treat warnings as errors.
+    "android-*",
+    "bugprone-*",
+    "cert-*",
+    "clang-analyzer-security*",
+    "google-*",
+    "misc-*",
+    //"modernize-*",  // explicitly list the modernize as they can be subjective.
+    "modernize-avoid-bind",
+    //"modernize-avoid-c-arrays", // std::array<> can be verbose
+    "modernize-concat-nested-namespaces",
+    //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+    "modernize-deprecated-ios-base-aliases",
+    "modernize-loop-convert",
+    "modernize-make-shared",
+    "modernize-make-unique",
+    "modernize-pass-by-value",
+    "modernize-raw-string-literal",
+    "modernize-redundant-void-arg",
+    "modernize-replace-auto-ptr",
+    "modernize-replace-random-shuffle",
+    "modernize-return-braced-init-list",
+    "modernize-shrink-to-fit",
+    "modernize-unary-static-assert",
+    "modernize-use-auto",
+    "modernize-use-bool-literals",
+    "modernize-use-default-member-init",
+    "modernize-use-emplace",
+    "modernize-use-equals-default",
+    "modernize-use-equals-delete",
+    // "modernize-use-nodiscard", // Maybe add this in the future
+    "modernize-use-noexcept",
+    "modernize-use-nullptr",
+    "modernize-use-override",
+    // "modernize-use-trailing-return-type", // not necessarily more readable
+    "modernize-use-transparent-functors",
+    "modernize-use-uncaught-exceptions",
+    "modernize-use-using",
+    "performance-*",
+
+    // Remove some pedantic stylistic requirements.
+    "-android-cloexec-dup", // found in AAudioServiceEndpointMMAP.cpp
+    "-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
+
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo", // do not require TODO(info)
+    "-google-build-using-namespace", // Reenable and fix later.
+    "-google-global-names-in-headers", // found in several files
+
+    "-misc-non-private-member-variables-in-classes", // found in aidl generated files
+
+]
+
+
 cc_library {
 
     name: "libaaudioservice",
 
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
     srcs: [
         "AAudioClientTracker.cpp",
         "AAudioCommandQueue.cpp",
@@ -42,6 +104,7 @@
         "AAudioStreamTracker.cpp",
         "AAudioThread.cpp",
         "SharedMemoryProxy.cpp",
+        "SharedMemoryWrapper.cpp",
         "SharedRingBuffer.cpp",
         "TimestampScheduler.cpp",
     ],
@@ -70,7 +133,6 @@
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
-        "android.media.audio.common.types-V1-cpp",
     ],
 
     export_shared_lib_headers: [
@@ -86,4 +148,11 @@
         "frameworks/av/media/libnbaio/include_mono",
         "frameworks/av/media/libnbaio/include",
     ],
+
+    tidy: true,
+    tidy_checks: tidy_errors,
+    tidy_checks_as_errors: tidy_errors,
+    tidy_flags: [
+        "-format-style=file",
+    ]
 }
diff --git a/services/oboeservice/SharedMemoryProxy.cpp b/services/oboeservice/SharedMemoryProxy.cpp
index 78d4884..549a36e 100644
--- a/services/oboeservice/SharedMemoryProxy.cpp
+++ b/services/oboeservice/SharedMemoryProxy.cpp
@@ -58,7 +58,7 @@
     }
 
     // Get original memory address.
-    mOriginalSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+    mOriginalSharedMemory = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
                          PROT_READ|PROT_WRITE,
                          MAP_SHARED,
                          mOriginalFileDescriptor, 0);
diff --git a/services/oboeservice/SharedMemoryProxy.h b/services/oboeservice/SharedMemoryProxy.h
index 89eeb4b..81a0753 100644
--- a/services/oboeservice/SharedMemoryProxy.h
+++ b/services/oboeservice/SharedMemoryProxy.h
@@ -30,7 +30,7 @@
  */
 class SharedMemoryProxy {
 public:
-    SharedMemoryProxy() {}
+    SharedMemoryProxy() = default;
 
     ~SharedMemoryProxy();
 
diff --git a/services/oboeservice/SharedMemoryWrapper.cpp b/services/oboeservice/SharedMemoryWrapper.cpp
new file mode 100644
index 0000000..c0dcccb
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.cpp
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SharedMemoryWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <iomanip>
+#include <iostream>
+#include <sys/mman.h>
+
+#include "SharedMemoryWrapper.h"
+
+namespace aaudio {
+
+constexpr int COUNTER_SIZE_IN_BYTES = sizeof(android::fifo_counter_t);
+constexpr int WRAPPER_SIZE_IN_BYTES = 2 * COUNTER_SIZE_IN_BYTES;
+
+SharedMemoryWrapper::SharedMemoryWrapper() {
+    mCounterFd.reset(ashmem_create_region("AAudioSharedMemoryWrapper", WRAPPER_SIZE_IN_BYTES));
+    if (mCounterFd.get() == -1) {
+        ALOGE("allocate() ashmem_create_region() failed %d", errno);
+        return;
+    }
+    int err = ashmem_set_prot_region(mCounterFd.get(), PROT_READ|PROT_WRITE);
+    if (err < 0) {
+        ALOGE("allocate() ashmem_set_prot_region() failed %d", errno);
+        mCounterFd.reset();
+        return;
+    }
+    auto tmpPtr = (uint8_t *) mmap(nullptr, WRAPPER_SIZE_IN_BYTES,
+                                   PROT_READ|PROT_WRITE,
+                                   MAP_SHARED,
+                                   mCounterFd.get(), 0);
+    if (tmpPtr == MAP_FAILED) {
+        ALOGE("allocate() mmap() failed %d", errno);
+        mCounterFd.reset();
+        return;
+    }
+    mCounterMemoryAddress = tmpPtr;
+
+    mReadCounterAddress = (android::fifo_counter_t*) mCounterMemoryAddress;
+    mWriteCounterAddress = (android::fifo_counter_t*) &mCounterMemoryAddress[COUNTER_SIZE_IN_BYTES];
+}
+
+SharedMemoryWrapper::~SharedMemoryWrapper()
+{
+    reset();
+    if (mCounterMemoryAddress != nullptr) {
+        munmap(mCounterMemoryAddress, COUNTER_SIZE_IN_BYTES);
+        mCounterMemoryAddress = nullptr;
+    }
+}
+
+aaudio_result_t SharedMemoryWrapper::setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+                                                     android::fifo_frames_t capacityInFrames) {
+    if (mDataFd.get() == -1) {
+        ALOGE("%s data file descriptor is not initialized", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+    if (mCounterMemoryAddress == nullptr) {
+        ALOGE("%s the counter memory is not allocated correctly", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+    mSharedMemorySizeInBytes = bytesPerFrame * capacityInFrames;
+    auto tmpPtr = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
+                                   PROT_READ|PROT_WRITE,
+                                   MAP_SHARED,
+                                   mDataFd.get(), 0);
+    if (tmpPtr == MAP_FAILED) {
+        ALOGE("allocate() mmap() failed %d", errno);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+    mSharedMemory = tmpPtr;
+
+    mFifoBuffer = std::make_shared<android::FifoBufferIndirect>(
+            bytesPerFrame, capacityInFrames, mReadCounterAddress,
+            mWriteCounterAddress, mSharedMemory);
+    return AAUDIO_OK;
+}
+
+void SharedMemoryWrapper::reset() {
+    mFifoBuffer.reset();
+    if (mSharedMemory != nullptr) {
+        munmap(mSharedMemory, mSharedMemorySizeInBytes);
+        mSharedMemory = nullptr;
+    }
+    mDataFd.reset();
+}
+
+void SharedMemoryWrapper::fillParcelable(
+        AudioEndpointParcelable* endpointParcelable, RingBufferParcelable &ringBufferParcelable,
+        int32_t bytesPerFrame, int32_t framesPerBurst, int32_t capacityInFrames,
+        CounterFilling counterFilling) {
+    const int capacityInBytes = bytesPerFrame * capacityInFrames;
+    const int dataFdIndex =
+                endpointParcelable->addFileDescriptor(mDataFd, mSharedMemorySizeInBytes);
+    ringBufferParcelable.setBytesPerFrame(bytesPerFrame);
+    ringBufferParcelable.setFramesPerBurst(framesPerBurst);
+    ringBufferParcelable.setCapacityInFrames(capacityInFrames);
+    if (mCounterFd.get() == -1 || counterFilling == NONE) {
+        // Failed to create shared memory for the read/write counters, or the caller
+        // requested that no counters be filled.
+        ALOGD("%s no counter is filled, counterFd=%d", __func__, mCounterFd.get());
+        ringBufferParcelable.setupMemory(dataFdIndex, 0, capacityInBytes);
+    } else {
+        int counterFdIndex =
+                endpointParcelable->addFileDescriptor(mCounterFd, WRAPPER_SIZE_IN_BYTES);
+        const int readCounterSize = (counterFilling & READ) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+        const int writeCounterSize = (counterFilling & WRITE) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+        ALOGD("%s counterFdIndex=%d readCounterSize=%d, writeCounterSize=%d",
+              __func__, counterFdIndex, readCounterSize, writeCounterSize);
+        ringBufferParcelable.setupMemory(
+                {dataFdIndex, 0 /*offset*/, capacityInBytes},
+                {counterFdIndex, 0 /*offset*/, readCounterSize},
+                {counterFdIndex, COUNTER_SIZE_IN_BYTES, writeCounterSize});
+    }
+}
+
+} // namespace aaudio
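Taken together with the endpoint changes earlier in this patch, the wrapper is used in three steps: adopt a duplicate of the HAL's shared-memory fd, map it into a FIFO, then describe the queue (and, for output streams, the write counter) to the client. A condensed usage sketch under those assumptions, with error handling trimmed (the free function and its parameters are illustrative, not part of the patch):

    #include <unistd.h>

    #include "SharedMemoryWrapper.h"
    #include "binding/AudioEndpointParcelable.h"

    void wireUpDownDataQueue(aaudio::SharedMemoryWrapper& wrapper,
                             int halSharedMemoryFd,
                             aaudio::AudioEndpointParcelable* parcelable,
                             int32_t bytesPerFrame, int32_t framesPerBurst,
                             int32_t capacityInFrames, bool isOutput) {
        // 1. The wrapper owns a duplicate of the HAL fd (AudioFlinger closes the original).
        wrapper.getDataFileDescriptor().reset(dup(halSharedMemoryFd));

        // 2. Map the data region and build the FIFO used for sound-dose reporting.
        wrapper.setupFifoBuffer(bytesPerFrame, capacityInFrames);

        // 3. Describe the down-data queue to the client; only output streams expose the
        //    write counter so the service can observe how much data was written.
        wrapper.fillParcelable(parcelable, parcelable->mDownDataQueueParcelable,
                               bytesPerFrame, framesPerBurst, capacityInFrames,
                               isOutput ? aaudio::SharedMemoryWrapper::WRITE
                                        : aaudio::SharedMemoryWrapper::NONE);
    }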
diff --git a/services/oboeservice/SharedMemoryWrapper.h b/services/oboeservice/SharedMemoryWrapper.h
new file mode 100644
index 0000000..323c7f1
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/unique_fd.h>
+#include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
+#include <sys/mman.h>
+
+#include "fifo/FifoBuffer.h"
+#include "binding/RingBufferParcelable.h"
+#include "binding/AudioEndpointParcelable.h"
+
+namespace aaudio {
+
+/**
+ * Wraps the shared memory together with read and write counters, and provides a FIFO
+ * buffer for accessing the wrapped shared memory.
+ */
+class SharedMemoryWrapper {
+public:
+    explicit SharedMemoryWrapper();
+
+    virtual ~SharedMemoryWrapper();
+
+    android::base::unique_fd& getDataFileDescriptor() { return mDataFd; }
+
+    aaudio_result_t setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+                                    android::fifo_frames_t capacityInFrames);
+
+    void reset();
+
+    enum CounterFilling {
+        NONE = 0,
+        READ = 1,
+        WRITE = 2,
+    };
+    /**
+     * Fill shared memory into parcelable.
+     *
+     * @param endpointParcelable container for ring buffers and shared memories
+     * @param ringBufferParcelable the ring buffer
+     * @param bytesPerFrame the bytes per frame of the data memory
+     * @param framesPerBurst the frame per burst of the data memory
+     * @param capacityInFrames the capacity in frames of the data memory
+     * @param counterFilling a bit mask controlling whether the read/write counters from the
+     *                       wrapper should be filled
+     */
+    void fillParcelable(AudioEndpointParcelable* endpointParcelable,
+                        RingBufferParcelable &ringBufferParcelable,
+                        int32_t bytesPerFrame,
+                        int32_t framesPerBurst,
+                        int32_t capacityInFrames,
+                        CounterFilling counterFilling = NONE);
+
+    std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
+        return mFifoBuffer;
+    }
+
+private:
+    android::base::unique_fd mDataFd;
+    android::base::unique_fd mCounterFd;
+    uint8_t* mCounterMemoryAddress = nullptr;
+    android::fifo_counter_t* mReadCounterAddress = nullptr;
+    android::fifo_counter_t* mWriteCounterAddress = nullptr;
+    std::shared_ptr<android::FifoBufferIndirect> mFifoBuffer;
+    uint8_t* mSharedMemory = nullptr;
+    int32_t mSharedMemorySizeInBytes = 0;
+};
+
+} /* namespace aaudio */
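The CounterFilling values above form a small bit mask; fillParcelable() sizes each counter region to zero unless its bit is set. For illustration (these call-site constants are hypothetical, not part of the patch):

    #include "SharedMemoryWrapper.h"

    using aaudio::SharedMemoryWrapper;

    // Data queue only: neither counter is shared with the client.
    constexpr auto kDataOnly = SharedMemoryWrapper::NONE;

    // Output streams in this patch share the write counter so the service can see
    // how far the client has written (used for sound-dose reporting).
    constexpr auto kShareWrite = SharedMemoryWrapper::WRITE;

    // Both counters could be exposed by combining the bits.
    constexpr auto kShareBoth = static_cast<SharedMemoryWrapper::CounterFilling>(
            SharedMemoryWrapper::READ | SharedMemoryWrapper::WRITE);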
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index fd2a454..eebff51 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -62,7 +62,7 @@
 
     // Map the fd to memory addresses. Use a temporary pointer to keep the mmap result and update
     // it to `mSharedMemory` only when mmap operate successfully.
-    uint8_t* tmpPtr = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+    auto tmpPtr = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
                          PROT_READ|PROT_WRITE,
                          MAP_SHARED,
                          mFileDescriptor.get(), 0);
@@ -74,10 +74,8 @@
     mSharedMemory = tmpPtr;
 
     // Get addresses for our counters and data from the shared memory.
-    fifo_counter_t *readCounterAddress =
-            (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_READ_OFFSET];
-    fifo_counter_t *writeCounterAddress =
-            (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
+    auto readCounterAddress = (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_READ_OFFSET];
+    auto writeCounterAddress = (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
     uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
 
     mFifoBuffer = std::make_shared<FifoBufferIndirect>(bytesPerFrame, capacityInFrames,
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index cff1261..463bf11 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -39,7 +39,7 @@
  */
 class SharedRingBuffer {
 public:
-    SharedRingBuffer() {}
+    SharedRingBuffer() = default;
 
     virtual ~SharedRingBuffer();
 
diff --git a/services/oboeservice/TimestampScheduler.h b/services/oboeservice/TimestampScheduler.h
index baa5c41..6b5f4b1 100644
--- a/services/oboeservice/TimestampScheduler.h
+++ b/services/oboeservice/TimestampScheduler.h
@@ -31,7 +31,7 @@
 class TimestampScheduler
 {
 public:
-    TimestampScheduler() {};
+    TimestampScheduler() = default;
     virtual ~TimestampScheduler() = default;
 
     /**
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 605ac01..91ae511 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -29,6 +29,9 @@
 
 cc_fuzz {
     name: "oboeservice_fuzzer",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     srcs: [
         "oboeservice_fuzzer.cpp",
     ],
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
index ae7af3eb..617822f 100644
--- a/services/oboeservice/fuzzer/README.md
+++ b/services/oboeservice/fuzzer/README.md
@@ -23,21 +23,24 @@
 12. InputPreset
 13. BufferCapacity
 
-| Parameter| Valid Input Values| Configured Value|
-|------------- |-------------| ----- |
-| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `UserId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
-| `ProcessId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
-| `InService`   | `bool` | Value obtained from FuzzedDataProvider |
-| `DeviceId`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `SampleRate`   | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `ChannelMask` | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
-| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| Parameter                 | Valid Input Values| Configured Value|
+|---------------------------|-------------| ----- |
+| `Format`                  | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId`                  | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId`               | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService`               | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId`                | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate`              | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `ChannelMask`             | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
+| `Direction`               | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode`             | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage`                   | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType`             | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset`             | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity`          | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSampleRate`      | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareFormat`          | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
 
 This also ensures that the plugin is always deterministic for any given input.
 
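The table and the determinism note boil down to two selection strategies: raw integrals read straight from the fuzz input, and index-based picks from a table of valid enum values (optionally mixed, as the `HardwareFormat` hunk below does). The following is a minimal, self-contained sketch of those patterns, not the actual plugin; the `kValidFormats` table, its contents, and the standalone `LLVMFuzzerTestOneInput` harness are illustrative placeholders for the real `kAAudioFormats`/`kAAudioUsages` arrays used in `oboeservice_fuzzer.cpp`.

```cpp
#include <cstddef>
#include <cstdint>

#include <fuzzer/FuzzedDataProvider.h>

// Illustrative table of "valid" values; the real fuzzer uses arrays of
// AAudio constants such as kAAudioFormats and kAAudioUsages.
static constexpr int32_t kValidFormats[] = {0, 1, 2, 5, 6, 7};
static constexpr size_t kNumValidFormats =
        sizeof(kValidFormats) / sizeof(kValidFormats[0]);

extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
    FuzzedDataProvider fdp(data, size);

    // Pattern 1: unconstrained value, covers the full INT32_MIN..INT32_MAX range.
    int32_t sampleRate = fdp.ConsumeIntegral<int32_t>();

    // Pattern 2: pick an index into a table of valid values, so only defined
    // enum constants are exercised.
    int32_t format = kValidFormats[
            fdp.ConsumeIntegralInRange<size_t>(0, kNumValidFormats - 1)];

    // Pattern 3: mix the two, letting the provider decide whether to send a
    // valid constant or an arbitrary integer.
    int32_t hardwareFormat = fdp.ConsumeBool()
            ? fdp.ConsumeIntegral<int32_t>()
            : kValidFormats[fdp.ConsumeIntegralInRange<size_t>(0, kNumValidFormats - 1)];

    (void)sampleRate;
    (void)format;
    (void)hardwareFormat;
    return 0;
}
```

Because every choice is derived from the same byte stream, replaying an input reproduces the exact sequence of configuration values and API calls, which is what makes the plugin deterministic.
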
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index 5e48955..f047065 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -34,6 +34,9 @@
     AAUDIO_FORMAT_UNSPECIFIED,
     AAUDIO_FORMAT_PCM_I16,
     AAUDIO_FORMAT_PCM_FLOAT,
+    AAUDIO_FORMAT_PCM_I24_PACKED,
+    AAUDIO_FORMAT_PCM_I32,
+    AAUDIO_FORMAT_IEC61937
 };
 
 aaudio_usage_t kAAudioUsages[] = {
@@ -146,41 +149,42 @@
 
     void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
 
-    aaudio_handle_t openStream(const AAudioStreamRequest &request,
-                               AAudioStreamConfiguration &configurationOutput) override;
+    AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+                                AAudioStreamConfiguration &configurationOutput) override;
 
-    aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+    aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                          AudioEndpointParcelable &parcelable) override;
 
-    aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+    aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
 
-    aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+    aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
+                                        pid_t clientThreadId,
                                         int64_t periodNanoseconds) override;
 
-    aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+    aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                           pid_t clientThreadId) override;
 
-    aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+    aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
                                 const AudioClient &client UNUSED_PARAM,
                                 const audio_attributes_t *attr UNUSED_PARAM,
                                 audio_port_handle_t *clientHandle UNUSED_PARAM) override {
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+    aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
                                audio_port_handle_t clientHandle UNUSED_PARAM) override {
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    aaudio_result_t exitStandby(aaudio_handle_t streamHandle UNUSED_PARAM,
+    aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
                                 AudioEndpointParcelable &parcelable UNUSED_PARAM) override {
         return AAUDIO_ERROR_UNAVAILABLE;
     }
@@ -247,92 +251,91 @@
     mAAudioService.clear();
 }
 
-aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
-                                             AAudioStreamConfiguration &configurationOutput) {
-    aaudio_handle_t stream;
+AAudioHandleInfo FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+                                              AAudioStreamConfiguration &configurationOutput) {
     for (int i = 0; i < 2; ++i) {
         AAudioServiceInterface *service = getAAudioService();
         if (!service) {
-            return AAUDIO_ERROR_NO_SERVICE;
+            return {-1, AAUDIO_ERROR_NO_SERVICE};
         }
 
-        stream = service->openStream(request, configurationOutput);
+        auto streamHandleInfo = service->openStream(request, configurationOutput);
 
-        if (stream == AAUDIO_ERROR_NO_SERVICE) {
+        if (streamHandleInfo.getHandle() == AAUDIO_ERROR_NO_SERVICE) {
             dropAAudioService();
         } else {
-            break;
+            return streamHandleInfo;
         }
     }
-    return stream;
+    return {-1, AAUDIO_ERROR_NO_SERVICE};
 }
 
-aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::closeStream(const AAudioHandleInfo& streamHandleInfo) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->closeStream(streamHandle);
+    return service->closeStream(streamHandleInfo);
 }
 
-aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
                                                        AudioEndpointParcelable &parcelable) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->getStreamDescription(streamHandle, parcelable);
+    return service->getStreamDescription(streamHandleInfo, parcelable);
 }
 
-aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::startStream(const AAudioHandleInfo& streamHandleInfo) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->startStream(streamHandle);
+    return service->startStream(streamHandleInfo);
 }
 
-aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->pauseStream(streamHandle);
+    return service->pauseStream(streamHandleInfo);
 }
 
-aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::stopStream(const AAudioHandleInfo& streamHandleInfo) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->stopStream(streamHandle);
+    return service->stopStream(streamHandleInfo);
 }
 
-aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::flushStream(const AAudioHandleInfo& streamHandleInfo) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->flushStream(streamHandle);
+    return service->flushStream(streamHandleInfo);
 }
 
-aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                       pid_t clientThreadId,
                                                       int64_t periodNanoseconds) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+    return service->registerAudioThread(streamHandleInfo, clientThreadId, periodNanoseconds);
 }
 
-aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
                                                         pid_t clientThreadId) {
     AAudioServiceInterface *service = getAAudioService();
     if (!service) {
         return AAUDIO_ERROR_NO_SERVICE;
     }
-    return service->unregisterAudioThread(streamHandle, clientThreadId);
+    return service->unregisterAudioThread(streamHandleInfo, clientThreadId);
 }
 
 class OboeserviceFuzzer {
@@ -400,8 +403,15 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
-    aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
-    if (stream < 0) {
+    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
+    request.getConfiguration().setHardwareFormat((audio_format_t)(
+        fdp.ConsumeBool()
+            ? fdp.ConsumeIntegral<int32_t>()
+            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
+    auto streamHandleInfo = mClient->openStream(request, configurationOutput);
+    if (streamHandleInfo.getHandle() < 0) {
         // invalid request, stream not opened.
         return;
     }
@@ -410,23 +420,23 @@
         int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
         switch (action) {
             case 0:
-                mClient->getStreamDescription(stream, audioEndpointParcelable);
+                mClient->getStreamDescription(streamHandleInfo, audioEndpointParcelable);
                 break;
             case 1:
-                mClient->startStream(stream);
+                mClient->startStream(streamHandleInfo);
                 break;
             case 2:
-                mClient->pauseStream(stream);
+                mClient->pauseStream(streamHandleInfo);
                 break;
             case 3:
-                mClient->stopStream(stream);
+                mClient->stopStream(streamHandleInfo);
                 break;
             case 4:
-                mClient->flushStream(stream);
+                mClient->flushStream(streamHandleInfo);
                 break;
         }
     }
-    mClient->closeStream(stream);
+    mClient->closeStream(streamHandleInfo);
     assert(mClient->getDeathCount() == 0);
 }
 
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index 5c1dda1..ea5139d 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,9 +15,8 @@
     imports: [
         "android.hardware.common-V2",
         "android.hardware.common.fmq-V1",
-        "android.hardware.tv.tuner-V1",
+        "android.hardware.tv.tuner-V2",
     ],
-
     backend: {
         java: {
             enabled: false,
@@ -42,7 +41,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V1-ndk",
+        "android.hardware.tv.tuner-V2-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
@@ -85,10 +84,11 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V1-ndk",
+        "android.hardware.tv.tuner-V2-ndk",
         "libbase",
         "libbinder",
         "libfmq",
+        "libhidlbase",
         "liblog",
         "libtunerservice",
         "libutils",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index a6f3a2c..92fa970 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -26,6 +26,7 @@
 #include <aidl/android/hardware/tv/tuner/Result.h>
 
 #include "TunerDvr.h"
+#include "TunerService.h"
 #include "TunerTimeFilter.h"
 
 using ::aidl::android::hardware::tv::tuner::IDvr;
@@ -41,23 +42,21 @@
 namespace tv {
 namespace tuner {
 
-TunerDemux::TunerDemux(shared_ptr<IDemux> demux, int id) {
+TunerDemux::TunerDemux(const shared_ptr<IDemux> demux, const int id,
+                       const shared_ptr<TunerService> tuner) {
     mDemux = demux;
     mDemuxId = id;
+    mTunerService = tuner;
 }
 
 TunerDemux::~TunerDemux() {
+    close();
     mDemux = nullptr;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDemux::setFrontendDataSource(
         const shared_ptr<ITunerFrontend>& in_frontend) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int frontendId;
     in_frontend->getFrontendId(&frontendId);
 
@@ -65,43 +64,26 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::setFrontendDataSourceById(int frontendId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->setFrontendDataSource(frontendId);
 }
 
 ::ndk::ScopedAStatus TunerDemux::openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
                                             const shared_ptr<ITunerFilterCallback>& in_cb,
                                             shared_ptr<ITunerFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> filter;
     shared_ptr<TunerFilter::FilterCallback> filterCb =
             ::ndk::SharedRefBase::make<TunerFilter::FilterCallback>(in_cb);
     shared_ptr<IFilterCallback> cb = filterCb;
     auto status = mDemux->openFilter(in_type, in_bufferSize, cb, &filter);
     if (status.isOk()) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type);
+        *_aidl_return =
+                ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type, mTunerService);
     }
 
     return status;
 }
 
 ::ndk::ScopedAStatus TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<ITimeFilter> filter;
     auto status = mDemux->openTimeFilter(&filter);
     if (status.isOk()) {
@@ -113,35 +95,17 @@
 
 ::ndk::ScopedAStatus TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
                                                int32_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(tunerFilter.get()))->getHalFilter();
     return mDemux->getAvSyncHwId(halFilter, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->getAvSyncTime(avSyncHwId, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
                                          const shared_ptr<ITunerDvrCallback>& in_cb,
                                          shared_ptr<ITunerDvr>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IDvrCallback> callback = ::ndk::SharedRefBase::make<TunerDvr::DvrCallback>(in_cb);
     shared_ptr<IDvr> halDvr;
     auto res = mDemux->openDvr(in_dvbType, in_bufferSize, callback, &halDvr);
@@ -153,36 +117,15 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::connectCiCam(int32_t ciCamId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->connectCiCam(ciCamId);
 }
 
 ::ndk::ScopedAStatus TunerDemux::disconnectCiCam() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDemux->disconnectCiCam();
 }
 
 ::ndk::ScopedAStatus TunerDemux::close() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mDemux->close();
-    mDemux = nullptr;
-
-    return res;
+    return mDemux->close();
 }
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index cdb3aa0..0c71987 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -32,10 +32,13 @@
 namespace tv {
 namespace tuner {
 
+class TunerService;
+
 class TunerDemux : public BnTunerDemux {
 
 public:
-    TunerDemux(shared_ptr<IDemux> demux, int demuxId);
+    TunerDemux(const shared_ptr<IDemux> demux, const int demuxId,
+               const shared_ptr<TunerService> tuner);
     virtual ~TunerDemux();
 
     ::ndk::ScopedAStatus setFrontendDataSource(
@@ -60,6 +63,7 @@
 private:
     shared_ptr<IDemux> mDemux;
     int mDemuxId;
+    shared_ptr<TunerService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index 70aee20..ffe0be9 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,38 +41,21 @@
 }
 
 TunerDescrambler::~TunerDescrambler() {
+    close();
     mDescrambler = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::setDemuxSource(
         const shared_ptr<ITunerDemux>& in_tunerDemux) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDescrambler->setDemuxSource((static_cast<TunerDemux*>(in_tunerDemux.get()))->getId());
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDescrambler->setKeyToken(in_keyToken);
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::addPid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -83,12 +66,6 @@
 
 ::ndk::ScopedAStatus TunerDescrambler::removePid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -98,16 +75,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::close() {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mDescrambler->close();
-    mDescrambler = nullptr;
-
-    return res;
+    return mDescrambler->close();
 }
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index 8776f7e..fcee966 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,36 +37,19 @@
 }
 
 TunerDvr::~TunerDvr() {
+    close();
     mDvr = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->getQueueDesc(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerDvr::configure(const DvrSettings& in_settings) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->configure(in_settings);
 }
 
 ::ndk::ScopedAStatus TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -82,12 +65,6 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -103,46 +80,34 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::start() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->start();
 }
 
 ::ndk::ScopedAStatus TunerDvr::stop() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->stop();
 }
 
 ::ndk::ScopedAStatus TunerDvr::flush() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mDvr->flush();
 }
 
 ::ndk::ScopedAStatus TunerDvr::close() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
+    return mDvr->close();
+}
+
+::ndk::ScopedAStatus TunerDvr::setStatusCheckIntervalHint(const int64_t milliseconds) {
+    if (milliseconds < 0L) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    ::ndk::ScopedAStatus s = mDvr->setStatusCheckIntervalHint(milliseconds);
+    if (s.getStatus() == STATUS_UNKNOWN_TRANSACTION) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    auto status = mDvr->close();
-    mDvr = nullptr;
-
-    return status;
+    return s;
 }
 
 /////////////// IDvrCallback ///////////////////////
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 1854d08..2330e7b 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -61,6 +61,7 @@
     ::ndk::ScopedAStatus stop() override;
     ::ndk::ScopedAStatus flush() override;
     ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
 
     struct DvrCallback : public BnDvrCallback {
         DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index e8c7767..478e7ea 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -36,28 +36,28 @@
 
 using namespace std;
 
-TunerFilter::TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb,
-                         DemuxFilterType type)
+TunerFilter::TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+                         const DemuxFilterType type, const shared_ptr<TunerService> tuner)
       : mFilter(filter),
         mType(type),
         mStarted(false),
         mShared(false),
         mClientPid(-1),
-        mFilterCallback(cb) {}
+        mFilterCallback(cb),
+        mTunerService(tuner) {}
 
 TunerFilter::~TunerFilter() {
-    Mutex::Autolock _l(mLock);
-    mFilter = nullptr;
+    close();
+    freeSharedFilterToken("");
+    {
+        Mutex::Autolock _l(mLock);
+        mFilter = nullptr;
+        mTunerService = nullptr;
+    }
 }
 
 ::ndk::ScopedAStatus TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -73,12 +73,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::getId(int32_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -94,12 +88,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::getId64Bit(int64_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -115,12 +103,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configure(const DemuxFilterSettings& in_settings) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -132,12 +114,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureMonitorEvent(int32_t monitorEventType) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -149,12 +125,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureIpFilterContextId(int32_t cid) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -166,12 +136,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::configureAvStreamType(const AvStreamType& in_avStreamType) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -183,12 +147,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -207,12 +165,6 @@
 ::ndk::ScopedAStatus TunerFilter::getAvSharedHandle(NativeHandle* out_avMemory,
                                                     int64_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -225,12 +177,6 @@
 ::ndk::ScopedAStatus TunerFilter::releaseAvHandle(const NativeHandle& in_handle,
                                                   int64_t in_avDataId) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -242,12 +188,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::start() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -267,12 +207,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::stop() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -291,12 +225,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::flush() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -312,12 +240,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::close() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -326,7 +248,7 @@
                 mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
                 mFilterCallback->detachSharedFilterCallback();
             }
-            TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+            mTunerService->removeSharedFilter(this->ref<TunerFilter>());
         } else {
             // Calling from shared process, do not really close this filter.
             if (mFilterCallback != nullptr) {
@@ -341,7 +263,6 @@
         mFilterCallback->detachCallbacks();
     }
     auto res = mFilter->close();
-    mFilter = nullptr;
     mStarted = false;
     mShared = false;
     mClientPid = -1;
@@ -351,12 +272,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::acquireSharedFilterToken(string* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared || mStarted) {
         ALOGD("create SharedFilter in wrong state");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -365,7 +280,7 @@
 
     IPCThreadState* ipc = IPCThreadState::self();
     mClientPid = ipc->getCallingPid();
-    string token = TunerService::getTunerService()->addFilterToShared(this->ref<TunerFilter>());
+    string token = mTunerService->addFilterToShared(this->ref<TunerFilter>());
     _aidl_return->assign(token);
     mShared = true;
 
@@ -374,12 +289,6 @@
 
 ::ndk::ScopedAStatus TunerFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!mShared) {
         // The filter is not shared or the shared filter has been closed.
         return ::ndk::ScopedAStatus::ok();
@@ -390,7 +299,7 @@
         mFilterCallback->detachSharedFilterCallback();
     }
 
-    TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+    mTunerService->removeSharedFilter(this->ref<TunerFilter>());
     mShared = false;
 
     return ::ndk::ScopedAStatus::ok();
@@ -398,24 +307,12 @@
 
 ::ndk::ScopedAStatus TunerFilter::getFilterType(DemuxFilterType* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     *_aidl_return = mType;
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerFilter::setDelayHint(const FilterDelayHint& in_hint) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFilter->setDelayHint(in_hint);
 }
 
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index 93d8898..f6178c4 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -53,8 +53,9 @@
 
 using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
 
-class TunerFilter : public BnTunerFilter {
+class TunerService;
 
+class TunerFilter : public BnTunerFilter {
 public:
     class FilterCallback : public BnFilterCallback {
     public:
@@ -75,7 +76,8 @@
         Mutex mCallbackLock;
     };
 
-    TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb, DemuxFilterType type);
+    TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+                const DemuxFilterType type, const shared_ptr<TunerService> tuner);
     virtual ~TunerFilter();
 
     ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -113,6 +115,7 @@
     int32_t mClientPid;
     shared_ptr<FilterCallback> mFilterCallback;
     Mutex mLock;
+    shared_ptr<TunerService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 5116305..1e93d95 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,18 +37,13 @@
 }
 
 TunerFrontend::~TunerFrontend() {
+    close();
     mFrontend = nullptr;
     mId = -1;
 }
 
 ::ndk::ScopedAStatus TunerFrontend::setCallback(
         const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (tunerFrontendCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -60,53 +55,23 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::tune(const FrontendSettings& settings) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->tune(settings);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::stopTune() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->stopTune();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::scan(const FrontendSettings& settings,
                                          FrontendScanType frontendScanType) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->scan(settings, frontendScanType);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::stopScan() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->stopScan();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (lnb == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,46 +81,19 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::linkCiCamToFrontend(int32_t ciCamId, int32_t* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->linkCiCam(ciCamId, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::unlinkCiCamToFrontend(int32_t ciCamId) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->unlinkCiCam(ciCamId);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::close() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mFrontend->close();
-    mFrontend = nullptr;
-
-    return res;
+    return mFrontend->close();
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
                                               vector<FrontendStatus>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getStatus(in_statusTypes, _aidl_return);
 }
 
@@ -165,34 +103,16 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getHardwareInfo(std::string* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getHardwareInfo(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::removeOutputPid(int32_t in_pid) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->removeOutputPid(in_pid);
 }
 
 ::ndk::ScopedAStatus TunerFrontend::getFrontendStatusReadiness(
         const std::vector<FrontendStatusType>& in_statusTypes,
         std::vector<FrontendStatusReadiness>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mFrontend->getFrontendStatusReadiness(in_statusTypes, _aidl_return);
 }
 
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
index dc67110..a03386f 100644
--- a/services/tuner/TunerHelper.cpp
+++ b/services/tuner/TunerHelper.cpp
@@ -83,6 +83,22 @@
     tunerRM->setFrontendInfoList(feInfos);
     tunerRM->setLnbInfoList(lnbHandles);
 }
+void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+                                       const vector<TunerDemuxInfo>& demuxInfos,
+                                       const vector<int32_t>& lnbHandles) {
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
+    shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
+    if (tunerRM == nullptr) {
+        return;
+    }
+
+    updateTunerResources(feInfos, lnbHandles);
+
+    // for Tuner 2.0 and below, Demux resource is not really managed under TRM
+    if (demuxInfos.size() > 0) {
+        tunerRM->setDemuxInfoList(demuxInfos);
+    }
+}
 
 // TODO: create a map between resource id and handles.
 int TunerHelper::getResourceIdFromHandle(int resourceHandle, int /*type*/) {
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
index 755df57..65a9b0b 100644
--- a/services/tuner/TunerHelper.h
+++ b/services/tuner/TunerHelper.h
@@ -17,9 +17,11 @@
 #ifndef ANDROID_MEDIA_TUNERDVRHELPER_H
 #define ANDROID_MEDIA_TUNERDVRHELPER_H
 
+#include <aidl/android/media/tv/tunerresourcemanager/TunerDemuxInfo.h>
 #include <aidl/android/media/tv/tunerresourcemanager/TunerFrontendInfo.h>
 #include <utils/String16.h>
 
+using ::aidl::android::media::tv::tunerresourcemanager::TunerDemuxInfo;
 using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
 using ::android::String16;
 
@@ -55,6 +57,10 @@
     // TODO: update Demux, Descrambler.
     static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
                                      const vector<int32_t>& lnbHandles);
+
+    static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+                                     const vector<TunerDemuxInfo>& demuxInfos,
+                                     const vector<int32_t>& lnbHandles);
     // TODO: create a map between resource id and handles.
     static int getResourceIdFromHandle(int resourceHandle, int type);
     static int getResourceHandleFromId(int id, int resourceType);
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 1e143c3..2fb6135 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,18 +36,13 @@
 }
 
 TunerLnb::~TunerLnb() {
+    close();
     mLnb = nullptr;
     mId = -1;
 }
 
 ::ndk::ScopedAStatus TunerLnb::setCallback(
         const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_tunerLnbCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -59,56 +54,23 @@
 }
 
 ::ndk::ScopedAStatus TunerLnb::setVoltage(LnbVoltage in_voltage) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setVoltage(in_voltage);
 }
 
 ::ndk::ScopedAStatus TunerLnb::setTone(LnbTone in_tone) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setTone(in_tone);
 }
 
 ::ndk::ScopedAStatus TunerLnb::setSatellitePosition(LnbPosition in_position) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->setSatellitePosition(in_position);
 }
 
 ::ndk::ScopedAStatus TunerLnb::sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mLnb->sendDiseqcMessage(in_diseqcMessage);
 }
 
 ::ndk::ScopedAStatus TunerLnb::close() {
-    if (mLnb == nullptr) {
-        ALOGE("ILnb is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto res = mLnb->close();
-    mLnb = nullptr;
-
-    return res;
+    return mLnb->close();
 }
 
 /////////////// ILnbCallback ///////////////////////
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 4833aaf..9a1e8bb 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -27,6 +27,7 @@
 #include <android/binder_manager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/PermissionCache.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 #include <string>
@@ -51,107 +52,125 @@
 namespace tv {
 namespace tuner {
 
-shared_ptr<TunerService> TunerService::sTunerService = nullptr;
-
 TunerService::TunerService() {
-    if (!TunerHelper::checkTunerFeature()) {
-        ALOGD("Device doesn't have tuner hardware.");
-        return;
+    const string statsServiceName = string() + ITuner::descriptor + "/default";
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
+    mTuner = ITuner::fromBinder(binder);
+    ALOGE_IF(mTuner == nullptr, "Failed to get Tuner HAL Service");
+
+    mTunerVersion = TUNER_HAL_VERSION_2_0;
+    if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
+        // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
+        mTunerVersion = (mTunerVersion + 1) << 16;
     }
 
+    // Register the tuner resources to TRM.
     updateTunerResources();
 }
 
-TunerService::~TunerService() {}
+TunerService::~TunerService() {
+    mTuner = nullptr;
+}
 
 binder_status_t TunerService::instantiate() {
-    sTunerService = ::ndk::SharedRefBase::make<TunerService>();
-    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerService> TunerService::getTunerService() {
-    return sTunerService;
-}
-
-bool TunerService::hasITuner() {
-    ALOGV("hasITuner");
-    if (mTuner != nullptr) {
-        return true;
+    shared_ptr<TunerService> tunerService = ::ndk::SharedRefBase::make<TunerService>();
+    bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+    if (lazyHal) {
+        return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+                                                   getServiceName());
     }
-    const string statsServiceName = string() + ITuner::descriptor + "/default";
-    if (AServiceManager_isDeclared(statsServiceName.c_str())) {
-        ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
-        mTuner = ITuner::fromBinder(binder);
-    } else {
-        mTuner = nullptr;
-        ALOGE("Failed to get Tuner HAL Service");
-        return false;
-    }
-
-    mTunerVersion = TUNER_HAL_VERSION_2_0;
-    // TODO: Enable this after Tuner HAL is frozen.
-    // if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
-    //  // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
-    //  mTunerVersion = (mTunerVersion + 1) << 16;
-    //}
-
-    return true;
+    return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
-::ndk::ScopedAStatus TunerService::openDemux(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerService::openDemux(int32_t in_demuxHandle,
                                              shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
-    if (!hasITuner()) {
+    shared_ptr<IDemux> demux;
+    bool fallBackToOpenDemux = false;
+    vector<int32_t> ids;
+
+    if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+        fallBackToOpenDemux = true;
+    } else {
+        mTuner->getDemuxIds(&ids);
+        if (ids.size() == 0) {
+            fallBackToOpenDemux = true;
+        }
+    }
+
+    if (fallBackToOpenDemux) {
+        auto status = mTuner->openDemux(&ids, &demux);
+        if (status.isOk()) {
+            *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, ids[0],
+                                                                   this->ref<TunerService>());
+        }
+        return status;
+    } else {
+        int id = TunerHelper::getResourceIdFromHandle(in_demuxHandle, DEMUX);
+        auto status = mTuner->openDemuxById(id, &demux);
+        if (status.isOk()) {
+            *_aidl_return =
+                    ::ndk::SharedRefBase::make<TunerDemux>(demux, id, this->ref<TunerService>());
+        }
+        return status;
+    }
+}
+
+::ndk::ScopedAStatus TunerService::getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) {
+    if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    vector<int32_t> id;
-    shared_ptr<IDemux> demux;
-    auto status = mTuner->openDemux(&id, &demux);
-    if (status.isOk()) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, id[0]);
+    int id = TunerHelper::getResourceIdFromHandle(in_demuxHandle, DEMUX);
+    return mTuner->getDemuxInfo(id, _aidl_return);
+}
+
+::ndk::ScopedAStatus TunerService::getDemuxInfoList(vector<DemuxInfo>* _aidl_return) {
+    if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+    vector<DemuxInfo> demuxInfoList;
+    vector<int32_t> ids;
+    auto status = mTuner->getDemuxIds(&ids);
+    if (!status.isOk()) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    return status;
+    for (int i = 0; i < ids.size(); i++) {
+        DemuxInfo demuxInfo;
+        auto res = mTuner->getDemuxInfo(ids[i], &demuxInfo);
+        if (!res.isOk()) {
+            continue;
+        }
+        demuxInfoList.push_back(demuxInfo);
+    }
+
+    if (demuxInfoList.size() > 0) {
+        *_aidl_return = demuxInfoList;
+        return ::ndk::ScopedAStatus::ok();
+    } else {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
 }
 
 ::ndk::ScopedAStatus TunerService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
     ALOGV("getDemuxCaps");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getDemuxCaps(_aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerService::getFrontendIds(vector<int32_t>* ids) {
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getFrontendIds(ids);
 }
 
 ::ndk::ScopedAStatus TunerService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getFrontendInfo(id, _aidl_return);
 }
 
 ::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
                                                 shared_ptr<ITunerFrontend>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
     shared_ptr<IFrontend> frontend;
     auto status = mTuner->openFrontendById(id, &frontend);
@@ -163,12 +182,6 @@
 }
 
 ::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<ILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
     auto status = mTuner->openLnbById(id, &lnb);
@@ -181,12 +194,6 @@
 
 ::ndk::ScopedAStatus TunerService::openLnbByName(const string& lnbName,
                                                  shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     vector<int32_t> id;
     shared_ptr<ILnb> lnb;
     auto status = mTuner->openLnbByName(lnbName, &id, &lnb);
@@ -199,12 +206,6 @@
 
 ::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
                                                    shared_ptr<ITunerDescrambler>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     shared_ptr<IDescrambler> descrambler;
     // int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
     auto status = mTuner->openDescrambler(&descrambler);
@@ -216,7 +217,6 @@
 }
 
 ::ndk::ScopedAStatus TunerService::getTunerHalVersion(int* _aidl_return) {
-    hasITuner();
     *_aidl_return = mTunerVersion;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -224,12 +224,6 @@
 ::ndk::ScopedAStatus TunerService::openSharedFilter(const string& in_filterToken,
                                                     const shared_ptr<ITunerFilterCallback>& in_cb,
                                                     shared_ptr<ITunerFilter>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!PermissionCache::checkCallingPermission(sSharedFilterPermission)) {
         ALOGE("Request requires android.permission.ACCESS_TV_SHARED_FILTER");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -260,35 +254,22 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
+::ndk::ScopedAStatus TunerService::isLnaSupported(bool* _aidl_return) {
+    ALOGV("isLnaSupported");
+    return mTuner->isLnaSupported(_aidl_return);
+}
 
+::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
     return mTuner->setLna(bEnable);
 }
 
 ::ndk::ScopedAStatus TunerService::setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                            int32_t in_maxNumber) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->setMaxNumberOfFrontends(in_frontendType, in_maxNumber);
 }
 
 ::ndk::ScopedAStatus TunerService::getMaxNumberOfFrontends(FrontendType in_frontendType,
                                                            int32_t* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTuner->getMaxNumberOfFrontends(in_frontendType, _aidl_return);
 }
 
@@ -309,12 +290,9 @@
 }
 
 void TunerService::updateTunerResources() {
-    if (!hasITuner()) {
-        ALOGE("Failed to updateTunerResources");
-        return;
-    }
-
-    TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
+    TunerHelper::updateTunerResources(getTRMFrontendInfos(),
+                                      getTRMDemuxInfos(),
+                                      getTRMLnbHandles());
 }
 
 vector<TunerFrontendInfo> TunerService::getTRMFrontendInfos() {
@@ -342,6 +320,32 @@
     return infos;
 }
 
+vector<TunerDemuxInfo> TunerService::getTRMDemuxInfos() {
+    vector<TunerDemuxInfo> infos;
+    vector<int32_t> ids;
+
+    if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+        return infos;
+    }
+
+    auto status = mTuner->getDemuxIds(&ids);
+    if (!status.isOk()) {
+        return infos;
+    }
+
+    for (int i = 0; i < ids.size(); i++) {
+        DemuxInfo demuxInfo;
+        mTuner->getDemuxInfo(ids[i], &demuxInfo);
+        TunerDemuxInfo tunerDemuxInfo{
+                .handle = TunerHelper::getResourceHandleFromId((int)ids[i], DEMUX),
+                .filterTypes = static_cast<int>(demuxInfo.filterTypes)
+        };
+        infos.push_back(tunerDemuxInfo);
+    }
+
+    return infos;
+}
+
 vector<int32_t> TunerService::getTRMLnbHandles() {
     vector<int32_t> lnbHandles;
     if (mTuner != nullptr) {
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index 7fc2aa4..190ccd4 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -32,6 +32,7 @@
 using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
 using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
 using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxInfo;
 using ::aidl::android::hardware::tv::tuner::FrontendInfo;
 using ::aidl::android::hardware::tv::tuner::FrontendType;
 using ::aidl::android::hardware::tv::tuner::ITuner;
@@ -71,12 +72,15 @@
     ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
                                    shared_ptr<ITunerDemux>* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
     ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
                                          shared_ptr<ITunerDescrambler>* _aidl_return) override;
     ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
                                           const shared_ptr<ITunerFilterCallback>& in_cb,
                                           shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
     ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
     ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                  int32_t in_maxNumber) override;
@@ -86,20 +90,16 @@
     string addFilterToShared(const shared_ptr<TunerFilter>& sharedFilter);
     void removeSharedFilter(const shared_ptr<TunerFilter>& sharedFilter);
 
-    static shared_ptr<TunerService> getTunerService();
-
 private:
-    bool hasITuner();
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
+    vector<TunerDemuxInfo> getTRMDemuxInfos();
     vector<int32_t> getTRMLnbHandles();
 
     shared_ptr<ITuner> mTuner;
     int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
     Mutex mSharedFiltersLock;
     map<string, shared_ptr<TunerFilter>> mSharedFilters;
-
-    static shared_ptr<TunerService> sTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 73cd6b4..385a063 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,37 +35,19 @@
 }
 
 TunerTimeFilter::~TunerTimeFilter() {
+    close();
     mTimeFilter = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTimeFilter->setTimeStamp(timeStamp);
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::clearTimeStamp() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     return mTimeFilter->clearTimeStamp();
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     auto status = mTimeFilter->getSourceTime(_aidl_return);
     if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -74,13 +56,6 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     auto status = mTimeFilter->getTimeStamp(_aidl_return);
     if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -89,16 +64,7 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::close() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    auto status = mTimeFilter->close();
-    mTimeFilter = nullptr;
-
-    return status;
+    return mTimeFilter->close();
 }
 
 }  // namespace tuner
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
index 2c01c4e..cafe075 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -66,4 +66,9 @@
      * close the DVR instance to release resource for DVR.
      */
     void close();
+
+    /**
+     * Set status check time interval.
+     */
+    void setStatusCheckIntervalHint(in long milliseconds);
 }
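
The setStatusCheckIntervalHint entry point added above is only honored by AIDL tuner HALs; the HIDL-backed implementation later in this patch (TunerHidlDvr::setStatusCheckIntervalHint) simply reports UNAVAILABLE. A minimal client-side sketch, not part of the patch: the dvr proxy, the 100 ms value, the LOG_TAG and the generated header path are illustrative assumptions.

#define LOG_TAG "TunerDvrClientSketch"

#include <memory>

#include <aidl/android/media/tv/tuner/ITunerDvr.h>  // generated NDK binding (assumed path)
#include <utils/Log.h>

using ::aidl::android::media::tv::tuner::ITunerDvr;

// Suggest that the service check DVR status roughly every 100 ms.
void hintDvrStatusInterval(const std::shared_ptr<ITunerDvr>& dvr) {
    ::ndk::ScopedAStatus status = dvr->setStatusCheckIntervalHint(100 /* milliseconds */);
    if (!status.isOk()) {
        // HIDL 1.x HALs have no equivalent call and answer UNAVAILABLE,
        // so callers should treat the hint as best-effort.
        ALOGW("setStatusCheckIntervalHint not honored: %d", status.getServiceSpecificError());
    }
}
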
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index b8084ab..932133e 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -17,6 +17,7 @@
 package android.media.tv.tuner;
 
 import android.hardware.tv.tuner.DemuxCapabilities;
+import android.hardware.tv.tuner.DemuxInfo;
 import android.hardware.tv.tuner.FrontendInfo;
 import android.hardware.tv.tuner.FrontendType;
 import android.media.tv.tuner.ITunerDemux;
@@ -77,6 +78,21 @@
     ITunerDemux openDemux(in int demuxHandle);
 
     /**
+     * Retrieve the supported filter main types of the queried demux.
+     *
+     * @param demuxHandle the handle of the demux to query demux info for
+     * @return the demux info
+     */
+    DemuxInfo getDemuxInfo(in int demuxHandle);
+
+    /**
+     * Retrieve the list of demux info for all the demuxes on the system.
+     *
+     * @return the list of DemuxInfo
+     */
+    DemuxInfo[] getDemuxInfoList();
+
+    /**
      * Retrieve the Tuner Demux capabilities.
      *
      * @return the demux’s capabilities.
@@ -107,6 +123,13 @@
     ITunerFilter openSharedFilter(in String filterToken, in ITunerFilterCallback cb);
 
     /**
+     * Check whether the Low Noise Amplifier (LNA) is supported by the Tuner.
+     *
+     * @return {@code true} if supported, otherwise {@code false}.
+     */
+    boolean isLnaSupported();
+
+    /**
      * Enable or Disable Low Noise Amplifier (LNA).
      *
      * @param bEnable enable Lna or not.
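
These new capability queries are only fully served on the AIDL HAL path; as the TunerHidlService changes further below show, the HIDL path answers getDemuxInfo, getDemuxInfoList and isLnaSupported with UNAVAILABLE. A rough usage sketch, not part of the patch, from a client that already holds an ITunerService proxy; the proxy acquisition, LOG_TAG, logging and the generated header path are illustrative assumptions.

#define LOG_TAG "TunerServiceClientSketch"

#include <memory>
#include <vector>

#include <aidl/android/media/tv/tuner/ITunerService.h>  // generated NDK binding (assumed path)
#include <utils/Log.h>

using ::aidl::android::hardware::tv::tuner::DemuxInfo;
using ::aidl::android::media::tv::tuner::ITunerService;

void probeTunerCapabilities(const std::shared_ptr<ITunerService>& tunerService) {
    // Only toggle the LNA when the HAL reports support for it.
    bool lnaSupported = false;
    if (tunerService->isLnaSupported(&lnaSupported).isOk() && lnaSupported) {
        tunerService->setLna(true);
    }

    // Enumerate the filter main types each demux supports (AIDL HALs only;
    // on a HIDL 1.x HAL this call fails with UNAVAILABLE).
    std::vector<DemuxInfo> demuxInfos;
    if (tunerService->getDemuxInfoList(&demuxInfos).isOk()) {
        for (const DemuxInfo& info : demuxInfos) {
            ALOGD("demux filter main type mask: 0x%x", static_cast<int>(info.filterTypes));
        }
    }
}
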
diff --git a/services/tuner/hidl/TunerHidlDemux.cpp b/services/tuner/hidl/TunerHidlDemux.cpp
index a8151d2..bbb7782 100644
--- a/services/tuner/hidl/TunerHidlDemux.cpp
+++ b/services/tuner/hidl/TunerHidlDemux.cpp
@@ -20,6 +20,7 @@
 
 #include "TunerHidlDvr.h"
 #include "TunerHidlFilter.h"
+#include "TunerHidlService.h"
 #include "TunerHidlTimeFilter.h"
 
 using ::aidl::android::hardware::tv::tuner::DemuxFilterSubType;
@@ -42,23 +43,20 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlDemux::TunerHidlDemux(sp<IDemux> demux, int id) {
+TunerHidlDemux::TunerHidlDemux(const sp<IDemux> demux, const int id,
+                               const shared_ptr<TunerHidlService> tuner) {
     mDemux = demux;
     mDemuxId = id;
+    mTunerService = tuner;
 }
 
 TunerHidlDemux::~TunerHidlDemux() {
     mDemux = nullptr;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSource(
         const shared_ptr<ITunerFrontend>& in_frontend) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     int frontendId;
     in_frontend->getFrontendId(&frontendId);
     HidlResult res = mDemux->setFrontendDataSource(frontendId);
@@ -69,12 +67,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSourceById(int frontendId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->setFrontendDataSource(frontendId);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -86,12 +78,6 @@
                                                 int32_t in_bufferSize,
                                                 const shared_ptr<ITunerFilterCallback>& in_cb,
                                                 shared_ptr<ITunerFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlDemuxFilterMainType mainType = static_cast<HidlDemuxFilterMainType>(in_type.mainType);
     HidlDemuxFilterType filterType{
             .mainType = mainType,
@@ -132,17 +118,12 @@
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type);
+    *_aidl_return =
+            ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type, mTunerService);
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlITimeFilter> filterSp;
     mDemux->openTimeFilter([&](HidlResult r, const sp<HidlITimeFilter>& filter) {
@@ -159,12 +140,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
                                                    int32_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     uint32_t avSyncHwId;
     HidlResult res;
     sp<HidlIFilter> halFilter = static_cast<TunerHidlFilter*>(tunerFilter.get())->getHalFilter();
@@ -181,12 +156,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     uint64_t time;
     HidlResult res;
     mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId), [&](HidlResult r, uint64_t ts) {
@@ -204,12 +173,6 @@
 ::ndk::ScopedAStatus TunerHidlDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
                                              const shared_ptr<ITunerDvrCallback>& in_cb,
                                              shared_ptr<ITunerDvr>* _aidl_return) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res;
     sp<HidlIDvrCallback> callback = new TunerHidlDvr::DvrCallback(in_cb);
     sp<HidlIDvr> hidlDvr;
@@ -228,12 +191,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::connectCiCam(int32_t ciCamId) {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -242,12 +199,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::disconnectCiCam() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->disconnectCiCam();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -256,15 +207,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDemux::close() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(HidlResult::UNAVAILABLE));
-    }
-
     HidlResult res = mDemux->close();
-    mDemux = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/hidl/TunerHidlDemux.h b/services/tuner/hidl/TunerHidlDemux.h
index d535da6..94a715e 100644
--- a/services/tuner/hidl/TunerHidlDemux.h
+++ b/services/tuner/hidl/TunerHidlDemux.h
@@ -37,9 +37,12 @@
 namespace tv {
 namespace tuner {
 
+class TunerHidlService;
+
 class TunerHidlDemux : public BnTunerDemux {
 public:
-    TunerHidlDemux(sp<HidlIDemux> demux, int demuxId);
+    TunerHidlDemux(const sp<HidlIDemux> demux, const int demuxId,
+                   const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlDemux();
 
     ::ndk::ScopedAStatus setFrontendDataSource(
@@ -64,6 +67,7 @@
 private:
     sp<HidlIDemux> mDemux;
     int mDemuxId;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlDescrambler.cpp b/services/tuner/hidl/TunerHidlDescrambler.cpp
index dd8cd9c..51b7ede 100644
--- a/services/tuner/hidl/TunerHidlDescrambler.cpp
+++ b/services/tuner/hidl/TunerHidlDescrambler.cpp
@@ -45,12 +45,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::setDemuxSource(
         const shared_ptr<ITunerDemux>& in_tunerDemux) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->setDemuxSource(
             static_cast<TunerHidlDemux*>(in_tunerDemux.get())->getId());
     if (res != HidlResult::SUCCESS) {
@@ -60,12 +54,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->setKeyToken(in_keyToken);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -75,12 +63,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::addPid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     sp<HidlIFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -94,12 +76,6 @@
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::removePid(
         const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     sp<HidlIFilter> halFilter =
             (in_optionalSourceFilter == nullptr)
                     ? nullptr
@@ -112,15 +88,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDescrambler::close() {
-    if (mDescrambler == nullptr) {
-        ALOGE("IDescrambler is not initialized.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDescrambler->close();
-    mDescrambler = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 1a619d5..285e32b 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -54,12 +54,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     MQDesc dvrMQDesc;
     HidlResult res;
     mDvr->getQueueDesc([&](HidlResult r, const MQDesc& desc) {
@@ -72,17 +66,11 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(dvrMQDesc, &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
+    *_aidl_return = std::move(aidlMQDesc);
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::configure(const DvrSettings& in_settings) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->configure(getHidlDvrSettings(in_settings));
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -91,12 +79,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,12 +98,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (in_filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -141,12 +117,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::start() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->start();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -155,12 +125,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::stop() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->stop();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -169,12 +133,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::flush() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->flush();
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -183,21 +141,18 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlDvr::close() {
-    if (mDvr == nullptr) {
-        ALOGE("IDvr is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mDvr->close();
-    mDvr = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
     return ::ndk::ScopedAStatus::ok();
 }
 
+::ndk::ScopedAStatus TunerHidlDvr::setStatusCheckIntervalHint(int64_t /* in_milliseconds */) {
+    HidlResult res = HidlResult::UNAVAILABLE;
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
 HidlDvrSettings TunerHidlDvr::getHidlDvrSettings(const DvrSettings& settings) {
     HidlDvrSettings s;
     switch (mType) {
diff --git a/services/tuner/hidl/TunerHidlDvr.h b/services/tuner/hidl/TunerHidlDvr.h
index a280ff7..aa86b14 100644
--- a/services/tuner/hidl/TunerHidlDvr.h
+++ b/services/tuner/hidl/TunerHidlDvr.h
@@ -63,6 +63,7 @@
     ::ndk::ScopedAStatus stop() override;
     ::ndk::ScopedAStatus flush() override;
     ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
 
     struct DvrCallback : public HidlIDvrCallback {
         DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index fe74a5c..617622d 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -92,31 +92,32 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlFilter::TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb,
-                                 DemuxFilterType type)
+TunerHidlFilter::TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+                                 const DemuxFilterType type,
+                                 const shared_ptr<TunerHidlService> tuner)
       : mFilter(filter),
         mType(type),
         mStarted(false),
         mShared(false),
         mClientPid(-1),
-        mFilterCallback(cb) {
+        mFilterCallback(cb),
+        mTunerService(tuner) {
     mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
 }
 
 TunerHidlFilter::~TunerHidlFilter() {
-    Mutex::Autolock _l(mLock);
-    mFilter = nullptr;
-    mFilter_1_1 = nullptr;
+    freeSharedFilterToken("");
+
+    {
+        Mutex::Autolock _l(mLock);
+        mFilter = nullptr;
+        mFilter_1_1 = nullptr;
+        mTunerService = nullptr;
+    }
 }
 
 ::ndk::ScopedAStatus TunerHidlFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -139,19 +140,13 @@
 
     AidlMQDesc aidlMQDesc;
     unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(filterMQDesc, &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
+    *_aidl_return = std::move(aidlMQDesc);
 
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlFilter::getId(int32_t* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -200,12 +195,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::configure(const DemuxFilterSettings& in_settings) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -318,12 +307,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (filter == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -378,12 +361,6 @@
 ::ndk::ScopedAStatus TunerHidlFilter::releaseAvHandle(const NativeHandle& in_handle,
                                                       int64_t in_avDataId) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         ALOGD("%s is called on a shared filter", __FUNCTION__);
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -391,7 +368,7 @@
     }
 
     hidl_handle handle;
-    handle.setTo(makeFromAidl(in_handle), true);
+    handle.setTo(makeFromAidl(in_handle));
     HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -407,12 +384,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::start() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -434,12 +405,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::stop() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -461,12 +426,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::flush() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -487,12 +446,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::close() {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared) {
         IPCThreadState* ipc = IPCThreadState::self();
         int32_t callingPid = ipc->getCallingPid();
@@ -501,7 +454,7 @@
                 mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
                 mFilterCallback->detachSharedFilterCallback();
             }
-            TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+            mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
         } else {
             // Calling from shared process, do not really close this filter.
             if (mFilterCallback != nullptr) {
@@ -516,8 +469,6 @@
         mFilterCallback->detachCallbacks();
     }
     HidlResult res = mFilter->close();
-    mFilter = nullptr;
-    mFilter_1_1 = nullptr;
     mStarted = false;
     mShared = false;
     mClientPid = -1;
@@ -531,12 +482,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::acquireSharedFilterToken(string* _aidl_return) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (mShared || mStarted) {
         ALOGD("create SharedFilter in wrong state");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -545,8 +490,7 @@
 
     IPCThreadState* ipc = IPCThreadState::self();
     mClientPid = ipc->getCallingPid();
-    string token =
-            TunerHidlService::getTunerService()->addFilterToShared(this->ref<TunerHidlFilter>());
+    string token = mTunerService->addFilterToShared(this->ref<TunerHidlFilter>());
     _aidl_return->assign(token);
     mShared = true;
 
@@ -555,12 +499,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
     Mutex::Autolock _l(mLock);
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (!mShared) {
         // The filter is not shared or the shared filter has been closed.
         return ::ndk::ScopedAStatus::ok();
@@ -571,19 +509,13 @@
         mFilterCallback->detachSharedFilterCallback();
     }
 
-    TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+    mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
     mShared = false;
 
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerHidlFilter::getFilterType(DemuxFilterType* _aidl_return) {
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     *_aidl_return = mType;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -908,6 +840,10 @@
                 static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scHevc>()));
         break;
     }
+    case DemuxFilterScIndexMask::scVvc: {
+        // scVvc has no HIDL 1.1 equivalent, so this case is not expected to be reached.
+        break;
+    }
     }
     return record;
 }
@@ -1084,8 +1020,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::media>(move(media));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::media>(std::move(media));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1101,8 +1037,8 @@
         section.dataLength = static_cast<int64_t>(sectionEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::section>(move(section));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::section>(std::move(section));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1117,8 +1053,8 @@
         pes.mpuSequenceNumber = static_cast<int32_t>(pesEvent.mpuSequenceNumber);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::pes>(move(pes));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::pes>(std::move(pes));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1167,8 +1103,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::tsRecord>(move(tsRecord));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::tsRecord>(std::move(tsRecord));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1194,8 +1130,8 @@
         }
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::mmtpRecord>(move(mmtpRecord));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::mmtpRecord>(std::move(mmtpRecord));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1213,8 +1149,8 @@
         download.dataLength = static_cast<int32_t>(downloadEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::download>(move(download));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::download>(std::move(download));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1227,8 +1163,8 @@
         ipPayload.dataLength = static_cast<int32_t>(ipPayloadEvent.dataLength);
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::ipPayload>(move(ipPayload));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::ipPayload>(std::move(ipPayload));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1245,8 +1181,8 @@
         copy(descrData.begin(), descrData.end(), temi.descrData.begin());
 
         DemuxFilterEvent filterEvent;
-        filterEvent.set<DemuxFilterEvent::temi>(move(temi));
-        res.push_back(move(filterEvent));
+        filterEvent.set<DemuxFilterEvent::temi>(std::move(temi));
+        res.push_back(std::move(filterEvent));
     }
 }
 
@@ -1268,15 +1204,15 @@
     }
 
     DemuxFilterEvent filterEvent;
-    filterEvent.set<DemuxFilterEvent::monitorEvent>(move(monitor));
-    res.push_back(move(filterEvent));
+    filterEvent.set<DemuxFilterEvent::monitorEvent>(std::move(monitor));
+    res.push_back(std::move(filterEvent));
 }
 
 void TunerHidlFilter::FilterCallback::getRestartEvent(
         const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
     DemuxFilterEvent filterEvent;
     filterEvent.set<DemuxFilterEvent::startId>(static_cast<int32_t>(eventsExt[0].startId()));
-    res.push_back(move(filterEvent));
+    res.push_back(std::move(filterEvent));
 }
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlFilter.h b/services/tuner/hidl/TunerHidlFilter.h
index 63c7a1b..a58eeca 100644
--- a/services/tuner/hidl/TunerHidlFilter.h
+++ b/services/tuner/hidl/TunerHidlFilter.h
@@ -114,6 +114,8 @@
 const static int IP_V4_LENGTH = 4;
 const static int IP_V6_LENGTH = 16;
 
+class TunerHidlService;
+
 class TunerHidlFilter : public BnTunerFilter {
 public:
     class FilterCallback : public HidlIFilterCallback {
@@ -165,7 +167,8 @@
         Mutex mCallbackLock;
     };
 
-    TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb, DemuxFilterType type);
+    TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+                    const DemuxFilterType type, const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlFilter();
 
     ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -230,6 +233,7 @@
     int32_t mClientPid;
     sp<FilterCallback> mFilterCallback;
     Mutex mLock;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlFrontend.cpp b/services/tuner/hidl/TunerHidlFrontend.cpp
index 03957f3..7ffb2a4 100644
--- a/services/tuner/hidl/TunerHidlFrontend.cpp
+++ b/services/tuner/hidl/TunerHidlFrontend.cpp
@@ -176,26 +176,23 @@
 namespace tv {
 namespace tuner {
 
-TunerHidlFrontend::TunerHidlFrontend(sp<HidlIFrontend> frontend, int id) {
+TunerHidlFrontend::TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+                                     const shared_ptr<TunerHidlService> tuner) {
     mFrontend = frontend;
     mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
     mId = id;
+    mTunerService = tuner;
 }
 
 TunerHidlFrontend::~TunerHidlFrontend() {
     mFrontend = nullptr;
     mFrontend_1_1 = nullptr;
     mId = -1;
+    mTunerService = nullptr;
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::setCallback(
         const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (tunerFrontendCallback == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -211,12 +208,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::tune(const FrontendSettings& settings) {
-    if (mFrontend == nullptr) {
-        ALOGE("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     HidlFrontendSettings frontendSettings;
     HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -234,12 +225,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::stopTune() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mFrontend->stopTune();
     if (status == HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::ok();
@@ -250,12 +235,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFrontend::scan(const FrontendSettings& settings,
                                              FrontendScanType frontendScanType) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     HidlFrontendSettings frontendSettings;
     HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -276,12 +255,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::stopScan() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mFrontend->stopScan();
     if (status == HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::ok();
@@ -291,12 +264,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     if (lnb == nullptr) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -349,18 +316,8 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlFrontend::close() {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    TunerHidlService::getTunerService()->removeFrontend(this->ref<TunerHidlFrontend>());
-
+    mTunerService->removeFrontend(this->ref<TunerHidlFrontend>());
     HidlResult status = mFrontend->close();
-    mFrontend = nullptr;
-    mFrontend_1_1 = nullptr;
-
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
@@ -370,12 +327,6 @@
 
 ::ndk::ScopedAStatus TunerHidlFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
                                                   vector<FrontendStatus>* _aidl_return) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     vector<HidlFrontendStatus> status;
     vector<HidlFrontendStatusExt1_1> statusExt;
@@ -442,11 +393,6 @@
 }
 
 void TunerHidlFrontend::setLna(bool bEnable) {
-    if (mFrontend == nullptr) {
-        ALOGD("IFrontend is not initialized");
-        return;
-    }
-
     mFrontend->setLna(bEnable);
 }
 
diff --git a/services/tuner/hidl/TunerHidlFrontend.h b/services/tuner/hidl/TunerHidlFrontend.h
index f698655..f54127b 100644
--- a/services/tuner/hidl/TunerHidlFrontend.h
+++ b/services/tuner/hidl/TunerHidlFrontend.h
@@ -64,9 +64,12 @@
 namespace tv {
 namespace tuner {
 
+class TunerHidlService;
+
 class TunerHidlFrontend : public BnTunerFrontend {
 public:
-    TunerHidlFrontend(sp<HidlIFrontend> frontend, int id);
+    TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+                      const shared_ptr<TunerHidlService> tuner);
     virtual ~TunerHidlFrontend();
 
     ::ndk::ScopedAStatus setCallback(
@@ -118,6 +121,7 @@
     int mId;
     sp<HidlIFrontend> mFrontend;
     sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+    shared_ptr<TunerHidlService> mTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
index 6f55f1e..6bc36be 100644
--- a/services/tuner/hidl/TunerHidlService.cpp
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -24,6 +24,7 @@
 #include <android/binder_manager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/PermissionCache.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 
 #include "TunerHelper.h"
@@ -46,7 +47,6 @@
 using ::aidl::android::hardware::tv::tuner::FrontendIsdbtTimeInterleaveMode;
 using ::aidl::android::hardware::tv::tuner::FrontendType;
 using ::aidl::android::hardware::tv::tuner::Result;
-using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
 using ::android::IPCThreadState;
 using ::android::PermissionCache;
 using ::android::hardware::hidl_vec;
@@ -63,47 +63,9 @@
 namespace tv {
 namespace tuner {
 
-shared_ptr<TunerHidlService> TunerHidlService::sTunerService = nullptr;
-
 TunerHidlService::TunerHidlService() {
-    if (!TunerHelper::checkTunerFeature()) {
-        ALOGD("Device doesn't have tuner hardware.");
-        return;
-    }
-
-    updateTunerResources();
-}
-
-TunerHidlService::~TunerHidlService() {
-    mOpenedFrontends.clear();
-    mLnaStatus = -1;
-}
-
-binder_status_t TunerHidlService::instantiate() {
-    if (HidlITuner::getService() == nullptr) {
-        ALOGD("Failed to get ITuner HIDL HAL");
-        return STATUS_NAME_NOT_FOUND;
-    }
-
-    sTunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
-    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerHidlService> TunerHidlService::getTunerService() {
-    return sTunerService;
-}
-
-bool TunerHidlService::hasITuner() {
-    ALOGV("hasITuner");
-    if (mTuner != nullptr) {
-        return true;
-    }
-
     mTuner = HidlITuner::getService();
-    if (mTuner == nullptr) {
-        ALOGE("Failed to get ITuner service");
-        return false;
-    }
+    ALOGE_IF(mTuner == nullptr, "Failed to get ITuner service");
     mTunerVersion = TUNER_HAL_VERSION_1_0;
 
     mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
@@ -113,23 +75,35 @@
         ALOGD("Failed to get ITuner_1_1 service");
     }
 
-    return true;
+    // Register tuner resources to TRM.
+    updateTunerResources();
 }
 
-bool TunerHidlService::hasITuner_1_1() {
-    ALOGV("hasITuner_1_1");
-    hasITuner();
-    return (mTunerVersion == TUNER_HAL_VERSION_1_1);
+TunerHidlService::~TunerHidlService() {
+    mOpenedFrontends.clear();
+    mLnaStatus = -1;
+    mTuner = nullptr;
+    mTuner_1_1 = nullptr;
+}
+
+binder_status_t TunerHidlService::instantiate() {
+    if (HidlITuner::getService() == nullptr) {
+        ALOGD("Failed to get ITuner HIDL HAL");
+        return STATUS_NAME_NOT_FOUND;
+    }
+
+    shared_ptr<TunerHidlService> tunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
+    bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+    if (lazyHal) {
+        return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+                                                   getServiceName());
+    }
+    return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
 ::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
                                                  shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     uint32_t id;
     sp<IDemux> demuxSp = nullptr;
@@ -140,7 +114,8 @@
         ALOGD("open demux, id = %d", demuxId);
     });
     if (res == HidlResult::SUCCESS) {
-        *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id);
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id,
+                                                                   this->ref<TunerHidlService>());
         return ::ndk::ScopedAStatus::ok();
     }
 
@@ -148,13 +123,22 @@
     return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
 }
 
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int32_t /* in_demuxHandle */,
+                                                    DemuxInfo* /* _aidl_return */) {
+    ALOGE("getDemuxInfo is not supported");
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(HidlResult::UNAVAILABLE));
+}
+
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfoList(
+        vector<DemuxInfo>* /* _aidle_return */) {
+    ALOGE("getDemuxInfoList is not supported");
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(HidlResult::UNAVAILABLE));
+}
+
 ::ndk::ScopedAStatus TunerHidlService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
     ALOGV("getDemuxCaps");
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res;
     HidlDemuxCapabilities caps;
     mTuner->getDemuxCaps([&](HidlResult r, const HidlDemuxCapabilities& demuxCaps) {
@@ -171,11 +155,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getFrontendIds(vector<int32_t>* ids) {
-    if (!hasITuner()) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     hidl_vec<HidlFrontendId> feIds;
     HidlResult res = getHidlFrontendIds(feIds);
     if (res != HidlResult::SUCCESS) {
@@ -188,12 +167,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlFrontendInfo info;
     HidlResult res = getHidlFrontendInfo(id, info);
     if (res != HidlResult::SUCCESS) {
@@ -202,7 +175,7 @@
 
     HidlFrontendDtmbCapabilities dtmbCaps;
     if (static_cast<HidlFrontendType>(info.type) == HidlFrontendType::DTMB) {
-        if (!hasITuner_1_1()) {
+        if (mTuner_1_1 == nullptr) {
             ALOGE("ITuner_1_1 service is not init.");
             return ::ndk::ScopedAStatus::fromServiceSpecificError(
                     static_cast<int32_t>(Result::UNAVAILABLE));
@@ -224,12 +197,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
                                                     shared_ptr<ITunerFrontend>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlIFrontend> frontend;
     int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
@@ -241,8 +208,8 @@
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
     }
 
-    shared_ptr<TunerHidlFrontend> tunerFrontend =
-            ::ndk::SharedRefBase::make<TunerHidlFrontend>(frontend, id);
+    shared_ptr<TunerHidlFrontend> tunerFrontend = ::ndk::SharedRefBase::make<TunerHidlFrontend>(
+            frontend, id, this->ref<TunerHidlService>());
     if (mLnaStatus != -1) {
         tunerFrontend->setLna(mLnaStatus == 1);
     }
@@ -255,12 +222,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
@@ -278,12 +239,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openLnbByName(const string& lnbName,
                                                      shared_ptr<ITunerLnb>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     int lnbId;
     HidlResult status;
     sp<HidlILnb> lnb;
@@ -302,12 +257,6 @@
 
 ::ndk::ScopedAStatus TunerHidlService::openDescrambler(
         int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGD("get ITuner failed");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     sp<HidlIDescrambler> descrambler;
     //int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -324,7 +273,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::getTunerHalVersion(int* _aidl_return) {
-    hasITuner();
     *_aidl_return = mTunerVersion;
     return ::ndk::ScopedAStatus::ok();
 }
@@ -332,7 +280,7 @@
 ::ndk::ScopedAStatus TunerHidlService::openSharedFilter(
         const string& in_filterToken, const shared_ptr<ITunerFilterCallback>& in_cb,
         shared_ptr<ITunerFilter>* _aidl_return) {
-    if (!hasITuner()) {
+    if (mTuner == nullptr) {
         ALOGE("get ITuner failed");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
@@ -368,8 +316,13 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
+::ndk::ScopedAStatus TunerHidlService::isLnaSupported(bool* /* _aidl_return */) {
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
 ::ndk::ScopedAStatus TunerHidlService::setLna(bool bEnable) {
-    if (!hasITuner()) {
+    if (mTuner == nullptr) {
         ALOGE("get ITuner failed");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
@@ -428,11 +381,6 @@
 }
 
 void TunerHidlService::updateTunerResources() {
-    if (!hasITuner()) {
-        ALOGE("Failed to updateTunerResources");
-        return;
-    }
-
     TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
 }
 
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
index 2252d35..526c5e6 100644
--- a/services/tuner/hidl/TunerHidlService.h
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -33,6 +33,7 @@
 using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
 using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
 using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxInfo;
 using ::aidl::android::hardware::tv::tuner::FrontendInfo;
 using ::aidl::android::hardware::tv::tuner::FrontendType;
 using ::aidl::android::media::tv::tuner::ITunerDemux;
@@ -83,12 +84,15 @@
     ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
                                    shared_ptr<ITunerDemux>* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
     ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
                                          shared_ptr<ITunerDescrambler>* _aidl_return) override;
     ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
                                           const shared_ptr<ITunerFilterCallback>& in_cb,
                                           shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
     ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
     ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
                                                  int32_t in_maxNumber) override;
@@ -99,11 +103,7 @@
     void removeSharedFilter(const shared_ptr<TunerHidlFilter>& sharedFilter);
     void removeFrontend(const shared_ptr<TunerHidlFrontend>& frontend);
 
-    static shared_ptr<TunerHidlService> getTunerService();
-
 private:
-    bool hasITuner();
-    bool hasITuner_1_1();
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
     vector<int32_t> getTRMLnbHandles();
@@ -121,8 +121,6 @@
     Mutex mOpenedFrontendsLock;
     unordered_set<shared_ptr<TunerHidlFrontend>> mOpenedFrontends;
     int mLnaStatus = -1;
-
-    static shared_ptr<TunerHidlService> sTunerService;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlTimeFilter.cpp b/services/tuner/hidl/TunerHidlTimeFilter.cpp
index d0606d6..06a71d0 100644
--- a/services/tuner/hidl/TunerHidlTimeFilter.cpp
+++ b/services/tuner/hidl/TunerHidlTimeFilter.cpp
@@ -43,12 +43,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::setTimeStamp(int64_t timeStamp) {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mTimeFilter->setTimeStamp(static_cast<uint64_t>(timeStamp));
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -57,12 +51,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::clearTimeStamp() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status = mTimeFilter->clearTimeStamp();
     if (status != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -71,13 +59,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::getSourceTime(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     mTimeFilter->getSourceTime([&](HidlResult r, uint64_t t) {
         status = r;
@@ -91,13 +72,6 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::getTimeStamp(int64_t* _aidl_return) {
-    if (mTimeFilter == nullptr) {
-        *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult status;
     mTimeFilter->getTimeStamp([&](HidlResult r, uint64_t t) {
         status = r;
@@ -111,15 +85,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlTimeFilter::close() {
-    if (mTimeFilter == nullptr) {
-        ALOGE("ITimeFilter is not initialized");
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(
-                static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
     HidlResult res = mTimeFilter->close();
-    mTimeFilter = nullptr;
-
     if (res != HidlResult::SUCCESS) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
     }
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index a014dea..90f1731 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -18,6 +18,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
+#include <hidl/HidlTransportSupport.h>
 
 #include "TunerService.h"
 #include "hidl/TunerHidlService.h"
@@ -32,6 +33,8 @@
 
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
+    hardware::configureRpcThreadpool(16, true);
+    ProcessState::self()->setThreadPoolMaxThreadCount(16);
 
     // Check the legacy HIDL HAL first. If it doesn't exist, use the AIDL HAL.
     binder_status_t status = TunerHidlService::instantiate();
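The main_tunerservice.cpp hunk above sizes both the HIDL RPC threadpool and the binder threadpool before any service is registered. A minimal sketch of that pattern, using only standard libhidl/libbinder calls and eliding the service registration the real file performs:

    // Minimal sketch of the threadpool setup added above; service
    // registration is elided -- see main_tunerservice.cpp in this patch.
    #include <binder/IPCThreadState.h>
    #include <binder/ProcessState.h>
    #include <hidl/HidlTransportSupport.h>

    using android::IPCThreadState;
    using android::ProcessState;
    using android::hardware::configureRpcThreadpool;

    int main() {
        // 16 HIDL RPC threads plus 16 binder threads, matching the hunk above.
        configureRpcThreadpool(16, true /* callerWillJoin */);
        ProcessState::self()->setThreadPoolMaxThreadCount(16);
        ProcessState::self()->startThreadPool();

        // ... register TunerHidlService or TunerService here ...

        IPCThreadState::self()->joinThreadPool();
        return 0;
    }
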
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
index 6a3e199..8e5d100 100644
--- a/services/tuner/mediatuner.rc
+++ b/services/tuner/mediatuner.rc
@@ -1,8 +1,15 @@
+# The media.tuner service is not started by default unless tuner.server.enable
+# is set to "true" by TRM (Tuner Resource Manager). TRM checks ro.tuner.lazyhal;
+# if it isn't "true", TRM sets tuner.server.enable to "true".
 service media.tuner /system/bin/mediatuner
     class main
     group media
+    user root
     ioprio rt 4
-    onrestart restart vendor.tuner-hal-1-0
-    onrestart restart vendor.tuner-hal-1-1
-    onrestart restart vendor.tuner-default
     task_profiles ProcessCapacityHigh HighPerformance
+    interface aidl media.tuner
+    oneshot
+    disabled
+
+on property:tuner.server.enable=true
+    enable media.tuner
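
The new mediatuner.rc comment describes a property handshake: media.tuner stays disabled until tuner.server.enable becomes "true", which TRM sets when lazy-HAL mode is off. As an illustrative sketch only (the function name is hypothetical; TRM's real logic lives in the Tuner Resource Manager), that check-and-set could be written with libbase properties:

    // Illustrative sketch of the property handshake described above.
    #include <android-base/properties.h>

    void enableTunerServerIfNotLazy() {
        // If lazy-HAL mode is not requested, start media.tuner eagerly by
        // flipping the property that the init trigger above listens for.
        if (!android::base::GetBoolProperty("ro.tuner.lazyhal", false)) {
            android::base::SetProperty("tuner.server.enable", "true");
        }
    }

Keeping the service oneshot and disabled behind a property trigger presumably ensures the process is only launched when the Tuner stack actually needs it.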
diff --git a/tools/OWNERS b/tools/OWNERS
new file mode 100644
index 0000000..7598c6f
--- /dev/null
+++ b/tools/OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1344
+essick@google.com
+
+# The reliability team builds mainline trains, so it needs to manage these scripts
+include platform/frameworks/av/:/media/janitors/reliability_mainline_OWNERS
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index bd82315..cd3e579 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index cb5fc44..1cc3b2b 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -17,7 +17,7 @@
 
 # tunables
 DEV_BRANCH=master
-MAINLINE_BRANCH=sc-mainline-prod
+MAINLINE_BRANCH=tm-mainline-prod
 
 ###
 RED=$(tput setaf 1)